Compare commits
207 Commits
| SHA1 |
|---|
| 7ced6fa7d9 |
| f9405ed2fd |
| 14c874e701 |
| 7273fe410d |
| 23a54e3a5e |
| 10bec0fa36 |
| 39803621b1 |
| 154f691fbd |
| 770c28835f |
| 5fb1cabfbb |
| f6a23b5e3d |
| fc271a37f8 |
| 6ef697f6b7 |
| 17e958e696 |
| 8318a78fd0 |
| 89a4b08b36 |
| 52730a5f90 |
| bf3d118dbd |
| 72d0e69b3b |
| fc3930663a |
| 9c17ee09bb |
| be84b18376 |
| d7ac0c8260 |
| 40a50e02e5 |
| c93db7b42b |
| 99091ad42c |
| 7d1cfd84ad |
| 158abf9d74 |
| d90653d7f6 |
| 29571159fd |
| 47f1b53ca0 |
| c10d92a0c3 |
| 1f18220dfa |
| a4a2d6e29c |
| 786fbba428 |
| 8743270035 |
| 73d9b2e585 |
| 092c7e7260 |
| 8b3761656f |
| c89f707966 |
| b1764160c5 |
| 7e1e33d51f |
| 2a81418738 |
| f72a961bb4 |
| e398bf8a24 |
| 7787a5cf0e |
| 0786969ef2 |
| 98040ad44e |
| ad4ab65a9f |
| e05113ea27 |
| c78898edf7 |
| 9f6be1f471 |
| f8a48637ec |
| df73c567ce |
| 47ce8258b0 |
| e523d9a1cb |
| d65632c097 |
| 29bf4f72f3 |
| ee79b9939b |
| 156ec5b441 |
| 931028b43e |
| 687c4d35ed |
| 71ac036dc3 |
| da3ab42f10 |
| 537fc1caad |
| 0ec0eaf937 |
| 1babda2e69 |
| e319ea82d0 |
| 63c8f63acf |
| 4f7d3131c5 |
| 6a8609c7f8 |
| d9477501ae |
| be46540959 |
| 773f20f861 |
| 171386baaf |
| aa2716c2c0 |
| 1904a809a4 |
| f8dddc366b |
| f7bc5228ec |
| 4782900da5 |
| c011ff81b9 |
| cb768ce953 |
| 5d08c040b9 |
| 7a0f38d10e |
| 1e9ab02e3f |
| 5e378bd21b |
| a2e5009f42 |
| 44cc0dad94 |
| 68de8e57be |
| fde59411da |
| 7d48dd0ce4 |
| 699a7a00f5 |
| 16d55c8973 |
| be0b9415b4 |
| 93df58a8d5 |
| 999e24902c |
| d19208abec |
| 8f54676b97 |
| 61bbc9ab7f |
| 9e802a59c7 |
| 38e1e632a7 |
| 89cf78cc4a |
| cecd47d679 |
| ed35e577af |
| f54cf40eda |
| 5314e6f8bb |
| b7b2709177 |
| 34c47e84c0 |
| f7d91184a0 |
| eeddc860f7 |
| bcefdd209b |
| 3f1fea2d19 |
| 665322a69a |
| 3e59bc3b38 |
| 1752931dde |
| 4b9bae1656 |
| 74c08fa8aa |
| 628aad8f64 |
| 39c8672e6d |
| 620991ddee |
| ba8f28f623 |
| 6389055d3a |
| 4465ed9819 |
| 8dc97e5d01 |
| a037c50961 |
| 28d5f02e15 |
| e65a353fc4 |
| 42400e7836 |
| cdd7a2008b |
| c2b80fddd8 |
| 8eaa8119e6 |
| 5fccadd41e |
| 5d0ab340e3 |
| 5d7e9199de |
| 5e2c65a650 |
| 1f5553d2d8 |
| 40d5ab050f |
| 1629ba11b2 |
| 6e94f138d5 |
| 1b7273db42 |
| 8857903831 |
| 69cacb5fe3 |
| ba4b958114 |
| 6d971ef2c8 |
| 03ff37db92 |
| 428126ef75 |
| aee242c52a |
| 1299f78e80 |
| fc3d13d5bc |
| 6d2f7b0c9e |
| 84128cab28 |
| 8f2d2784f8 |
| 37ee677a67 |
| 1729744a9e |
| b59903c890 |
| 5063e68562 |
| 3cbb572c7c |
| bb4f11a239 |
| 8a1750691b |
| 5cffaf9d73 |
| a9e50b28df |
| 24512639fb |
| 1355d043b6 |
| c7600ef9ed |
| a4cb0d6432 |
| 437368bedb |
| 4057b193d6 |
| 89b1b5c7f6 |
| 60d3438277 |
| a578a10b5b |
| 5cf24af00b |
| 6c0e455146 |
| 932a946868 |
| 99a1cfbff9 |
| d7bbdde1e7 |
| 7dba98dce8 |
| c1ae30bd82 |
| 6aa5aea424 |
| bc1b00813c |
| d1ad3ab301 |
| 923134bbdc |
| e211f69df5 |
| fc35d706a0 |
| 82894e6aff |
| 7356f157bb |
| 783fc6268a |
| 360b17f299 |
| 2cfcdaff7c |
| 9d9cf46e47 |
| 98661cf9a2 |
| cc50cd5e3a |
| d8399d2d23 |
| f2134fb2f8 |
| ec3ccc004e |
| 6cb246c18a |
| e73cea0ecf |
| c69e185a2a |
| 5789f59222 |
| 5178eeb340 |
| bc5e7fa4a2 |
| c28ace6d40 |
| de4fae37e1 |
| 2f1aff3ec3 |
| 6970f934bd |
| 6b5168e102 |
| 4420edb4dc |
| c2fae95fee |
.github/PULL_REQUEST_TEMPLATE.md (vendored): 1 change

@@ -6,6 +6,7 @@ Make sure that:
-->

- [ ] You have read the [Spring Data contribution guidelines](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc).
- [ ] There is a ticket in the bug tracker for the project in our [JIRA](https://jira.spring.io/browse/DATAMONGO).
- [ ] You use the code formatters provided [here](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide) and have them applied to your changes. Don’t submit any formatting related changes.
- [ ] You submit test cases (unit or integration tests) that back your changes.
- [ ] You added yourself as author in the headers of the classes you touched. Amend the date range in the Apache license header if needed. For new types, add the license header (copy from another file and set the current year only).
.github/workflows/project.yml (vendored): 47 changes

@@ -1,47 +0,0 @@
# GitHub Actions to automate GitHub issues for Spring Data Project Management

name: Spring Data GitHub Issues

on:
  issues:
    types: [opened, edited, reopened]
  issue_comment:
    types: [created]
  pull_request_target:
    types: [opened, edited, reopened]

jobs:
  Inbox:
    runs-on: ubuntu-latest
    if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request == null
    steps:
      - name: Create or Update Issue Card
        uses: peter-evans/create-or-update-project-card@v1.1.2
        with:
          project-name: 'Spring Data'
          column-name: 'Inbox'
          project-location: 'spring-projects'
          token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }}
  Pull-Request:
    runs-on: ubuntu-latest
    if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request != null
    steps:
      - name: Create or Update Pull Request Card
        uses: peter-evans/create-or-update-project-card@v1.1.2
        with:
          project-name: 'Spring Data'
          column-name: 'Review pending'
          project-location: 'spring-projects'
          issue-number: ${{ github.event.pull_request.number }}
          token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }}
  Feedback-Provided:
    runs-on: ubuntu-latest
    if: github.repository_owner == 'spring-projects' && github.event_name == 'issue_comment' && github.event.action == 'created' && github.actor != 'spring-projects-issues' && github.event.pull_request == null && github.event.issue.state == 'open' && contains(toJSON(github.event.issue.labels), 'waiting-for-feedback')
    steps:
      - name: Update Project Card
        uses: peter-evans/create-or-update-project-card@v1.1.2
        with:
          project-name: 'Spring Data'
          column-name: 'Feedback provided'
          project-location: 'spring-projects'
          token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }}
.mvn/wrapper/maven-wrapper.properties (vendored): 3 changes

@@ -1,2 +1 @@
#Mon Oct 11 14:30:24 CEST 2021
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.3/apache-maven-3.8.3-bin.zip
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip
CI.adoc: 2 changes

@@ -1,6 +1,6 @@
= Continuous Integration

image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Moore%20(main)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Moore%20(master)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F2.1.x&subject=Lovelace%20(2.1.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F1.10.x&subject=Ingalls%20(1.10.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
CODE_OF_CONDUCT.adoc (new file): 27 changes

@@ -0,0 +1,27 @@
= Contributor Code of Conduct

As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities.

We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality.

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery
* Personal attacks
* Trolling or insulting/derogatory comments
* Public or private harassment
* Publishing other's private information, such as physical or electronic addresses, without explicit permission
* Other unethical or unprofessional conduct

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.

By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team.

This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community.

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting a project maintainer at spring-code-of-conduct@pivotal.io.
All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances.
Maintainers are obligated to maintain confidentiality with regard to the reporter of an incident.

This Code of Conduct is adapted from the https://contributor-covenant.org[Contributor Covenant], version 1.3.0, available at https://contributor-covenant.org/version/1/3/0/[contributor-covenant.org/version/1/3/0/].
CONTRIBUTING.adoc

@@ -1,3 +1,3 @@
= Spring Data contribution guidelines

You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here].
You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here].
Jenkinsfile (vendored): 251 changes
@@ -3,7 +3,7 @@ pipeline {
|
||||
|
||||
triggers {
|
||||
pollSCM 'H/10 * * * *'
|
||||
upstream(upstreamProjects: "spring-data-commons/main", threshold: hudson.model.Result.SUCCESS)
|
||||
upstream(upstreamProjects: "spring-data-commons/2.2.x", threshold: hudson.model.Result.SUCCESS)
|
||||
}
|
||||
|
||||
options {
|
||||
@@ -14,22 +14,6 @@ pipeline {
|
||||
stages {
|
||||
stage("Docker images") {
|
||||
parallel {
|
||||
stage('Publish JDK 8 + MongoDB 5.0') {
|
||||
when {
|
||||
changeset "ci/openjdk8-mongodb-5.0/**"
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-5.0.0", "ci/openjdk8-mongodb-5.0/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Publish JDK 8 + MongoDB 4.0') {
|
||||
when {
|
||||
changeset "ci/openjdk8-mongodb-4.0/**"
|
||||
@@ -39,181 +23,127 @@ pipeline {
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.0.23", "ci/openjdk8-mongodb-4.0/")
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.0", "ci/openjdk8-mongodb-4.0/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Publish JDK 8 + MongoDB 4.4') {
|
||||
stage('Publish JDK 8 + MongoDB 4.1') {
|
||||
when {
|
||||
changeset "ci/openjdk8-mongodb-4.4/**"
|
||||
changeset "ci/openjdk8-mongodb-4.1/**"
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.4.4", "ci/openjdk8-mongodb-4.4/")
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.1", "ci/openjdk8-mongodb-4.1/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Publish JDK 16 + MongoDB 4.4') {
|
||||
when {
|
||||
changeset "ci/openjdk16-mongodb-4.4/**"
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
stage('Publish JDK 8 + MongoDB 4.2') {
|
||||
when {
|
||||
changeset "ci/openjdk8-mongodb-4.2/**"
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk16-with-mongodb-4.4.4", "ci/openjdk16-mongodb-4.4/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.2", "ci/openjdk8-mongodb-4.2/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage("test: baseline (jdk8)") {
|
||||
stage("test: baseline") {
|
||||
when {
|
||||
beforeAgent(true)
|
||||
anyOf {
|
||||
branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
|
||||
branch '2.2.x'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
agent {
|
||||
label 'data'
|
||||
docker {
|
||||
image 'springci/spring-data-openjdk8-with-mongodb-4.2:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('springci/spring-data-openjdk8-with-mongodb-4.0.23:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
sh 'rm -rf ?'
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
|
||||
stage("Test other configurations") {
|
||||
when {
|
||||
beforeAgent(true)
|
||||
allOf {
|
||||
branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
|
||||
anyOf {
|
||||
branch '2.2.x'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
parallel {
|
||||
stage("test: mongodb 4.0 (jdk8)") {
|
||||
stage("test: mongodb 4.0") {
|
||||
agent {
|
||||
label 'data'
|
||||
docker {
|
||||
image 'springci/spring-data-openjdk8-with-mongodb-4.0:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('springci/spring-data-openjdk8-with-mongodb-4.0.23:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
sh 'rm -rf ?'
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
|
||||
stage("test: mongodb 4.4 (jdk8)") {
|
||||
stage("test: mongodb 4.1") {
|
||||
agent {
|
||||
label 'data'
|
||||
docker {
|
||||
image 'springci/spring-data-openjdk8-with-mongodb-4.1:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('springci/spring-data-openjdk8-with-mongodb-4.4.4:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage("test: mongodb 5.0 (jdk8)") {
|
||||
agent {
|
||||
label 'data'
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('springci/spring-data-openjdk8-with-mongodb-5.0.0:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage("test: baseline (jdk16)") {
|
||||
agent {
|
||||
label 'data'
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('springci/spring-data-openjdk16-with-mongodb-4.4.4:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
sh 'rm -rf ?'
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -221,14 +151,17 @@ pipeline {
|
||||
|
||||
stage('Release to artifactory') {
|
||||
when {
|
||||
beforeAgent(true)
|
||||
anyOf {
|
||||
branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
|
||||
branch '2.2.x'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
agent {
|
||||
label 'data'
|
||||
docker {
|
||||
image 'adoptopenjdk/openjdk8:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
}
|
||||
options { timeout(time: 20, unit: 'MINUTES') }
|
||||
|
||||
@@ -237,21 +170,43 @@ pipeline {
|
||||
}
|
||||
|
||||
steps {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,artifactory ' +
|
||||
'-Dartifactory.server=https://repo.spring.io ' +
|
||||
"-Dartifactory.username=${ARTIFACTORY_USR} " +
|
||||
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
|
||||
"-Dartifactory.staging-repository=libs-snapshot-local " +
|
||||
"-Dartifactory.build-name=spring-data-mongodb " +
|
||||
"-Dartifactory.build-number=${BUILD_NUMBER} " +
|
||||
'-Dmaven.test.skip=true clean deploy -U -B'
|
||||
}
|
||||
}
|
||||
sh 'rm -rf ?'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,artifactory ' +
|
||||
'-Dartifactory.server=https://repo.spring.io ' +
|
||||
"-Dartifactory.username=${ARTIFACTORY_USR} " +
|
||||
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
|
||||
"-Dartifactory.staging-repository=libs-snapshot-local " +
|
||||
"-Dartifactory.build-name=spring-data-mongodb " +
|
||||
"-Dartifactory.build-number=${BUILD_NUMBER} " +
|
||||
'-Dmaven.test.skip=true clean deploy -U -B'
|
||||
}
|
||||
}
|
||||
|
||||
stage('Publish documentation') {
|
||||
when {
|
||||
branch '2.2.x'
|
||||
}
|
||||
agent {
|
||||
docker {
|
||||
image 'adoptopenjdk/openjdk8:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
}
|
||||
options { timeout(time: 20, unit: 'MINUTES') }
|
||||
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
|
||||
steps {
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,distribute ' +
|
||||
'-Dartifactory.server=https://repo.spring.io ' +
|
||||
"-Dartifactory.username=${ARTIFACTORY_USR} " +
|
||||
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
|
||||
"-Dartifactory.distribution-repository=temp-private-local " +
|
||||
'-Dmaven.test.skip=true clean deploy -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
README.adoc: 259 changes

@@ -1,6 +1,6 @@
image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start]

= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]
= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]

The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.

@@ -8,12 +8,10 @@ The Spring Data MongoDB project aims to provide a familiar and consistent Spring
The Spring Data MongoDB project provides integration with the MongoDB document database.
Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB `+Document+` and easily writing a repository style data access layer.

[[code-of-conduct]]
== Code of Conduct

This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io.
This project is governed by the link:CODE_OF_CONDUCT.adoc[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io.

[[getting-started]]
== Getting Started

Here is a quick teaser of an application using Spring Data Repositories in Java:

@@ -52,7 +50,12 @@ public class MyService {

@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoClientConfiguration {
class ApplicationConfig extends AbstractMongoConfiguration {

  @Override
  public MongoClient mongoClient() {
    return new MongoClient();
  }

  @Override
  protected String getDatabaseName() {
@@ -61,7 +64,6 @@ class ApplicationConfig extends AbstractMongoClientConfiguration {
}
----

[[maven-configuration]]
=== Maven configuration

Add the Maven dependency:

@@ -71,166 +73,27 @@ Add the Maven dependency:
<dependency>
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb</artifactId>
  <version>${version}</version>
  <version>${version}.RELEASE</version>
</dependency>
----

If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository
and declare the appropriate dependency version.
If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version.

[source,xml]
----
<dependency>
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb</artifactId>
  <version>${version}-SNAPSHOT</version>
  <version>${version}.BUILD-SNAPSHOT</version>
</dependency>

<repository>
  <id>spring-snapshot</id>
  <id>spring-libs-snapshot</id>
  <name>Spring Snapshot Repository</name>
  <url>https://repo.spring.io/snapshot</url>
  <url>https://repo.spring.io/libs-snapshot</url>
</repository>
----

== Upgrading from 2.x

The 4.0 MongoDB Java Driver does no longer support certain features that have already been deprecated in one of the last minor versions.
Some of the changes affect the initial setup configuration as well as compile/runtime features. We summarized the most typical changes one might encounter.

=== XML Namespace

.Changed XML Namespace Elements and Attributes:
|===
| Element / Attribute | 2.x | 3.x

| `<mongo:mongo-client />`
| Used to create a `com.mongodb.MongoClient`
| Now exposes a `com.mongodb.client.MongoClient`

| `<mongo:mongo-client replica-set="..." />`
| Was a comma delimited list of replica set members (host/port)
| Now defines the replica set name. +
Use `<mongo:client-settings cluster-hosts="..." />` instead

| `<mongo:db-factory writeConcern="..." />`
| NONE, NORMAL, SAFE, FSYNC_SAFE, REPLICAS_SAFE, MAJORITY
| W1, W2, W3, UNAKNOWLEDGED, AKNOWLEDGED, JOURNALED, MAJORITY
|===

.Removed XML Namespace Elements and Attributes:
|===
| Element / Attribute | Replacement in 3.x | Comment

| `<mongo:db-factory mongo-ref="..." />`
| `<mongo:db-factory mongo-client-ref="..." />`
| Referencing a `com.mongodb.client.MongoClient`.

| `<mongo:mongo-client credentials="..." />`
| `<mongo:mongo-client credential="..." />`
| Single authentication data instead of list.

| `<mongo:client-options />`
| `<mongo:client-settings />`
| See `com.mongodb.MongoClientSettings` for details.
|===

.New XML Namespace Elements and Attributes:
|===
| Element | Comment

| `<mongo:db-factory mongo-client-ref="..." />`
| Replacement for `<mongo:db-factory mongo-ref="..." />`

| `<mongo:db-factory connection-string="..." />`
| Replacement for `uri` and `client-uri`.

| `<mongo:mongo-client connection-string="..." />`
| Replacement for `uri` and `client-uri`.

| `<mongo:client-settings />`
| Namespace element for `com.mongodb.MongoClientSettings`.
|===

=== Java Configuration

.Java API changes
|===
| Type | Comment

| `MongoClientFactoryBean`
| Creates `com.mongodb.client.MongoClient` instead of `com.mongodb.MongoClient` +
Uses `MongoClientSettings` instead of `MongoClientOptions`.

| `MongoDataIntegrityViolationException`
| Uses `WriteConcernResult` instead of `WriteResult`.

| `BulkOperationException`
| Uses `MongoBulkWriteException` and `com.mongodb.bulk.BulkWriteError` instead of `BulkWriteException` and `com.mongodb.BulkWriteError`

| `ReactiveMongoClientFactoryBean`
| Uses `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`

| `ReactiveMongoClientSettingsFactoryBean`
| Now produces `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`
|===

.Removed Java API:
|===
| 2.x | Replacement in 3.x | Comment

| `MongoClientOptionsFactoryBean`
| `MongoClientSettingsFactoryBean`
| Creating a `com.mongodb.MongoClientSettings`.

| `AbstractMongoConfiguration`
| `AbstractMongoClientConfiguration` +
(Available since 2.1)
| Using `com.mongodb.client.MongoClient`.

| `MongoDbFactory#getLegacyDb()`
| -
| -

| `SimpleMongoDbFactory`
| `SimpleMongoClientDbFactory` +
(Available since 2.1)
|

| `MapReduceOptions#getOutputType()`
| `MapReduceOptions#getMapReduceAction()`
| Returns `MapReduceAction` instead of `MapReduceCommand.OutputType`.

| `Meta\|Query` maxScan & snapshot
|
|
|===

=== Other Changes

==== UUID Types

The MongoDB UUID representation can now be configured with different formats.
This has to be done via `MongoClientSettings` as shown in the snippet below.

.UUID Codec Configuration
====
[source,java]
----
static class Config extends AbstractMongoClientConfiguration {

  @Override
  public void configureClientSettings(MongoClientSettings.Builder builder) {
    builder.uuidRepresentation(UuidRepresentation.STANDARD);
  }

  // ...
}
----
====
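For readers upgrading code that builds the driver client directly rather than through Spring's XML or Java config, the same `MongoClientSettings` route applies. The following is a minimal, hedged sketch (not part of the README diff above): the connection string, database name, and class name are placeholder assumptions, and it only illustrates how the settings object replaces the old `MongoClientOptions`.

[source,java]
----
import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

import org.bson.UuidRepresentation;

class ClientSettingsSketch {

  public static void main(String[] args) {

    // MongoClientSettings is the single configuration carrier in the 4.x driver.
    MongoClientSettings settings = MongoClientSettings.builder()
        .applyConnectionString(new ConnectionString("mongodb://127.0.0.1:27017/?replicaSet=rs0")) // placeholder URI
        .uuidRepresentation(UuidRepresentation.STANDARD)
        .build();

    // com.mongodb.client.MongoClient is the client type the 3.x namespace and Java config now expose.
    try (MongoClient client = MongoClients.create(settings)) {
      System.out.println(client.getDatabase("test").getName()); // placeholder database name
    }
  }
}
----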
[[getting-help]]
== Getting Help

Having trouble with Spring Data? We’d love to help!

@@ -244,7 +107,6 @@ If you are just starting out with Spring, try one of the https://spring.io/guide
You can also chat with the community on https://gitter.im/spring-projects/spring-data[Gitter].
* Report bugs with Spring Data MongoDB at https://github.com/spring-projects/spring-data-mongodb/issues[github.com/spring-projects/spring-data-mongodb/issues].

[[reporting-issues]]
== Reporting Issues

Spring Data uses Github as issue tracking system to record bugs and feature requests.
@@ -255,86 +117,10 @@ If you want to raise an issue, please follow the recommendations below:
* Please provide as much information as possible with the issue report, we like to know the version of Spring Data that you are using, the JVM version, Stacktrace, etc.
* If you need to paste code, or include a stack trace use https://guides.github.com/features/mastering-markdown/[Markdown] code fences +++```+++.

[[guides]]
== Guides

The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step:

* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories.
* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories.

[[examples]]
== Examples

* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail.

[[building-from-source]]
== Building from Source

You do not need to build from source to use Spring Data. Binaries are available in https://repo.spring.io[repo.spring.io]
and accessible from Maven using the Maven configuration noted <<maven-configuration,above>>.

NOTE: Configuration for Gradle is similar to Maven.

The best way to get started is by creating a Spring Boot project using MongoDB on https://start.spring.io[start.spring.io].
Follow this https://start.spring.io/#type=maven-project&language=java&platformVersion=2.5.4&packaging=jar&jvmVersion=1.8&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb[link]
to build an imperative application and this https://start.spring.io/#type=maven-project&language=java&platformVersion=2.5.4&packaging=jar&jvmVersion=1.8&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb-reactive[link]
to build a reactive one.

However, if you want to try out the latest and greatest, Spring Data MongoDB can be easily built with the https://github.com/takari/maven-wrapper[Maven wrapper]
and minimally, JDK 8 (https://www.oracle.com/java/technologies/downloads/[JDK downloads]).

In order to build Spring Data MongoDB, you will need to https://www.mongodb.com/try/download/community[download]
and https://docs.mongodb.com/manual/installation/[install a MongoDB distribution].

Once you have installed MongoDB, you need to start a MongoDB server. It is convenient to set an environment variable to
your MongoDB installation directory (e.g. `MONGODB_HOME`).

To run the full test suite, a https://docs.mongodb.com/manual/tutorial/deploy-replica-set/[MongoDB Replica Set]
is required.

To run the MongoDB server enter the following command from a command-line:

[source,bash]
----
$ $MONGODB_HOME/bin/mongod --dbpath $MONGODB_HOME/runtime/data --ipv6 --port 27017 --replSet rs0
...
"msg":"Successfully connected to host"
----

Once the MongoDB server starts up, you should see the message (`msg`), "_Successfully connected to host_".

Notice the `--dbpath` option to the `mongod` command. You can set this to anything you like, but in this case, we set
the absolute path to a sub-directory (`runtime/data/`) under the MongoDB installation directory (in `$MONGODB_HOME`).

You need to initialize the MongoDB replica set only once on the first time the MongoDB server is started.
To initialize the replica set, start a mongo client:

[source,bash]
----
$ $MONGODB_HOME/bin/mongo
MongoDB server version: 5.0.0
...
----

Then enter the following command:

[source,bash]
----
mongo> rs.initiate({ _id: 'rs0', members: [ { _id: 0, host: '127.0.0.1:27017' } ] })
----

Finally, on UNIX-based system (for example, Linux or Mac OS X) you may need to adjust the `ulimit`.
In case you need to, you can adjust the `ulimit` with the following command (32768 is just a recommendation):

[source,bash]
----
$ ulimit -n 32768
----

You can use `ulimit -a` again to verify the `ulimit` for "_open files_" was set appropriately.

Now you are ready to build Spring Data MongoDB. Simply enter the following `mvnw` (Maven Wrapper) command:
You don’t need to build from source to use Spring Data (binaries in https://repo.spring.io[repo.spring.io]), but if you want to try out the latest and greatest, Spring Data can be easily built with the https://github.com/takari/maven-wrapper[maven wrapper].
You also need JDK 1.8.

[source,bash]
----
@@ -343,8 +129,7 @@ Now you are ready to build Spring Data MongoDB. Simply enter the following `mvnw

If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.5.0 or above].

_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular, please sign
the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._
_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular please sign the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._

=== Building reference documentation

@@ -357,7 +142,17 @@ Building the documentation builds also the project without running tests.

The generated documentation is available from `target/site/reference/html/index.html`.

[[license]]
== Guides

The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step:

* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories.
* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories.

== Examples

* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail.

== License

Spring Data MongoDB is Open Source software released under the https://www.apache.org/licenses/LICENSE-2.0.html[Apache 2.0 license].
@@ -1,18 +0,0 @@
|
||||
FROM adoptopenjdk/openjdk11:latest
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
|
||||
RUN apt-get update ; \
|
||||
apt-get install -y mongodb-org=4.4.4 mongodb-org-server=4.4.4 mongodb-org-shell=4.4.4 mongodb-org-mongos=4.4.4 mongodb-org-tools=4.4.4 ; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
@@ -1,18 +0,0 @@
|
||||
FROM adoptopenjdk/openjdk16:latest
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
|
||||
RUN apt-get update ; \
|
||||
apt-get install -y mongodb-org=4.4.4 mongodb-org-server=4.4.4 mongodb-org-shell=4.4.4 mongodb-org-mongos=4.4.4 mongodb-org-tools=4.4.4 ; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
@@ -1,18 +1,14 @@
|
||||
FROM adoptopenjdk/openjdk8:latest
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2
|
||||
|
||||
RUN RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4
|
||||
|
||||
RUN apt-get update ; \
|
||||
apt-get install -y mongodb-org=4.0.23 mongodb-org-server=4.0.23 mongodb-org-shell=4.0.23 mongodb-org-mongos=4.0.23 mongodb-org-tools=4.0.23 ; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list
|
||||
|
||||
RUN apt-get update
|
||||
|
||||
RUN apt-get install -y mongodb-org=4.0.9 mongodb-org-server=4.0.9 mongodb-org-shell=4.0.9 mongodb-org-mongos=4.0.9 mongodb-org-tools=4.0.9
|
||||
|
||||
RUN apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
ci/openjdk8-mongodb-4.1/Dockerfile (new file): 14 changes
@@ -0,0 +1,14 @@
|
||||
FROM adoptopenjdk/openjdk8:latest
|
||||
|
||||
RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2
|
||||
|
||||
RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 4B7C549A058F8B6B
|
||||
|
||||
RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.1 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.1.list
|
||||
|
||||
RUN apt-get update
|
||||
|
||||
RUN apt-get install -y mongodb-org-unstable=4.1.13 mongodb-org-unstable-server=4.1.13 mongodb-org-unstable-shell=4.1.13 mongodb-org-unstable-mongos=4.1.13 mongodb-org-unstable-tools=4.1.13
|
||||
|
||||
RUN apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
ci/openjdk8-mongodb-4.2/Dockerfile (new file): 14 changes
@@ -0,0 +1,14 @@
|
||||
FROM adoptopenjdk/openjdk8:latest
|
||||
|
||||
RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2
|
||||
|
||||
RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b
|
||||
|
||||
RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list
|
||||
|
||||
RUN apt-get update
|
||||
|
||||
RUN apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0
|
||||
|
||||
RUN apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM adoptopenjdk/openjdk8:latest
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
|
||||
RUN apt-get update ; \
|
||||
ln -T /bin/true /usr/bin/systemctl ; \
|
||||
apt-get install -y mongodb-org=4.4.4 mongodb-org-server=4.4.4 mongodb-org-shell=4.4.4 mongodb-org-mongos=4.4.4 mongodb-org-tools=4.4.4 ; \
|
||||
rm /usr/bin/systemctl ; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM adoptopenjdk/openjdk8:latest
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget ; \
|
||||
# MongoDB 5.0 release signing key
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv B00A0BD1E2C63C11 ; \
|
||||
# Needed when MongoDB creates a 5.0 folder.
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/5.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-5.0.list; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
|
||||
RUN apt-get update; \
|
||||
apt-get install -y mongodb-org=5.0.3 mongodb-org-server=5.0.3 mongodb-org-shell=5.0.3 mongodb-org-mongos=5.0.3 mongodb-org-tools=5.0.3; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
pom.xml: 26 changes
@@ -5,7 +5,7 @@
|
||||
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.4.0-M1</version>
|
||||
<version>2.2.14.BUILD-SNAPSHOT</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<name>Spring Data MongoDB</name>
|
||||
@@ -15,7 +15,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data.build</groupId>
|
||||
<artifactId>spring-data-parent</artifactId>
|
||||
<version>2.7.0-M1</version>
|
||||
<version>2.2.14.BUILD-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
@@ -26,9 +26,9 @@
|
||||
<properties>
|
||||
<project.type>multi</project.type>
|
||||
<dist.id>spring-data-mongodb</dist.id>
|
||||
<springdata.commons>2.7.0-M1</springdata.commons>
|
||||
<mongo>4.4.1</mongo>
|
||||
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
|
||||
<springdata.commons>2.2.14.BUILD-SNAPSHOT</springdata.commons>
|
||||
<mongo>3.11.2</mongo>
|
||||
<mongo.reactivestreams>1.12.0</mongo.reactivestreams>
|
||||
<jmh.version>1.19</jmh.version>
|
||||
</properties>
|
||||
|
||||
@@ -127,25 +127,15 @@
|
||||
<!-- MongoDB -->
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-core</artifactId>
|
||||
<artifactId>mongo-java-driver</artifactId>
|
||||
<version>${mongo}</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>spring-libs-milestone</id>
|
||||
<url>https://repo.spring.io/libs-milestone</url>
|
||||
</repository>
|
||||
<repository>
|
||||
<id>sonatype-libs-snapshot</id>
|
||||
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
|
||||
<releases>
|
||||
<enabled>false</enabled>
|
||||
</releases>
|
||||
<snapshots>
|
||||
<enabled>true</enabled>
|
||||
</snapshots>
|
||||
<id>spring-libs-snapshot</id>
|
||||
<url>https://repo.spring.io/libs-snapshot</url>
|
||||
</repository>
|
||||
</repositories>
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.4.0-M1</version>
|
||||
<version>2.2.14.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.4.0-M1</version>
|
||||
<version>2.2.14.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.4.0-M1</version>
|
||||
<version>2.2.14.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -65,12 +65,6 @@
|
||||
<artifactId>querydsl-mongodb</artifactId>
|
||||
<version>${querydsl}</version>
|
||||
<optional>true</optional>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongo-java-driver</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -87,22 +81,7 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.google.code.findbugs</groupId>
|
||||
<artifactId>jsr305</artifactId>
|
||||
<version>3.0.2</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<!-- reactive -->
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-sync</artifactId>
|
||||
<version>${mongo}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-reactivestreams</artifactId>
|
||||
@@ -110,6 +89,23 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-async</artifactId>
|
||||
<version>${mongo}</version>
|
||||
<optional>true</optional>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-core</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>bson</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.projectreactor</groupId>
|
||||
<artifactId>reactor-core</artifactId>
|
||||
@@ -143,13 +139,6 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex.rxjava3</groupId>
|
||||
<artifactId>rxjava</artifactId>
|
||||
<version>${rxjava3}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<!-- CDI -->
|
||||
<!-- Dependency order required to build against CDI 1.0 and test with CDI 2.0 -->
|
||||
<dependency>
|
||||
@@ -206,14 +195,7 @@
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-validator</artifactId>
|
||||
<version>5.4.3.Final</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.glassfish</groupId>
|
||||
<artifactId>javax.el</artifactId>
|
||||
<version>3.0.1-b11</version>
|
||||
<version>5.2.4.Final</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
@@ -237,6 +219,13 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>jul-to-slf4j</artifactId>
|
||||
<version>${slf4j}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>nl.jqno.equalsverifier</groupId>
|
||||
<artifactId>equalsverifier</artifactId>
|
||||
@@ -264,13 +253,6 @@
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.junit-pioneer</groupId>
|
||||
<artifactId>junit-pioneer</artifactId>
|
||||
<version>0.5.3</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.transaction</groupId>
|
||||
<artifactId>jta</artifactId>
|
||||
@@ -310,15 +292,6 @@
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- jMolecules -->
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jmolecules</groupId>
|
||||
<artifactId>jmolecules-ddd</artifactId>
|
||||
<version>${jmolecules}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
@@ -367,6 +340,12 @@
|
||||
<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
|
||||
<reactor.trace.cancel>true</reactor.trace.cancel>
|
||||
</systemPropertyVariables>
|
||||
<properties>
|
||||
<property>
|
||||
<name>listener</name>
|
||||
<value>org.springframework.data.mongodb.test.util.CleanMongoDBJunitRunListener</value>
|
||||
</property>
|
||||
</properties>
|
||||
</configuration>
|
||||
</plugin>
|
||||
|
||||
|
||||
@@ -1,152 +0,0 @@
/*
 * Copyright 2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb;

import java.util.Arrays;

import org.bson.Document;
import org.bson.codecs.DocumentCodec;
import org.bson.codecs.configuration.CodecRegistry;
import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec;
import org.springframework.data.util.Lazy;
import org.springframework.lang.Nullable;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

/**
 * A {@link MongoExpression} using the {@link ParameterBindingDocumentCodec} for parsing a raw ({@literal json})
 * expression. The expression will be wrapped within <code>{ ... }</code> if necessary. The actual parsing and parameter
 * binding of placeholders like {@code ?0} is delayed upon first call on the the target {@link Document} via
 * {@link #toDocument()}.
 * <br />
 *
 * <pre class="code">
 * $toUpper : $name -> { '$toUpper' : '$name' }
 *
 * { '$toUpper' : '$name' } -> { '$toUpper' : '$name' }
 *
 * { '$toUpper' : '?0' }, "$name" -> { '$toUpper' : '$name' }
 * </pre>
 *
 * Some types might require a special {@link org.bson.codecs.Codec}. If so, make sure to provide a {@link CodecRegistry}
 * containing the required {@link org.bson.codecs.Codec codec} via {@link #withCodecRegistry(CodecRegistry)}.
 *
 * @author Christoph Strobl
 * @since 3.2
 */
public class BindableMongoExpression implements MongoExpression {

    private final String expressionString;

    private final @Nullable CodecRegistryProvider codecRegistryProvider;

    private final @Nullable Object[] args;

    private final Lazy<Document> target;

    /**
     * Create a new instance of {@link BindableMongoExpression}.
     *
     * @param expression must not be {@literal null}.
     * @param args can be {@literal null}.
     */
    public BindableMongoExpression(String expression, @Nullable Object[] args) {
        this(expression, null, args);
    }

    /**
     * Create a new instance of {@link BindableMongoExpression}.
     *
     * @param expression must not be {@literal null}.
     * @param codecRegistryProvider can be {@literal null}.
     * @param args can be {@literal null}.
     */
    public BindableMongoExpression(String expression, @Nullable CodecRegistryProvider codecRegistryProvider,
            @Nullable Object[] args) {

        this.expressionString = expression;
        this.codecRegistryProvider = codecRegistryProvider;
        this.args = args;
        this.target = Lazy.of(this::parse);
    }

    /**
     * Provide the {@link CodecRegistry} used to convert expressions.
     *
     * @param codecRegistry must not be {@literal null}.
     * @return new instance of {@link BindableMongoExpression}.
     */
    public BindableMongoExpression withCodecRegistry(CodecRegistry codecRegistry) {
        return new BindableMongoExpression(expressionString, () -> codecRegistry, args);
    }

    /**
     * Provide the arguments to bind to the placeholders via their index.
     *
     * @param args must not be {@literal null}.
     * @return new instance of {@link BindableMongoExpression}.
     */
    public BindableMongoExpression bind(Object... args) {
        return new BindableMongoExpression(expressionString, codecRegistryProvider, args);
    }

    /*
     * (non-Javadoc)
     * @see org.springframework.data.mongodb.MongoExpression#toDocument()
     */
    @Override
    public Document toDocument() {
        return target.get();
    }

    /*
     * (non-Javadoc)
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        return "BindableMongoExpression{" + "expressionString='" + expressionString + '\'' + ", args="
                + Arrays.toString(args) + '}';
    }

    private Document parse() {

        String expression = wrapJsonIfNecessary(expressionString);

        if (ObjectUtils.isEmpty(args)) {

            if (codecRegistryProvider == null) {
                return Document.parse(expression);
            }

            return Document.parse(expression, codecRegistryProvider.getCodecFor(Document.class)
                    .orElseGet(() -> new DocumentCodec(codecRegistryProvider.getCodecRegistry())));
        }

        ParameterBindingDocumentCodec codec = codecRegistryProvider == null ? new ParameterBindingDocumentCodec()
                : new ParameterBindingDocumentCodec(codecRegistryProvider.getCodecRegistry());
        return codec.decode(expression, args);
    }

    private static String wrapJsonIfNecessary(String json) {

        if (StringUtils.hasText(json) && (json.startsWith("{") && json.endsWith("}"))) {
            return json;
        }

        return "{" + json + "}";
    }
}
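The class above pairs with the MongoExpression interface that appears further down in this diff; a minimal usage sketch based only on the methods shown (the codec registry used is an assumption):

import org.bson.Document;
import org.bson.codecs.configuration.CodecRegistry;
import org.springframework.data.mongodb.BindableMongoExpression;

import com.mongodb.MongoClientSettings;

class BindableMongoExpressionSample {

    public static void main(String[] args) {

        // Placeholders such as ?0 are bound lazily; parsing happens on the first toDocument() call.
        BindableMongoExpression expression = new BindableMongoExpression("{ '$toUpper' : '?0' }", null)
                .bind("$name");

        // Optionally attach a CodecRegistry when the bound arguments need special codecs.
        CodecRegistry registry = MongoClientSettings.getDefaultCodecRegistry(); // assumption: driver default registry
        Document document = expression.withCodecRegistry(registry).toDocument();

        System.out.println(document.toJson()); // { "$toUpper" : "$name" }
    }
}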
@@ -19,9 +19,9 @@ import java.util.List;

import org.springframework.dao.DataAccessException;

import com.mongodb.MongoBulkWriteException;
import com.mongodb.bulk.BulkWriteError;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.BulkWriteError;
import com.mongodb.BulkWriteException;
import com.mongodb.BulkWriteResult;

/**
 * Is thrown when errors occur during bulk operations.
@@ -38,12 +38,12 @@ public class BulkOperationException extends DataAccessException {
    private final BulkWriteResult result;

    /**
     * Creates a new {@link BulkOperationException} with the given message and source {@link MongoBulkWriteException}.
     * Creates a new {@link BulkOperationException} with the given message and source {@link BulkWriteException}.
     *
     * @param message must not be {@literal null}.
     * @param source must not be {@literal null}.
     */
    public BulkOperationException(String message, MongoBulkWriteException source) {
    public BulkOperationException(String message, BulkWriteException source) {

        super(message, source);
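One side of the hunk above accepts the driver's MongoBulkWriteException; a hedged sketch of how calling code could translate a failed bulk write into that exception (the surrounding method and collection are illustrative):

import java.util.List;

import org.bson.Document;
import org.springframework.data.mongodb.BulkOperationException;

import com.mongodb.MongoBulkWriteException;
import com.mongodb.client.MongoCollection;

class BulkWriteTranslationSample {

    void insertAll(MongoCollection<Document> people, List<Document> documents) {

        try {
            people.insertMany(documents);
        } catch (MongoBulkWriteException e) {
            // Constructor taken from the hunk above: BulkOperationException(String, MongoBulkWriteException).
            throw new BulkOperationException("Bulk insert failed", e);
        }
    }
}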
@@ -20,8 +20,8 @@ import org.springframework.util.StringUtils;

/**
 * Helper class featuring helper methods for working with MongoDb collections.
 * <br />
 * <br />
 * <p/>
 * <p/>
 * Mainly intended for internal use within the framework.
 *
 * @author Thomas Risberg
@@ -1,112 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.MongoExceptionTranslator;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Interface for factories creating {@link MongoDatabase} instances.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public interface MongoDatabaseFactory extends CodecRegistryProvider, MongoSessionProvider {
|
||||
|
||||
/**
|
||||
* Obtain a {@link MongoDatabase} from the underlying factory.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
MongoDatabase getMongoDatabase() throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
|
||||
*
|
||||
* @param dbName must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
MongoDatabase getMongoDatabase(String dbName) throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Exposes a shared {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
PersistenceExceptionTranslator getExceptionTranslator();
|
||||
|
||||
/**
|
||||
* Get the underlying {@link CodecRegistry} used by the MongoDB Java driver.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Override
|
||||
default CodecRegistry getCodecRegistry() {
|
||||
return getMongoDatabase().getCodecRegistry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} for given ClientSessionOptions.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
ClientSession getSession(ClientSessionOptions options);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase}
|
||||
* instances that are aware and bound to a new session with given {@link ClientSessionOptions options}.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
default MongoDatabaseFactory withSession(ClientSessionOptions options) {
|
||||
return withSession(getSession(options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase}
|
||||
* instances that are aware and bound to the given session.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
MongoDatabaseFactory withSession(ClientSession session);
|
||||
|
||||
/**
|
||||
* Returns if the given {@link MongoDatabaseFactory} is bound to a {@link ClientSession} that has an
|
||||
* {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @return {@literal true} if there's an active transaction, {@literal false} otherwise.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
default boolean isTransactionActive() {
|
||||
return false;
|
||||
}
|
||||
}
|
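The MongoDatabaseFactory contract shown in the hunk above can be exercised roughly as follows; SimpleMongoClientDatabaseFactory is the implementation referenced in the configuration part of this diff, while the connection string and database name are assumptions:

import org.bson.Document;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;

import com.mongodb.ClientSessionOptions;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoDatabase;

class MongoDatabaseFactorySample {

    public static void main(String[] args) {

        MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(
                MongoClients.create("mongodb://localhost:27017"), "sample"); // assumption: local test instance

        MongoDatabase database = factory.getMongoDatabase();
        database.getCollection("people").insertOne(new Document("name", "Alice"));

        // A session-bound factory hands out MongoDatabase instances tied to that session.
        // Requires a deployment that supports sessions (e.g. a replica set).
        MongoDatabaseFactory sessionBound = factory
                .withSession(ClientSessionOptions.builder().causallyConsistent(true).build());
        System.out.println(sessionBound.isTransactionActive());
    }
}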
||||
@@ -27,10 +27,10 @@ import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Helper class for managing a {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining
|
||||
* Helper class for managing a {@link MongoDatabase} instances via {@link MongoDbFactory}. Used for obtaining
|
||||
* {@link ClientSession session bound} resources, such as {@link MongoDatabase} and
|
||||
* {@link com.mongodb.client.MongoCollection} suitable for transactional usage.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
@@ -41,95 +41,93 @@ import com.mongodb.client.MongoDatabase;
|
||||
public class MongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory} using
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDbFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(MongoDatabaseFactory factory) {
|
||||
public static MongoDatabase getDatabase(MongoDbFactory factory) {
|
||||
return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory}.
|
||||
* <br />
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDbFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(MongoDatabaseFactory factory, SessionSynchronization sessionSynchronization) {
|
||||
public static MongoDatabase getDatabase(MongoDbFactory factory, SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(null, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory} using
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDbFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(@Nullable String dbName, MongoDatabaseFactory factory) {
|
||||
public static MongoDatabase getDatabase(@Nullable String dbName, MongoDbFactory factory) {
|
||||
return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory}.
|
||||
* <br />
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDbFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(@Nullable String dbName, MongoDatabaseFactory factory,
|
||||
public static MongoDatabase getDatabase(@Nullable String dbName, MongoDbFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(dbName, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDatabaseFactory factory,
|
||||
private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDbFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
Assert.notNull(factory, "Factory must not be null!");
|
||||
|
||||
if (sessionSynchronization == SessionSynchronization.NEVER
|
||||
|| !TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
if (!TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
return StringUtils.hasText(dbName) ? factory.getDb(dbName) : factory.getDb();
|
||||
}
|
||||
|
||||
ClientSession session = doGetSession(factory, sessionSynchronization);
|
||||
|
||||
if (session == null) {
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
return StringUtils.hasText(dbName) ? factory.getDb(dbName) : factory.getDb();
|
||||
}
|
||||
|
||||
MongoDatabaseFactory factoryToUse = factory.withSession(session);
|
||||
return StringUtils.hasText(dbName) ? factoryToUse.getMongoDatabase(dbName) : factoryToUse.getMongoDatabase();
|
||||
MongoDbFactory factoryToUse = factory.withSession(session);
|
||||
return StringUtils.hasText(dbName) ? factoryToUse.getDb(dbName) : factoryToUse.getDb();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the {@link MongoDatabaseFactory} is actually bound to a {@link ClientSession} that has an active
|
||||
* transaction, or if a {@link TransactionSynchronization} has been registered for the {@link MongoDatabaseFactory
|
||||
* resource} and if the associated {@link ClientSession} has an {@link ClientSession#hasActiveTransaction() active
|
||||
* transaction}.
|
||||
* Check if the {@link MongoDbFactory} is actually bound to a {@link ClientSession} that has an active transaction, or
|
||||
* if a {@link TransactionSynchronization} has been registered for the {@link MongoDbFactory resource} and if the
|
||||
* associated {@link ClientSession} has an {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @param dbFactory the resource to check transactions for. Must not be {@literal null}.
|
||||
* @return {@literal true} if the factory has an ongoing transaction.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
public static boolean isTransactionActive(MongoDatabaseFactory dbFactory) {
|
||||
public static boolean isTransactionActive(MongoDbFactory dbFactory) {
|
||||
|
||||
if (dbFactory.isTransactionActive()) {
|
||||
return true;
|
||||
@@ -140,8 +138,7 @@ public class MongoDatabaseUtils {
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private static ClientSession doGetSession(MongoDatabaseFactory dbFactory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
private static ClientSession doGetSession(MongoDbFactory dbFactory, SessionSynchronization sessionSynchronization) {
|
||||
|
||||
MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager.getResource(dbFactory);
|
||||
|
||||
@@ -172,7 +169,7 @@ public class MongoDatabaseUtils {
|
||||
return resourceHolder.getSession();
|
||||
}
|
||||
|
||||
private static ClientSession createClientSession(MongoDatabaseFactory dbFactory) {
|
||||
private static ClientSession createClientSession(MongoDbFactory dbFactory) {
|
||||
return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build());
|
||||
}
|
||||
|
||||
@@ -187,7 +184,7 @@ public class MongoDatabaseUtils {
|
||||
|
||||
private final MongoResourceHolder resourceHolder;
|
||||
|
||||
MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDatabaseFactory dbFactory) {
|
||||
MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDbFactory dbFactory) {
|
||||
|
||||
super(resourceHolder, dbFactory);
|
||||
this.resourceHolder = resourceHolder;
|
||||
|
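As the MongoDatabaseUtils javadoc in the hunks above describes, template code resolves the database through this helper so that an ongoing transaction is honoured; a minimal sketch using only the signatures shown:

import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoDatabaseUtils;
import org.springframework.data.mongodb.SessionSynchronization;

import com.mongodb.client.MongoDatabase;

class TransactionAwareDatabaseLookup {

    MongoDatabase resolve(MongoDatabaseFactory factory) {

        // Participates in an ongoing MongoDB transaction when synchronization is active,
        // otherwise simply falls back to factory.getMongoDatabase().
        return MongoDatabaseUtils.getDatabase(factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
    }
}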
||||
@@ -15,8 +15,14 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.MongoExceptionTranslator;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
@@ -25,33 +31,92 @@ import com.mongodb.client.MongoDatabase;
|
||||
* @author Mark Pollack
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @deprecated since 3.0, use {@link MongoDatabaseFactory} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public interface MongoDbFactory extends MongoDatabaseFactory {
|
||||
public interface MongoDbFactory extends CodecRegistryProvider, MongoSessionProvider {
|
||||
|
||||
/**
|
||||
* Creates a default {@link MongoDatabase} instance.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @throws DataAccessException
|
||||
* @deprecated since 3.0. Use {@link #getMongoDatabase()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
default MongoDatabase getDb() throws DataAccessException {
|
||||
return getMongoDatabase();
|
||||
}
|
||||
MongoDatabase getDb() throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
|
||||
* Creates a {@link DB} instance to access the database with the given name.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return never {@literal null}.
|
||||
* @throws DataAccessException
|
||||
* @deprecated since 3.0. Use {@link #getMongoDatabase(String)} instead.
|
||||
*/
|
||||
MongoDatabase getDb(String dbName) throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Exposes a shared {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
PersistenceExceptionTranslator getExceptionTranslator();
|
||||
|
||||
/**
|
||||
* Get the legacy database entry point. Please consider {@link #getDb()} instead.
|
||||
*
|
||||
* @return
|
||||
* @deprecated since 2.1, use {@link #getDb()}. This method will be removed with a future version as it works only
|
||||
* with the legacy MongoDB driver.
|
||||
*/
|
||||
@Deprecated
|
||||
default MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
return getMongoDatabase(dbName);
|
||||
DB getLegacyDb();
|
||||
|
||||
/**
|
||||
* Get the underlying {@link CodecRegistry} used by the MongoDB Java driver.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Override
|
||||
default CodecRegistry getCodecRegistry() {
|
||||
return getDb().getCodecRegistry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} for given ClientSessionOptions.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
ClientSession getSession(ClientSessionOptions options);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDbFactory} returning {@link MongoDatabase} instances
|
||||
* that are aware and bound to a new session with given {@link ClientSessionOptions options}.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
default MongoDbFactory withSession(ClientSessionOptions options) {
|
||||
return withSession(getSession(options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDbFactory} returning {@link MongoDatabase} instances
|
||||
* that are aware and bound to the given session.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
MongoDbFactory withSession(ClientSession session);
|
||||
|
||||
/**
|
||||
* Returns if the given {@link MongoDbFactory} is bound to a {@link ClientSession} that has an
|
||||
* {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @return {@literal true} if there's an active transaction, {@literal false} otherwise.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
default boolean isTransactionActive() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
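The deprecation markers in the hunk above spell out the migration path: the legacy MongoDbFactory methods simply delegate to their MongoDatabaseFactory counterparts. A small side-by-side sketch (the database name is illustrative):

import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoDbFactory;

import com.mongodb.client.MongoDatabase;

class MongoDbFactoryMigrationSample {

    MongoDatabase legacy(MongoDbFactory factory) {
        return factory.getDb("sample"); // deprecated since 3.0 on one side of this hunk
    }

    MongoDatabase preferred(MongoDatabaseFactory factory) {
        return factory.getMongoDatabase("sample"); // replacement call
    }
}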
||||
@@ -1,73 +0,0 @@
/*
 * Copyright 2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb;

/**
 * Wrapper object for MongoDB expressions like {@code $toUpper : $name} that manifest as {@link org.bson.Document} when
 * passed on to the driver.
 * <br />
 * A set of predefined {@link MongoExpression expressions}, including a
 * {@link org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression SpEL based variant} for method
 * like expressions (eg. {@code toUpper(name)}) are available via the
 * {@link org.springframework.data.mongodb.core.aggregation Aggregation API}.
 *
 * @author Christoph Strobl
 * @since 3.2
 * @see org.springframework.data.mongodb.core.aggregation.ArithmeticOperators
 * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators
 * @see org.springframework.data.mongodb.core.aggregation.ComparisonOperators
 * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators
 * @see org.springframework.data.mongodb.core.aggregation.ConvertOperators
 * @see org.springframework.data.mongodb.core.aggregation.DateOperators
 * @see org.springframework.data.mongodb.core.aggregation.ObjectOperators
 * @see org.springframework.data.mongodb.core.aggregation.SetOperators
 * @see org.springframework.data.mongodb.core.aggregation.StringOperators
 */
@FunctionalInterface
public interface MongoExpression {

    /**
     * Create a new {@link MongoExpression} from plain {@link String} (eg. {@code $toUpper : $name}). <br />
     * The given expression will be wrapped with <code>{ ... }</code> to match an actual MongoDB {@link org.bson.Document}
     * if necessary.
     *
     * @param expression must not be {@literal null}.
     * @return new instance of {@link MongoExpression}.
     */
    static MongoExpression create(String expression) {
        return new BindableMongoExpression(expression, null);
    }

    /**
     * Create a new {@link MongoExpression} from plain {@link String} containing placeholders (eg. {@code $toUpper : ?0})
     * that will be resolved on first call of {@link #toDocument()}. <br />
     * The given expression will be wrapped with <code>{ ... }</code> to match an actual MongoDB {@link org.bson.Document}
     * if necessary.
     *
     * @param expression must not be {@literal null}.
     * @return new instance of {@link MongoExpression}.
     */
    static MongoExpression create(String expression, Object... args) {
        return new BindableMongoExpression(expression, args);
    }

    /**
     * Obtain the native {@link org.bson.Document} representation.
     *
     * @return never {@literal null}.
     */
    org.bson.Document toDocument();
}
@@ -24,7 +24,7 @@ import com.mongodb.client.ClientSession;
|
||||
/**
|
||||
* MongoDB specific {@link ResourceHolderSupport resource holder}, wrapping a {@link ClientSession}.
|
||||
* {@link MongoTransactionManager} binds instances of this class to the thread.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
@@ -36,15 +36,15 @@ import com.mongodb.client.ClientSession;
|
||||
class MongoResourceHolder extends ResourceHolderSupport {
|
||||
|
||||
private @Nullable ClientSession session;
|
||||
private MongoDatabaseFactory dbFactory;
|
||||
private MongoDbFactory dbFactory;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoResourceHolder} for a given {@link ClientSession session}.
|
||||
*
|
||||
* @param session the associated {@link ClientSession}. Can be {@literal null}.
|
||||
* @param dbFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}.
|
||||
* @param dbFactory the associated {@link MongoDbFactory}. must not be {@literal null}.
|
||||
*/
|
||||
MongoResourceHolder(@Nullable ClientSession session, MongoDatabaseFactory dbFactory) {
|
||||
MongoResourceHolder(@Nullable ClientSession session, MongoDbFactory dbFactory) {
|
||||
|
||||
this.session = session;
|
||||
this.dbFactory = dbFactory;
|
||||
@@ -75,9 +75,9 @@ class MongoResourceHolder extends ResourceHolderSupport {
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the associated {@link MongoDatabaseFactory}.
|
||||
* @return the associated {@link MongoDbFactory}.
|
||||
*/
|
||||
public MongoDatabaseFactory getDbFactory() {
|
||||
public MongoDbFactory getDbFactory() {
|
||||
return dbFactory;
|
||||
}
|
||||
|
||||
|
||||
@@ -36,19 +36,18 @@ import com.mongodb.client.ClientSession;
|
||||
|
||||
/**
|
||||
* A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages
|
||||
* {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}.
|
||||
* <br />
|
||||
* Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread.
|
||||
* <br />
|
||||
* {@link ClientSession} based transactions for a single {@link MongoDbFactory}.
|
||||
* <p />
|
||||
* Binds a {@link ClientSession} from the specified {@link MongoDbFactory} to the thread.
|
||||
* <p />
|
||||
* {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal
|
||||
* consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction()
|
||||
* commit} or {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <br />
|
||||
* <p />
|
||||
* Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via
|
||||
* {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard
|
||||
* {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as
|
||||
* {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
|
||||
* <br />
|
||||
* {@link MongoDatabaseUtils#getDatabase(MongoDbFactory)} instead of a standard {@link MongoDbFactory#getDb()} call.
|
||||
* Spring classes such as {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
|
||||
* <p />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override
|
||||
* {@link #doCommit(MongoTransactionObject)} to implement the
|
||||
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
|
||||
@@ -59,46 +58,46 @@ import com.mongodb.client.ClientSession;
|
||||
* @currentRead Shadow's Edge - Brent Weeks
|
||||
* @since 2.1
|
||||
* @see <a href="https://www.mongodb.com/transactions">MongoDB Transaction Documentation</a>
|
||||
* @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization)
|
||||
* @see MongoDatabaseUtils#getDatabase(MongoDbFactory, SessionSynchronization)
|
||||
*/
|
||||
public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
implements ResourceTransactionManager, InitializingBean {
|
||||
|
||||
private @Nullable MongoDatabaseFactory dbFactory;
|
||||
private @Nullable MongoDbFactory dbFactory;
|
||||
private @Nullable TransactionOptions options;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTransactionManager} for bean-style usage.
|
||||
* <br />
|
||||
* <strong>Note:</strong>The {@link MongoDatabaseFactory db factory} has to be
|
||||
* {@link #setDbFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a
|
||||
* {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong>The {@link MongoDbFactory db factory} has to be {@link #setDbFactory(MongoDbFactory) set}
|
||||
* before using the instance. Use this constructor to prepare a {@link MongoTransactionManager} via a
|
||||
* {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <p />
|
||||
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
|
||||
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
|
||||
*
|
||||
* @see #setDbFactory(MongoDatabaseFactory)
|
||||
* @see #setDbFactory(MongoDbFactory)
|
||||
* @see #setTransactionSynchronization(int)
|
||||
*/
|
||||
public MongoTransactionManager() {}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory}.
|
||||
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDbFactory}.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
*/
|
||||
public MongoTransactionManager(MongoDatabaseFactory dbFactory) {
|
||||
public MongoTransactionManager(MongoDbFactory dbFactory) {
|
||||
this(dbFactory, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory}
|
||||
* applying the given {@link TransactionOptions options}, if present, when starting a new transaction.
|
||||
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDbFactory} applying the
|
||||
* given {@link TransactionOptions options}, if present, when starting a new transaction.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
* @param options can be {@literal null}.
|
||||
*/
|
||||
public MongoTransactionManager(MongoDatabaseFactory dbFactory, @Nullable TransactionOptions options) {
|
||||
public MongoTransactionManager(MongoDbFactory dbFactory, @Nullable TransactionOptions options) {
|
||||
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null!");
|
||||
|
||||
@@ -212,8 +211,8 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
* By default those labels are ignored, nevertheless one might check for
|
||||
* {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit errors labels} and retry the the
|
||||
* commit. <br />
|
||||
* <pre>
|
||||
* <code>
|
||||
* <pre>
|
||||
* int retries = 3;
|
||||
* do {
|
||||
* try {
|
||||
@@ -226,8 +225,8 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
* }
|
||||
* Thread.sleep(500);
|
||||
* } while (--retries > 0);
|
||||
* </pre>
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @param transactionObject never {@literal null}.
|
||||
* @throws Exception in case of transaction errors.
|
||||
@@ -296,11 +295,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link MongoDatabaseFactory} that this instance should manage transactions for.
|
||||
* Set the {@link MongoDbFactory} that this instance should manage transactions for.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
*/
|
||||
public void setDbFactory(MongoDatabaseFactory dbFactory) {
|
||||
public void setDbFactory(MongoDbFactory dbFactory) {
|
||||
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null!");
|
||||
this.dbFactory = dbFactory;
|
||||
@@ -316,12 +315,12 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link MongoDatabaseFactory} that this instance manages transactions for.
|
||||
* Get the {@link MongoDbFactory} that this instance manages transactions for.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public MongoDatabaseFactory getDbFactory() {
|
||||
public MongoDbFactory getDbFactory() {
|
||||
return dbFactory;
|
||||
}
|
||||
|
||||
@@ -330,7 +329,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
* @see org.springframework.transaction.support.ResourceTransactionManager#getResourceFactory()
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabaseFactory getResourceFactory() {
|
||||
public MongoDbFactory getResourceFactory() {
|
||||
return getRequiredDbFactory();
|
||||
}
|
||||
|
||||
@@ -345,7 +344,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
|
||||
private MongoResourceHolder newResourceHolder(TransactionDefinition definition, ClientSessionOptions options) {
|
||||
|
||||
MongoDatabaseFactory dbFactory = getResourceFactory();
|
||||
MongoDbFactory dbFactory = getResourceFactory();
|
||||
|
||||
MongoResourceHolder resourceHolder = new MongoResourceHolder(dbFactory.getSession(options), dbFactory);
|
||||
resourceHolder.setTimeoutIfNotDefaulted(determineTimeout(definition));
|
||||
@@ -356,7 +355,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
/**
|
||||
* @throws IllegalStateException if {@link #dbFactory} is {@literal null}.
|
||||
*/
|
||||
private MongoDatabaseFactory getRequiredDbFactory() {
|
||||
private MongoDbFactory getRequiredDbFactory() {
|
||||
|
||||
Assert.state(dbFactory != null,
|
||||
"MongoTransactionManager operates upon a MongoDbFactory. Did you forget to provide one? It's required.");
|
||||
|
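The transaction manager discussed in the hunks above plugs into Spring's regular transaction infrastructure; a hedged sketch of programmatic usage (collection and field names are illustrative, and the target collection is assumed to exist already since MongoDB cannot create it inside a transaction on older servers):

import org.bson.Document;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoTransactionManager;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.transaction.support.TransactionTemplate;

class MongoTransactionManagerSample {

    void transferOwnership(MongoDatabaseFactory dbFactory) {

        MongoTransactionManager txManager = new MongoTransactionManager(dbFactory);
        MongoTemplate template = new MongoTemplate(dbFactory);

        new TransactionTemplate(txManager).executeWithoutResult(status -> {
            // Both writes see the same session-bound MongoDatabase because MongoTemplate
            // resolves it through MongoDatabaseUtils, as the javadoc above explains.
            template.save(new Document("_id", "a").append("owner", "bob"), "accounts");
            template.save(new Document("_id", "b").append("owner", "bob"), "accounts");
        });
    }
}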
||||
@@ -31,7 +31,6 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @author Mathieu Ouellet
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
|
||||
@@ -42,16 +41,16 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
|
||||
* @return never {@literal null}.
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
Mono<MongoDatabase> getMongoDatabase() throws DataAccessException;
|
||||
MongoDatabase getMongoDatabase() throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
|
||||
* Creates a {@link MongoDatabase} instance to access the database with the given name.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return never {@literal null}.
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
Mono<MongoDatabase> getMongoDatabase(String dbName) throws DataAccessException;
|
||||
MongoDatabase getMongoDatabase(String dbName) throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Exposes a shared {@link MongoExceptionTranslator}.
|
||||
@@ -65,7 +64,10 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
CodecRegistry getCodecRegistry();
|
||||
@Override
|
||||
default CodecRegistry getCodecRegistry() {
|
||||
return getMongoDatabase().getCodecRegistry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link Mono} emitting a {@link ClientSession} for given {@link ClientSessionOptions options}.
|
||||
|
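On the reactive side, one variant in the hunk above returns Mono<MongoDatabase>; a minimal sketch of consuming that signature (the collection name is illustrative):

import org.bson.Document;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;

import reactor.core.publisher.Flux;

class ReactiveDatabaseLookupSample {

    Flux<Document> findAllPeople(ReactiveMongoDatabaseFactory factory) {

        return factory.getMongoDatabase() // Mono<MongoDatabase> in one variant of this hunk
                .flatMapMany(db -> db.getCollection("people").find());
    }
}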
||||
@@ -36,12 +36,11 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
|
||||
* Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for
|
||||
* obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection}
|
||||
* suitable for transactional usage.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @author Mathieu Ouellet
|
||||
* @since 2.2
|
||||
*/
|
||||
public class ReactiveMongoDatabaseUtils {
|
||||
@@ -75,7 +74,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -88,7 +87,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -104,7 +103,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -119,7 +118,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -138,22 +137,19 @@ public class ReactiveMongoDatabaseUtils {
|
||||
|
||||
Assert.notNull(factory, "DatabaseFactory must not be null!");
|
||||
|
||||
if (sessionSynchronization == SessionSynchronization.NEVER) {
|
||||
return getMongoDatabaseOrDefault(dbName, factory);
|
||||
}
|
||||
|
||||
return TransactionSynchronizationManager.forCurrentTransaction()
|
||||
.filter(TransactionSynchronizationManager::isSynchronizationActive) //
|
||||
.flatMap(synchronizationManager -> {
|
||||
|
||||
return doGetSession(synchronizationManager, factory, sessionSynchronization) //
|
||||
.flatMap(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it)));
|
||||
}) //
|
||||
.onErrorResume(NoTransactionException.class, e -> getMongoDatabaseOrDefault(dbName, factory))
|
||||
.switchIfEmpty(getMongoDatabaseOrDefault(dbName, factory));
|
||||
.map(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it)));
|
||||
})
|
||||
.onErrorResume(NoTransactionException.class,
|
||||
e -> Mono.fromSupplier(() -> getMongoDatabaseOrDefault(dbName, factory)))
|
||||
.defaultIfEmpty(getMongoDatabaseOrDefault(dbName, factory));
|
||||
}
|
||||
|
||||
private static Mono<MongoDatabase> getMongoDatabaseOrDefault(@Nullable String dbName,
|
||||
private static MongoDatabase getMongoDatabaseOrDefault(@Nullable String dbName,
|
||||
ReactiveMongoDatabaseFactory factory) {
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
}
|
||||
|
||||
@@ -24,7 +24,7 @@ import com.mongodb.reactivestreams.client.ClientSession;
|
||||
/**
|
||||
* MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds
|
||||
* instances of this class to the subscriber context.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
@@ -42,7 +42,7 @@ class ReactiveMongoResourceHolder extends ResourceHolderSupport {
|
||||
* Create a new {@link ReactiveMongoResourceHolder} for a given {@link ClientSession session}.
|
||||
*
|
||||
* @param session the associated {@link ClientSession}. Can be {@literal null}.
|
||||
* @param databaseFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}.
|
||||
* @param databaseFactory the associated {@link MongoDbFactory}. must not be {@literal null}.
|
||||
*/
|
||||
ReactiveMongoResourceHolder(@Nullable ClientSession session, ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
@@ -99,7 +99,7 @@ class ReactiveMongoResourceHolder extends ResourceHolderSupport {
|
||||
* If the {@link ReactiveMongoResourceHolder} is {@link #hasSession() not already associated} with a
|
||||
* {@link ClientSession} the given value is {@link #setSession(ClientSession) set} and returned, otherwise the current
|
||||
* bound session is returned.
|
||||
*
|
||||
*
|
||||
* @param session
|
||||
* @return
|
||||
*/
|
||||
|
||||
@@ -38,21 +38,21 @@ import com.mongodb.reactivestreams.client.ClientSession;
|
||||
* A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Binds a {@link ClientSession} from the specified
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber
|
||||
* {@link reactor.util.context.Context}.
|
||||
* <br />
|
||||
* <p />
|
||||
* {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a
|
||||
* {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start},
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or
|
||||
* {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <br />
|
||||
* <p />
|
||||
* Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead
|
||||
* of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring
|
||||
* classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly.
|
||||
* <br />
|
||||
* <p />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override
|
||||
* {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the
|
||||
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
|
||||
@@ -71,11 +71,11 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoTransactionManager} for bean-style usage.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong>The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to
|
||||
* be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory)} set} before using the instance. Use this constructor
|
||||
* to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
|
||||
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
|
||||
*
|
||||
|
||||
@@ -35,7 +35,7 @@ import com.mongodb.session.ClientSession;
|
||||
/**
|
||||
* {@link MethodInterceptor} implementation looking up and invoking an alternative target method having
|
||||
* {@link ClientSession} as its first argument. This allows seamless integration with the existing code base.
|
||||
* <br />
|
||||
* <p />
|
||||
* The {@link MethodInterceptor} is aware of methods on {@code MongoCollection} that my return new instances of itself
|
||||
* like (eg. {@link com.mongodb.reactivestreams.client.MongoCollection#withWriteConcern(WriteConcern)} and decorate them
|
||||
* if not already proxied.
|
||||
|
||||
@@ -15,20 +15,13 @@
 */
package org.springframework.data.mongodb;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

/**
 * {@link SessionSynchronization} is used along with {@code MongoTemplate} to define in which type of transactions to
 * participate if any.
 * {@link SessionSynchronization} is used along with {@link org.springframework.data.mongodb.core.MongoTemplate} to
 * define in which type of transactions to participate if any.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 2.1
 * @see MongoTemplate#setSessionSynchronization(SessionSynchronization)
 * @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization)
 * @see ReactiveMongoTemplate#setSessionSynchronization(SessionSynchronization)
 * @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization)
 */
public enum SessionSynchronization {

@@ -41,12 +34,5 @@ public enum SessionSynchronization {
    /**
     * Synchronize with native MongoDB transactions initiated via {@link MongoTransactionManager}.
     */
    ON_ACTUAL_TRANSACTION,

    /**
     * Do not participate in ongoing transactions.
     *
     * @since 3.2.5
     */
    NEVER;
    ON_ACTUAL_TRANSACTION;
}
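A short sketch of opting a template out of transaction participation via the enum above, using only the setter referenced in its javadoc (the NEVER constant exists on one side of this hunk, since 3.2.5):

import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.SessionSynchronization;
import org.springframework.data.mongodb.core.MongoTemplate;

class SessionSynchronizationSample {

    MongoTemplate nonTransactionalTemplate(MongoDatabaseFactory dbFactory) {

        MongoTemplate template = new MongoTemplate(dbFactory);
        template.setSessionSynchronization(SessionSynchronization.NEVER);
        return template;
    }
}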
@@ -1,77 +0,0 @@
/*
 * Copyright 2020-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.data.util.Version;
import org.springframework.util.StringUtils;

import com.mongodb.MongoDriverInformation;

/**
 * Class that exposes the SpringData MongoDB specific information like the current {@link Version} or
 * {@link MongoDriverInformation driver information}.
 *
 * @author Christoph Strobl
 * @since 3.0
 */
public class SpringDataMongoDB {

    private static final Log LOGGER = LogFactory.getLog(SpringDataMongoDB.class);

    private static final Version FALLBACK_VERSION = new Version(3);
    private static final MongoDriverInformation DRIVER_INFORMATION = MongoDriverInformation
            .builder(MongoDriverInformation.builder().build()).driverName("spring-data").build();

    /**
     * Obtain the SpringData MongoDB specific driver information.
     *
     * @return never {@literal null}.
     */
    public static MongoDriverInformation driverInformation() {
        return DRIVER_INFORMATION;
    }

    /**
     * Fetches the "Implementation-Version" manifest attribute from the jar file.
     * <br />
     * Note that some ClassLoaders do not expose the package metadata, hence this class might not be able to determine the
     * version in all environments. In this case the current Major version is returned as a fallback.
     *
     * @return never {@literal null}.
     */
    public static Version version() {

        Package pkg = SpringDataMongoDB.class.getPackage();
        String versionString = (pkg != null ? pkg.getImplementationVersion() : null);

        if (!StringUtils.hasText(versionString)) {

            LOGGER.debug("Unable to find Spring Data MongoDB version.");
            return FALLBACK_VERSION;
        }

        try {
            return Version.parse(versionString);
        } catch (Exception e) {
            LOGGER.debug(String.format("Cannot read Spring Data MongoDB version '%s'.", versionString));
        }

        return FALLBACK_VERSION;
    }

}
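A short sketch of how the helper above can be used when creating a client, registering the Spring Data driver metadata alongside the driver's own (client settings are left at their defaults here):

import org.springframework.data.mongodb.SpringDataMongoDB;

import com.mongodb.MongoClientSettings;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

class SpringDataMongoDBSample {

    MongoClient createClient() {

        System.out.println("Spring Data MongoDB " + SpringDataMongoDB.version());

        return MongoClients.create(MongoClientSettings.builder().build(),
                SpringDataMongoDB.driverInformation());
    }
}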
@@ -17,22 +17,17 @@ package org.springframework.data.mongodb.config;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.SpringDataMongoDB;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;
import org.springframework.data.mongodb.core.SimpleMongoClientDbFactory;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.lang.Nullable;

import com.mongodb.MongoClientSettings;
import com.mongodb.MongoClientSettings.Builder;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

/**
 * Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.client.MongoClient}.
@@ -40,44 +35,40 @@ import com.mongodb.client.MongoClients;
 * @author Christoph Strobl
 * @since 2.1
 * @see MongoConfigurationSupport
 * @see AbstractMongoConfiguration
 */
@Configuration(proxyBeanMethods = false)
@Configuration
public abstract class AbstractMongoClientConfiguration extends MongoConfigurationSupport {

	/**
	 * Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a
	 * {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. <br />
	 * Override {@link #mongoClientSettings()} to configure connection details.
	 * {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
	 *
	 * @return never {@literal null}.
	 * @see #mongoClientSettings()
	 * @see #configureClientSettings(Builder)
	 * @return
	 */
	public MongoClient mongoClient() {
		return createMongoClient(mongoClientSettings());
	}
	public abstract MongoClient mongoClient();

	/**
	 * Creates a {@link MongoTemplate}.
	 *
	 * @see #mongoDbFactory()
	 * @see #mappingMongoConverter(MongoDatabaseFactory, MongoCustomConversions, MongoMappingContext)
	 * @return
	 */
	@Bean
	public MongoTemplate mongoTemplate(MongoDatabaseFactory databaseFactory, MappingMongoConverter converter) {
		return new MongoTemplate(databaseFactory, converter);
	public MongoTemplate mongoTemplate() throws Exception {
		return new MongoTemplate(mongoDbFactory(), mappingMongoConverter());
	}

	/**
	 * Creates a {@link org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory} to be used by the
	 * {@link MongoTemplate}. Will use the {@link MongoClient} instance configured in {@link #mongoClient()}.
	 * Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient}
	 * instance configured in {@link #mongoClient()}.
	 *
	 * @see #mongoClient()
	 * @see #mongoTemplate(MongoDatabaseFactory, MappingMongoConverter)
	 * @see #mongoTemplate()
	 * @return
	 */
	@Bean
	public MongoDatabaseFactory mongoDbFactory() {
		return new SimpleMongoClientDatabaseFactory(mongoClient(), getDatabaseName());
	public MongoDbFactory mongoDbFactory() {
		return new SimpleMongoClientDbFactory(mongoClient(), getDatabaseName());
	}

	/**
@@ -100,32 +91,22 @@ public abstract class AbstractMongoClientConfiguration extends MongoConfiguratio

	/**
	 * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
	 * {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
	 * {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
	 *
	 * @see #customConversions()
	 * @see #mongoMappingContext(MongoCustomConversions)
	 * @see #mongoMappingContext()
	 * @see #mongoDbFactory()
	 * @return
	 * @throws Exception
	 */
	@Bean
	public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory,
			MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
	public MappingMongoConverter mappingMongoConverter() throws Exception {

		DbRefResolver dbRefResolver = new DefaultDbRefResolver(databaseFactory);
		MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
		converter.setCustomConversions(customConversions);
		converter.setCodecRegistryProvider(databaseFactory);
		DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
		MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
		converter.setCustomConversions(customConversions());
		converter.setCodecRegistryProvider(mongoDbFactory());

		return converter;
	}

	/**
	 * Create the Reactive Streams {@link com.mongodb.reactivestreams.client.MongoClient} instance with given
	 * {@link MongoClientSettings}.
	 *
	 * @return never {@literal null}.
	 * @since 3.0
	 */
	protected MongoClient createMongoClient(MongoClientSettings settings) {
		return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
	}
}

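On the newer side of the hunks above, mongoClient() gains a default implementation backed by mongoClientSettings(), so a typical subclass only supplies a database name and adjusts the settings builder. A hedged sketch under that assumption (package, class name, and connection details are illustrative, not part of the commits):

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;

import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings.Builder;

@Configuration
class AppMongoConfig extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "app-db"; // illustrative database name
	}

	@Override
	protected void configureClientSettings(Builder builder) {
		// connection details go through the settings builder instead of overriding mongoClient() directly
		builder.applyConnectionString(new ConnectionString("mongodb://localhost:27017/app-db"));
	}
}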
@@ -0,0 +1,121 @@
|
||||
/*
|
||||
* Copyright 2011-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.MongoClient}.
|
||||
* <p />
|
||||
* <strong>INFO:</strong>In case you want to use {@link com.mongodb.client.MongoClients} for configuration please refer
|
||||
* to {@link AbstractMongoClientConfiguration}.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Ryan Tenney
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @see MongoConfigurationSupport
|
||||
* @see AbstractMongoClientConfiguration
|
||||
* @deprecated since 2.2 in favor of {@link AbstractMongoClientConfiguration}.
|
||||
*/
|
||||
@Configuration
|
||||
@Deprecated
|
||||
public abstract class AbstractMongoConfiguration extends MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a
|
||||
* {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public abstract MongoClient mongoClient();
|
||||
|
||||
/**
|
||||
* Creates a {@link MongoTemplate}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@Bean
|
||||
public MongoTemplate mongoTemplate() throws Exception {
|
||||
return new MongoTemplate(mongoDbFactory(), mappingMongoConverter());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient}
|
||||
* instance configured in {@link #mongoClient()}.
|
||||
*
|
||||
* @see #mongoClient()
|
||||
* @see #mongoTemplate()
|
||||
* @return
|
||||
*/
|
||||
@Bean
|
||||
public MongoDbFactory mongoDbFactory() {
|
||||
return new SimpleMongoDbFactory(mongoClient(), getDatabaseName());
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the base package to scan for mapped {@link Document}s. Will return the package name of the configuration
|
||||
* class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending
|
||||
* {@link AbstractMongoConfiguration} the base package will be considered {@code com.acme} unless the method is
|
||||
* overridden to implement alternate behavior.
|
||||
*
|
||||
* @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for
|
||||
* entities.
|
||||
* @deprecated use {@link #getMappingBasePackages()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@Nullable
|
||||
protected String getMappingBasePackage() {
|
||||
|
||||
Package mappingBasePackage = getClass().getPackage();
|
||||
return mappingBasePackage == null ? null : mappingBasePackage.getName();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
|
||||
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext()
|
||||
* @see #mongoDbFactory()
|
||||
* @return
|
||||
* @throws Exception
|
||||
*/
|
||||
@Bean
|
||||
public MappingMongoConverter mappingMongoConverter() throws Exception {
|
||||
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
converter.setCodecRegistryProvider(mongoDbFactory());
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
}
|
||||
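For contrast, the deprecated variant shown above still forces subclasses to create the legacy com.mongodb.MongoClient themselves. A minimal sketch of that 2.x-era style (class name, host, and port are illustrative):

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;

import com.mongodb.MongoClient;

@Configuration
class LegacyMongoConfig extends AbstractMongoConfiguration {

	@Override
	protected String getDatabaseName() {
		return "legacy-db"; // illustrative
	}

	@Override
	public MongoClient mongoClient() {
		// legacy driver client; AbstractMongoClientConfiguration is the non-deprecated replacement
		return new MongoClient("localhost", 27017);
	}
}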
@@ -18,19 +18,13 @@ package org.springframework.data.mongodb.config;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.SpringDataMongoDB;
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
|
||||
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.reactivestreams.client.MongoClient;
|
||||
import com.mongodb.reactivestreams.client.MongoClients;
|
||||
|
||||
/**
|
||||
* Base class for reactive Spring Data MongoDB configuration using JavaConfig.
|
||||
@@ -40,33 +34,25 @@ import com.mongodb.reactivestreams.client.MongoClients;
|
||||
* @since 2.0
|
||||
* @see MongoConfigurationSupport
|
||||
*/
|
||||
@Configuration(proxyBeanMethods = false)
|
||||
@Configuration
|
||||
public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Return the Reactive Streams {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want
|
||||
* to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. <br />
|
||||
* Override {@link #mongoClientSettings()} to configure connection details.
|
||||
* to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @see #mongoClientSettings()
|
||||
* @see #configureClientSettings(Builder)
|
||||
*/
|
||||
public MongoClient reactiveMongoClient() {
|
||||
return createReactiveMongoClient(mongoClientSettings());
|
||||
}
|
||||
public abstract MongoClient reactiveMongoClient();
|
||||
|
||||
/**
|
||||
* Creates {@link ReactiveMongoOperations}.
|
||||
*
|
||||
* @see #reactiveMongoDbFactory()
|
||||
* @see #mappingMongoConverter(ReactiveMongoDatabaseFactory, MongoCustomConversions, MongoMappingContext)
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Bean
|
||||
public ReactiveMongoTemplate reactiveMongoTemplate(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
MappingMongoConverter mongoConverter) {
|
||||
return new ReactiveMongoTemplate(databaseFactory, mongoConverter);
|
||||
public ReactiveMongoOperations reactiveMongoTemplate() throws Exception {
|
||||
return new ReactiveMongoTemplate(reactiveMongoDbFactory(), mappingMongoConverter());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -74,7 +60,7 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat
|
||||
* {@link MongoClient} instance configured in {@link #reactiveMongoClient()}.
|
||||
*
|
||||
* @see #reactiveMongoClient()
|
||||
* @see #reactiveMongoTemplate(ReactiveMongoDatabaseFactory, MappingMongoConverter)
|
||||
* @see #reactiveMongoTemplate()
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Bean
|
||||
@@ -84,31 +70,21 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #reactiveMongoDbFactory()} and
|
||||
* {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
|
||||
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext(MongoCustomConversions)
|
||||
* @see #mongoMappingContext()
|
||||
* @see #reactiveMongoDbFactory()
|
||||
* @return never {@literal null}.
|
||||
* @throws Exception
|
||||
*/
|
||||
@Bean
|
||||
public MappingMongoConverter mappingMongoConverter(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
|
||||
public MappingMongoConverter mappingMongoConverter() throws Exception {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
|
||||
converter.setCustomConversions(customConversions);
|
||||
converter.setCodecRegistryProvider(databaseFactory);
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
converter.setCodecRegistryProvider(reactiveMongoDbFactory());
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the Reactive Streams {@link MongoClient} instance with given {@link MongoClientSettings}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClient createReactiveMongoClient(MongoClientSettings settings) {
|
||||
return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
|
||||
}
|
||||
}
|
||||
|
||||
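The reactive counterpart follows the same pattern: on the newer side reactiveMongoClient() is created from mongoClientSettings(), so a subclass usually only overrides getDatabaseName() and configureClientSettings(Builder). A sketch under that assumption (names are illustrative):

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration;

import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings.Builder;

@Configuration
class ReactiveMongoConfig extends AbstractReactiveMongoConfiguration {

	@Override
	protected String getDatabaseName() {
		return "reactive-db"; // illustrative
	}

	@Override
	protected void configureClientSettings(Builder builder) {
		// the Reactive Streams client is then created from these settings plus the Spring Data driver metadata
		builder.applyConnectionString(new ConnectionString("mongodb://localhost:27017/reactive-db"));
	}
}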
@@ -1,46 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ConnectionString;
|
||||
|
||||
/**
|
||||
* Parse a {@link String} to a {@link com.mongodb.ConnectionString}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class ConnectionStringPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String connectionString) {
|
||||
|
||||
if (!StringUtils.hasText(connectionString)) {
|
||||
return;
|
||||
}
|
||||
|
||||
setValue(new ConnectionString(connectionString));
|
||||
}
|
||||
}
|
||||
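The removed editor does nothing more than wrap non-blank text in a ConnectionString. A small sketch of the conversion it performed (the connection string is illustrative):

import com.mongodb.ConnectionString;
import org.springframework.data.mongodb.config.ConnectionStringPropertyEditor;

class ConnectionStringEditorExample {

	public static void main(String[] args) {

		ConnectionStringPropertyEditor editor = new ConnectionStringPropertyEditor();
		editor.setAsText("mongodb://localhost:27017/app-db"); // blank or null input leaves the value unset

		ConnectionString connectionString = (ConnectionString) editor.getValue();
		System.out.println(connectionString.getDatabase()); // prints "app-db"
	}
}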
@@ -61,8 +61,8 @@ public @interface EnableMongoAuditing {
	boolean modifyOnCreate() default true;

	/**
	 * Configures a {@link DateTimeProvider} bean name that allows customizing the timestamp to be used for setting
	 * creation and modification dates.
	 * Configures a {@link DateTimeProvider} bean name that allows customizing the {@link org.joda.time.DateTime} to be
	 * used for setting creation and modification dates.
	 *
	 * @return empty {@link String} by default.
	 */

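Enabling the annotation is typically paired with an AuditorAware bean that the auditorAwareRef attribute can point to. A hedged sketch using the default attributes (the auditor value and class name are illustrative):

import java.util.Optional;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.domain.AuditorAware;
import org.springframework.data.mongodb.config.EnableMongoAuditing;

@Configuration
@EnableMongoAuditing
class AuditingConfig {

	@Bean
	AuditorAware<String> auditorProvider() {
		// illustrative: a real application would resolve the current user, e.g. from a security context
		return () -> Optional.of("system");
	}
}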
@@ -1,70 +0,0 @@
|
||||
/*
|
||||
* Copyright 2020-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Inherited;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.data.auditing.DateTimeProvider;
|
||||
import org.springframework.data.domain.ReactiveAuditorAware;
|
||||
|
||||
/**
|
||||
* Annotation to enable auditing in MongoDB using reactive infrastructure via annotation configuration.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @since 3.1
|
||||
*/
|
||||
@Inherited
|
||||
@Documented
|
||||
@Target(ElementType.TYPE)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Import(ReactiveMongoAuditingRegistrar.class)
|
||||
public @interface EnableReactiveMongoAuditing {
|
||||
|
||||
/**
|
||||
* Configures the {@link ReactiveAuditorAware} bean to be used to lookup the current principal.
|
||||
*
|
||||
* @return empty {@link String} by default.
|
||||
*/
|
||||
String auditorAwareRef() default "";
|
||||
|
||||
/**
|
||||
* Configures whether the creation and modification dates are set. Defaults to {@literal true}.
|
||||
*
|
||||
* @return {@literal true} by default.
|
||||
*/
|
||||
boolean setDates() default true;
|
||||
|
||||
/**
|
||||
* Configures whether the entity shall be marked as modified on creation. Defaults to {@literal true}.
|
||||
*
|
||||
* @return {@literal true} by default.
|
||||
*/
|
||||
boolean modifyOnCreate() default true;
|
||||
|
||||
/**
|
||||
* Configures a {@link DateTimeProvider} bean name that allows customizing the timestamp to be used for setting
|
||||
* creation and modification dates.
|
||||
*
|
||||
* @return empty {@link String} by default.
|
||||
*/
|
||||
String dateTimeProviderRef() default "";
|
||||
}
|
||||
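The reactive variant removed above mirrors EnableMongoAuditing but resolves the auditor through a ReactiveAuditorAware. A sketch of how it was meant to be used (the auditor value and class name are illustrative):

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.domain.ReactiveAuditorAware;
import org.springframework.data.mongodb.config.EnableReactiveMongoAuditing;

import reactor.core.publisher.Mono;

@Configuration
@EnableReactiveMongoAuditing
class ReactiveAuditingConfig {

	@Bean
	ReactiveAuditorAware<String> reactiveAuditorProvider() {
		// illustrative: derive the auditor from the reactive security context in a real application
		return () -> Mono.just("system");
	}
}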
@@ -96,9 +96,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
		String id = element.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE);
		id = StringUtils.hasText(id) ? id : DEFAULT_CONVERTER_BEAN_NAME;

		String autoIndexCreation = element.getAttribute("auto-index-creation");
		boolean autoIndexCreationEnabled = StringUtils.hasText(autoIndexCreation) && Boolean.valueOf(autoIndexCreation);

		parserContext.pushContainingComponent(new CompositeComponentDefinition("Mapping Mongo Converter", element));

		BeanDefinition conversionsDefinition = getCustomConversions(element, parserContext);
@@ -202,11 +199,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {

	public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
			@Nullable BeanDefinition conversionsDefinition, @Nullable String converterId) {
		return potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, converterId, false);
	}

	public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
			@Nullable BeanDefinition conversionsDefinition, @Nullable String converterId, boolean autoIndexCreation) {

		String ctxRef = element.getAttribute("mapping-context-ref");

@@ -234,8 +226,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
			mappingContextBuilder.addPropertyValue("simpleTypeHolder", simpleTypesDefinition);
		}

		mappingContextBuilder.addPropertyValue("autoIndexCreation", autoIndexCreation);

		parseFieldNamingStrategy(element, parserContext.getReaderContext(), mappingContextBuilder);

		ctxRef = converterId == null || DEFAULT_CONVERTER_BEAN_NAME.equals(converterId) ? MAPPING_CONTEXT_BEAN_NAME

@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
|
||||
import org.springframework.beans.factory.FactoryBean;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
@@ -27,8 +28,14 @@ import org.springframework.data.auditing.IsNewAwareAuditingHandler;
|
||||
import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
|
||||
import org.springframework.data.auditing.config.AuditingConfiguration;
|
||||
import org.springframework.data.config.ParsingUtils;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
* {@link ImportBeanDefinitionRegistrar} to enable {@link EnableMongoAuditing} annotation.
|
||||
@@ -39,6 +46,9 @@ import org.springframework.util.Assert;
|
||||
*/
|
||||
class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
|
||||
private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
|
||||
MongoAuditingRegistrar.class.getClassLoader());
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
|
||||
@@ -81,7 +91,7 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class);
|
||||
|
||||
BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(PersistentEntitiesFactoryBean.class);
|
||||
BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(MongoMappingContextLookup.class);
|
||||
definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR);
|
||||
|
||||
builder.addConstructorArgValue(definition.getBeanDefinition());
|
||||
@@ -106,6 +116,68 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
|
||||
registerInfrastructureBeanWithId(listenerBeanDefinitionBuilder.getBeanDefinition(),
|
||||
AuditingEntityCallback.class.getName(), registry);
|
||||
|
||||
if (PROJECT_REACTOR_AVAILABLE) {
|
||||
registerReactiveAuditingEntityCallback(registry, auditingHandlerDefinition.getSource());
|
||||
}
|
||||
}
|
||||
|
||||
private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry, Object source) {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);
|
||||
|
||||
builder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));
|
||||
builder.getRawBeanDefinition().setSource(source);
|
||||
|
||||
registerInfrastructureBeanWithId(builder.getBeanDefinition(), ReactiveAuditingEntityCallback.class.getName(),
|
||||
registry);
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple helper to be able to wire the {@link MappingContext} from a {@link MappingMongoConverter} bean available in
|
||||
* the application context.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
static class MongoMappingContextLookup
|
||||
implements FactoryBean<MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty>> {
|
||||
|
||||
private final MappingMongoConverter converter;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoMappingContextLookup} for the given {@link MappingMongoConverter}.
|
||||
*
|
||||
* @param converter must not be {@literal null}.
|
||||
*/
|
||||
public MongoMappingContextLookup(MappingMongoConverter converter) {
|
||||
this.converter = converter;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObject()
|
||||
*/
|
||||
@Override
|
||||
public MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> getObject() throws Exception {
|
||||
return converter.getMappingContext();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
|
||||
*/
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return MappingContext.class;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#isSingleton()
|
||||
*/
|
||||
@Override
|
||||
public boolean isSingleton() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -50,11 +50,10 @@ public class MongoClientParser implements BeanDefinitionParser {

		ParsingUtils.setPropertyValue(builder, element, "port", "port");
		ParsingUtils.setPropertyValue(builder, element, "host", "host");
		ParsingUtils.setPropertyValue(builder, element, "credential", "credential");
		ParsingUtils.setPropertyValue(builder, element, "replica-set", "replicaSet");
		ParsingUtils.setPropertyValue(builder, element, "connection-string", "connectionString");
		ParsingUtils.setPropertyValue(builder, element, "credentials", "credentials");

		MongoParsingUtils.parseMongoClientSettings(element, builder);
		MongoParsingUtils.parseMongoClientOptions(element, builder);
		MongoParsingUtils.parseReplicaSet(element, builder);

		String defaultedId = StringUtils.hasText(id) ? id : BeanNames.MONGO_BEAN_NAME;

@@ -63,34 +62,22 @@ public class MongoClientParser implements BeanDefinitionParser {
		BeanComponentDefinition mongoComponent = helper.getComponent(builder, defaultedId);
		parserContext.registerBeanComponent(mongoComponent);

		BeanComponentDefinition connectionStringPropertyEditor = helper
				.getComponent(MongoParsingUtils.getConnectionStringPropertyEditorBuilder());
		parserContext.registerBeanComponent(connectionStringPropertyEditor);

		BeanComponentDefinition serverAddressPropertyEditor = helper
				.getComponent(MongoParsingUtils.getServerAddressPropertyEditorBuilder());
		BeanComponentDefinition serverAddressPropertyEditor = helper.getComponent(MongoParsingUtils
				.getServerAddressPropertyEditorBuilder());
		parserContext.registerBeanComponent(serverAddressPropertyEditor);

		BeanComponentDefinition writeConcernEditor = helper
				.getComponent(MongoParsingUtils.getWriteConcernPropertyEditorBuilder());
		BeanComponentDefinition writeConcernEditor = helper.getComponent(MongoParsingUtils
				.getWriteConcernPropertyEditorBuilder());
		parserContext.registerBeanComponent(writeConcernEditor);

		BeanComponentDefinition readConcernEditor = helper
				.getComponent(MongoParsingUtils.getReadConcernPropertyEditorBuilder());
		parserContext.registerBeanComponent(readConcernEditor);

		BeanComponentDefinition readPreferenceEditor = helper
				.getComponent(MongoParsingUtils.getReadPreferencePropertyEditorBuilder());
		BeanComponentDefinition readPreferenceEditor = helper.getComponent(MongoParsingUtils
				.getReadPreferencePropertyEditorBuilder());
		parserContext.registerBeanComponent(readPreferenceEditor);

		BeanComponentDefinition credentialsEditor = helper
				.getComponent(MongoParsingUtils.getMongoCredentialPropertyEditor());
		BeanComponentDefinition credentialsEditor = helper.getComponent(MongoParsingUtils
				.getMongoCredentialPropertyEditor());
		parserContext.registerBeanComponent(credentialsEditor);

		BeanComponentDefinition uuidRepresentationEditor = helper
				.getComponent(MongoParsingUtils.getUUidRepresentationEditorBuilder());
		parserContext.registerBeanComponent(uuidRepresentationEditor);

		parserContext.popAndRegisterContainingComponent();

		return mongoComponent.getBeanDefinition();

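The parser above maps XML attributes such as host, port and connection-string onto bean properties of MongoClientFactoryBean. Assuming matching setters exist for those properties (they are not shown in this diff), the programmatic equivalent looks roughly like this:

import org.springframework.data.mongodb.core.MongoClientFactoryBean;

import com.mongodb.client.MongoClient;

class MongoClientFactoryBeanExample {

	public static void main(String[] args) throws Exception {

		// programmatic equivalent of the XML attributes the parser maps above; host/port are assumed bean properties
		MongoClientFactoryBean factoryBean = new MongoClientFactoryBean();
		factoryBean.setHost("localhost");
		factoryBean.setPort(27017);
		factoryBean.afterPropertiesSet();

		MongoClient client = factoryBean.getObject(); // the client type corresponds to the newer driver side of this diff
	}
}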
@@ -20,26 +20,22 @@ import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.type.filter.AnnotationTypeFilter;
|
||||
import org.springframework.data.annotation.Persistent;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
|
||||
/**
|
||||
* Base class for Spring Data MongoDB to be extended for JavaConfiguration usage.
|
||||
*
|
||||
@@ -79,12 +75,11 @@ public abstract class MongoConfigurationSupport {
|
||||
* @throws ClassNotFoundException
|
||||
*/
|
||||
@Bean
|
||||
public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions)
|
||||
throws ClassNotFoundException {
|
||||
public MongoMappingContext mongoMappingContext() throws ClassNotFoundException {
|
||||
|
||||
MongoMappingContext mappingContext = new MongoMappingContext();
|
||||
mappingContext.setInitialEntitySet(getInitialEntitySet());
|
||||
mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder());
|
||||
mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());
|
||||
mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
|
||||
mappingContext.setAutoIndexCreation(autoIndexCreation());
|
||||
|
||||
@@ -93,30 +88,14 @@ public abstract class MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Register custom {@link Converter}s in a {@link CustomConversions} object if required. These
|
||||
* {@link CustomConversions} will be registered with the
|
||||
* {@link org.springframework.data.mongodb.core.convert.MappingMongoConverter} and {@link MongoMappingContext}.
|
||||
* Returns an empty {@link MongoCustomConversions} instance by default.
|
||||
* <p>
|
||||
* <strong>NOTE:</strong> Use {@link #configureConverters(MongoConverterConfigurationAdapter)} to configure MongoDB
|
||||
* native simple types and register custom {@link Converter converters}.
|
||||
* {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and
|
||||
* {@link MongoMappingContext}. Returns an empty {@link MongoCustomConversions} instance by default.
|
||||
*
|
||||
* @return must not be {@literal null}.
|
||||
*/
|
||||
@Bean
|
||||
public MongoCustomConversions customConversions() {
|
||||
return MongoCustomConversions.create(this::configureConverters);
|
||||
}
|
||||
|
||||
/**
|
||||
* Configuration hook for {@link MongoCustomConversions} creation.
|
||||
*
|
||||
* @param converterConfigurationAdapter never {@literal null}.
|
||||
* @since 2.3
|
||||
* @see MongoConverterConfigurationAdapter#useNativeDriverJavaTimeCodecs()
|
||||
* @see MongoConverterConfigurationAdapter#useSpringDataJavaTimeCodecs()
|
||||
*/
|
||||
protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) {
|
||||
|
||||
public CustomConversions customConversions() {
|
||||
return new MongoCustomConversions(Collections.emptyList());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -139,7 +118,8 @@ public abstract class MongoConfigurationSupport {
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document}.
|
||||
* Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document} and
|
||||
* {@link Persistent}.
|
||||
*
|
||||
* @param basePackage must not be {@literal null}.
|
||||
* @return
|
||||
@@ -159,6 +139,7 @@ public abstract class MongoConfigurationSupport {
|
||||
ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider(
|
||||
false);
|
||||
componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class));
|
||||
componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class));
|
||||
|
||||
for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) {
|
||||
|
||||
@@ -172,7 +153,8 @@ public abstract class MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Configures whether to abbreviate field names for domain objects by configuring a
|
||||
* {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created.
|
||||
* {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced
|
||||
* customization needs, consider overriding {@link #mappingMongoConverter()}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@@ -195,36 +177,11 @@ public abstract class MongoConfigurationSupport {
|
||||
* Configure whether to automatically create indices for domain types by deriving the
|
||||
* {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not.
|
||||
*
|
||||
* @return {@literal false} by default. <br />
|
||||
* <strong>INFO</strong>: As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}.
|
||||
* @return {@literal true} by default. <br />
|
||||
* <strong>INFO</strong>: As of 3.x the default will be set to {@literal false}.
|
||||
* @since 2.2
|
||||
*/
|
||||
protected boolean autoIndexCreation() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the {@link MongoClientSettings} used to create the actual {@literal MongoClient}. <br />
|
||||
* Override either this method, or use {@link #configureClientSettings(Builder)} to alter the setup.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClientSettings mongoClientSettings() {
|
||||
|
||||
MongoClientSettings.Builder builder = MongoClientSettings.builder();
|
||||
builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY);
|
||||
configureClientSettings(builder);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure {@link MongoClientSettings} via its {@link Builder} API.
|
||||
*
|
||||
* @param builder never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected void configureClientSettings(MongoClientSettings.Builder builder) {
|
||||
// customization hook
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
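The support class exposes the two customization hooks referenced throughout this diff: configureConverters(MongoConverterConfigurationAdapter) and autoIndexCreation(). A sketch of overriding both from a concrete configuration class (the class and database name are illustrative):

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;

@Configuration
class ConversionsConfig extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "app-db"; // illustrative
	}

	@Override
	protected void configureConverters(MongoConverterConfigurationAdapter adapter) {
		// keep java.time types on the driver's native codecs instead of Spring Data's own converters
		adapter.useNativeDriverJavaTimeCodecs();
	}

	@Override
	protected boolean autoIndexCreation() {
		return true; // 3.x defaults to false, so opt back in explicitly if index derivation is wanted
	}
}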
@@ -32,12 +32,14 @@ import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.beans.factory.xml.ParserContext;
|
||||
import org.springframework.data.config.BeanComponentDefinitionBuilder;
|
||||
import org.springframework.data.mongodb.core.MongoClientFactoryBean;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
import com.mongodb.ConnectionString;
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClientURI;
|
||||
import com.mongodb.MongoURI;
|
||||
|
||||
/**
|
||||
* {@link BeanDefinitionParser} to parse {@code db-factory} elements into {@link BeanDefinition}s.
|
||||
@@ -82,11 +84,10 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
|
||||
|
||||
// Common setup
|
||||
BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(SimpleMongoClientDatabaseFactory.class);
|
||||
BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder.genericBeanDefinition(SimpleMongoDbFactory.class);
|
||||
setPropertyValue(dbFactoryBuilder, element, "write-concern", "writeConcern");
|
||||
|
||||
BeanDefinition mongoUri = getConnectionString(element, parserContext);
|
||||
BeanDefinition mongoUri = getMongoUri(element, parserContext);
|
||||
|
||||
if (mongoUri != null) {
|
||||
|
||||
@@ -96,8 +97,7 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext);
|
||||
|
||||
String mongoRef = element.getAttribute("mongo-client-ref");
|
||||
|
||||
String mongoRef = element.getAttribute("mongo-ref");
|
||||
String dbname = element.getAttribute("dbname");
|
||||
|
||||
// Defaulting
|
||||
@@ -119,8 +119,8 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a default {@link BeanDefinition} of a {@link com.mongodb.client.MongoClient} instance and returns the
|
||||
* name under which the {@link com.mongodb.client.MongoClient} instance was registered under.
|
||||
* Registers a default {@link BeanDefinition} of a {@link Mongo} instance and returns the name under which the
|
||||
* {@link Mongo} instance was registered under.
|
||||
*
|
||||
* @param element must not be {@literal null}.
|
||||
* @param parserContext must not be {@literal null}.
|
||||
@@ -136,7 +136,8 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link BeanDefinition} for a {@link ConnectionString} depending on configured attributes. <br />
|
||||
* Creates a {@link BeanDefinition} for a {@link MongoURI} or {@link MongoClientURI} depending on configured
|
||||
* attributes. <br />
|
||||
* Errors when configured element contains {@literal uri} or {@literal client-uri} along with other attributes except
|
||||
* {@literal write-concern} and/or {@literal id}.
|
||||
*
|
||||
@@ -145,19 +146,11 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
* @return {@literal null} in case no client-/uri defined.
|
||||
*/
|
||||
@Nullable
|
||||
private BeanDefinition getConnectionString(Element element, ParserContext parserContext) {
|
||||
private BeanDefinition getMongoUri(Element element, ParserContext parserContext) {
|
||||
|
||||
String type = null;
|
||||
boolean hasClientUri = element.hasAttribute("client-uri");
|
||||
|
||||
if (element.hasAttribute("client-uri")) {
|
||||
type = "client-uri";
|
||||
} else if (element.hasAttribute("connection-string")) {
|
||||
type = "connection-string";
|
||||
} else if (element.hasAttribute("uri")) {
|
||||
type = "uri";
|
||||
}
|
||||
|
||||
if (!StringUtils.hasText(type)) {
|
||||
if (!hasClientUri && !element.hasAttribute("uri")) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -171,12 +164,16 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
if (element.getAttributes().getLength() > allowedAttributesCount) {
|
||||
|
||||
parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually!",
|
||||
parserContext.getReaderContext().error(
|
||||
"Configure either " + (hasClientUri ? "Mongo Client URI" : "Mongo URI") + " or details individually!",
|
||||
parserContext.extractSource(element));
|
||||
}
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(ConnectionString.class);
|
||||
builder.addConstructorArgValue(element.getAttribute(type));
|
||||
Class<?> type = MongoClientURI.class;
|
||||
String uri = hasClientUri ? element.getAttribute("client-uri") : element.getAttribute("uri");
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(type);
|
||||
builder.addConstructorArgValue(uri);
|
||||
|
||||
return builder.getBeanDefinition();
|
||||
}
|
||||
|
||||
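On the newer side, the db-factory element resolves to a SimpleMongoClientDatabaseFactory built from a ConnectionString. Assuming the matching constructor (it is not shown in this diff), the programmatic equivalent is roughly:

import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;

import com.mongodb.ConnectionString;

class DbFactoryExample {

	public static void main(String[] args) {

		// equivalent of <mongo:db-factory connection-string="..."/> on the newer side of this diff
		MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(
				new ConnectionString("mongodb://localhost:27017/app-db"));

		MongoTemplate template = new MongoTemplate(factory);
		System.out.println(template.getDb().getName()); // prints "app-db"
	}
}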
@@ -22,12 +22,9 @@ import java.util.Map;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.config.CustomEditorConfigurer;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionValidationException;
|
||||
import org.springframework.beans.factory.support.ManagedMap;
|
||||
import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean;
|
||||
import org.springframework.data.mongodb.core.MongoServerApiFactoryBean;
|
||||
import org.springframework.util.StringUtils;
|
||||
import org.springframework.data.mongodb.core.MongoClientOptionsFactoryBean;
|
||||
import org.springframework.util.xml.DomUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
@@ -46,92 +43,60 @@ abstract class MongoParsingUtils {
|
||||
private MongoParsingUtils() {}
|
||||
|
||||
/**
|
||||
* Parses the {@code mongo:client-settings} sub-element. Populates the given attribute factory with the proper
|
||||
* Parses the mongo replica-set element.
|
||||
*
|
||||
* @param parserContext the parser context
|
||||
* @param element the mongo element
|
||||
* @param mongoBuilder the bean definition builder to populate
|
||||
* @return
|
||||
*/
|
||||
static void parseReplicaSet(Element element, BeanDefinitionBuilder mongoBuilder) {
|
||||
setPropertyValue(mongoBuilder, element, "replica-set", "replicaSetSeeds");
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the {@code mongo:client-options} sub-element. Populates the given attribute factory with the proper
|
||||
* attributes.
|
||||
*
|
||||
* @param element
|
||||
* @param mongoClientBuilder
|
||||
* @param element must not be {@literal null}.
|
||||
* @param mongoClientBuilder must not be {@literal null}.
|
||||
* @return
|
||||
* @since 3.0
|
||||
* @since 1.7
|
||||
*/
|
||||
public static boolean parseMongoClientSettings(Element element, BeanDefinitionBuilder mongoClientBuilder) {
|
||||
public static boolean parseMongoClientOptions(Element element, BeanDefinitionBuilder mongoClientBuilder) {
|
||||
|
||||
Element settingsElement = DomUtils.getChildElementByTagName(element, "client-settings");
|
||||
if (settingsElement == null) {
|
||||
Element optionsElement = DomUtils.getChildElementByTagName(element, "client-options");
|
||||
|
||||
if (optionsElement == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BeanDefinitionBuilder clientOptionsDefBuilder = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(MongoClientSettingsFactoryBean.class);
|
||||
.genericBeanDefinition(MongoClientOptionsFactoryBean.class);
|
||||
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "application-name", "applicationName");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-preference", "readPreference");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-concern", "readConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "write-concern", "writeConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-reads", "retryReads");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-writes", "retryWrites");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "uuid-representation", "uUidRepresentation");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "description", "description");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-connections-per-host", "minConnectionsPerHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "connections-per-host", "connectionsPerHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "threads-allowed-to-block-for-connection-multiplier",
|
||||
"threadsAllowedToBlockForConnectionMultiplier");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-wait-time", "maxWaitTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-idle-time", "maxConnectionIdleTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-life-time", "maxConnectionLifeTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "connect-timeout", "connectTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-timeout", "socketTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-keep-alive", "socketKeepAlive");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "read-preference", "readPreference");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "write-concern", "writeConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-frequency", "heartbeatFrequency");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-heartbeat-frequency", "minHeartbeatFrequency");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-connect-timeout", "heartbeatConnectTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-socket-timeout", "heartbeatSocketTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "ssl", "ssl");
|
||||
setPropertyReference(clientOptionsDefBuilder, optionsElement, "ssl-socket-factory-ref", "sslSocketFactory");
|
||||
setPropertyReference(clientOptionsDefBuilder, optionsElement, "encryption-settings-ref", "autoEncryptionSettings");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "server-selection-timeout", "serverSelectionTimeout");
|
||||
|
||||
// SocketSettings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-connect-timeout", "socketConnectTimeoutMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-read-timeout", "socketReadTimeoutMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-receive-buffer-size", "socketReceiveBufferSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-send-buffer-size", "socketSendBufferSize");
|
||||
|
||||
// Server Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-heartbeat-frequency",
|
||||
"serverHeartbeatFrequencyMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-min-heartbeat-frequency",
|
||||
"serverMinHeartbeatFrequencyMS");
|
||||
|
||||
// Cluster Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-srv-host", "clusterSrvHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-hosts", "clusterHosts");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-connection-mode", "clusterConnectionMode");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-type", "custerRequiredClusterType");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-local-threshold", "clusterLocalThresholdMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-server-selection-timeout",
|
||||
"clusterServerSelectionTimeoutMS");
|
||||
|
||||
// Connection Pool Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-size", "poolMaxSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-min-size", "poolMinSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-wait-time", "poolMaxWaitTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-life-time",
|
||||
"poolMaxConnectionLifeTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-idle-time",
|
||||
"poolMaxConnectionIdleTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-initial-delay",
|
||||
"poolMaintenanceInitialDelayMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-frequency",
|
||||
"poolMaintenanceFrequencyMS");
|
||||
|
||||
// SSL Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-enabled", "sslEnabled");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-invalid-host-name-allowed",
|
||||
"sslInvalidHostNameAllowed");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-provider", "sslProvider");
|
||||
|
||||
// Field level encryption
|
||||
setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings");
|
||||
|
||||
// ServerAPI
|
||||
if (StringUtils.hasText(settingsElement.getAttribute("server-api-version"))) {
|
||||
|
||||
MongoServerApiFactoryBean serverApiFactoryBean = new MongoServerApiFactoryBean();
|
||||
serverApiFactoryBean.setVersion(settingsElement.getAttribute("server-api-version"));
|
||||
try {
|
||||
clientOptionsDefBuilder.addPropertyValue("serverApi", serverApiFactoryBean.getObject());
|
||||
} catch (Exception exception) {
|
||||
throw new BeanDefinitionValidationException("Non parsable server-api.", exception);
|
||||
}
|
||||
} else {
|
||||
setPropertyReference(clientOptionsDefBuilder, settingsElement, "server-api-ref", "serverApi");
|
||||
}
|
||||
|
||||
// and the rest
|
||||
|
||||
mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition());
|
||||
mongoClientBuilder.addPropertyValue("mongoClientOptions", clientOptionsDefBuilder.getBeanDefinition());
|
||||
|
||||
return true;
|
||||
}
|
||||
@@ -153,24 +118,6 @@ abstract class MongoParsingUtils {
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link ReadConcernPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getReadConcernPropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("com.mongodb.ReadConcern", ReadConcernPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* One should only register one bean definition but want to have the convenience of using
|
||||
* AbstractSingleBeanDefinitionParser but have the side effect of registering a 'default' property editor with the
|
||||
@@ -178,7 +125,7 @@ abstract class MongoParsingUtils {
|
||||
*/
|
||||
static BeanDefinitionBuilder getServerAddressPropertyEditorBuilder() {
|
||||
|
||||
Map<String, String> customEditors = new ManagedMap<>();
|
||||
Map<String, String> customEditors = new ManagedMap<String, String>();
|
||||
customEditors.put("com.mongodb.ServerAddress[]",
|
||||
"org.springframework.data.mongodb.config.ServerAddressPropertyEditor");
|
||||
|
||||
@@ -196,7 +143,7 @@ abstract class MongoParsingUtils {
|
||||
*/
|
||||
static BeanDefinitionBuilder getReadPreferencePropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<String, Class<?>>();
|
||||
customEditors.put("com.mongodb.ReadPreference", ReadPreferencePropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
@@ -222,41 +169,4 @@ abstract class MongoParsingUtils {
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link ConnectionStringPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getConnectionStringPropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("com.mongodb.ConnectionString", ConnectionStringPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link ConnectionStringPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getUUidRepresentationEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("org.bson.UuidRepresentation", UUidRepresentationPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
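The client-settings attributes parsed above ultimately feed a MongoClientSettings instance. A rough, hand-written equivalent using the driver's builder API (the values are illustrative and not taken from the commits):

import java.util.concurrent.TimeUnit;

import org.bson.UuidRepresentation;

import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings;
import com.mongodb.ReadPreference;

class ClientSettingsExample {

	public static void main(String[] args) {

		// roughly what attributes like application-name, read-preference, retry-writes and
		// socket-connect-timeout translate to on the driver's builder
		MongoClientSettings settings = MongoClientSettings.builder()
				.applyConnectionString(new ConnectionString("mongodb://localhost:27017"))
				.applicationName("spring-data-app") // illustrative name
				.readPreference(ReadPreference.primaryPreferred())
				.retryWrites(true)
				.uuidRepresentation(UuidRepresentation.STANDARD)
				.applyToSocketSettings(socket -> socket.connectTimeout(2, TimeUnit.SECONDS))
				.build();

		System.out.println(settings.getApplicationName());
	}
}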
@@ -1,61 +0,0 @@
/*
* Copyright 2020-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;

import org.springframework.beans.factory.FactoryBean;
import org.springframework.data.mapping.context.PersistentEntities;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

/**
* Simple helper to be able to wire the {@link PersistentEntities} from a {@link MappingMongoConverter} bean available
* in the application context.
*
* @author Oliver Gierke
* @author Mark Paluch
* @author Christoph Strobl
* @since 3.1
*/
public class PersistentEntitiesFactoryBean implements FactoryBean<PersistentEntities> {

private final MappingMongoConverter converter;

/**
* Creates a new {@link PersistentEntitiesFactoryBean} for the given {@link MappingMongoConverter}.
*
* @param converter must not be {@literal null}.
*/
public PersistentEntitiesFactoryBean(MappingMongoConverter converter) {
this.converter = converter;
}

/*
* (non-Javadoc)
* @see org.springframework.beans.factory.FactoryBean#getObject()
*/
@Override
public PersistentEntities getObject() {
return PersistentEntities.of(converter.getMappingContext());
}

/*
* (non-Javadoc)
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
*/
@Override
public Class<?> getObjectType() {
return PersistentEntities.class;
}
}
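As a hedged illustration of how such a factory bean is typically wired, the same result can be expressed with Java configuration; the configuration class name below is an assumption, not part of the diff:

```java
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mapping.context.PersistentEntities;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

@Configuration
class PersistentEntitiesConfigSketch {

	// Exposes PersistentEntities by delegating to the converter's mapping context,
	// which is exactly what PersistentEntitiesFactoryBean#getObject() does above.
	@Bean
	PersistentEntities persistentEntities(MappingMongoConverter converter) {
		return PersistentEntities.of(converter.getMappingContext());
	}
}
```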
@@ -1,97 +0,0 @@
/*
* Copyright 2020-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;

import java.lang.annotation.Annotation;

import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.core.type.AnnotationMetadata;
import org.springframework.data.auditing.ReactiveIsNewAwareAuditingHandler;
import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
import org.springframework.data.auditing.config.AuditingConfiguration;
import org.springframework.data.config.ParsingUtils;
import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
import org.springframework.util.Assert;

/**
* {@link ImportBeanDefinitionRegistrar} to enable {@link EnableReactiveMongoAuditing} annotation.
*
* @author Mark Paluch
* @since 3.1
*/
class ReactiveMongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {

/*
* (non-Javadoc)
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
*/
@Override
protected Class<? extends Annotation> getAnnotation() {
return EnableReactiveMongoAuditing.class;
}

/*
* (non-Javadoc)
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName()
*/
@Override
protected String getAuditingHandlerBeanName() {
return "reactiveMongoAuditingHandler";
}

/*
* (non-Javadoc)
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration)
*/
@Override
protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) {

Assert.notNull(configuration, "AuditingConfiguration must not be null!");

BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class);

BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(PersistentEntitiesFactoryBean.class);
definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR);

builder.addConstructorArgValue(definition.getBeanDefinition());
return configureDefaultAuditHandlerAttributes(configuration, builder);
}

/*
* (non-Javadoc)
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry)
*/
@Override
protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition,
BeanDefinitionRegistry registry) {

Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!");
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");

BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);

builder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));
builder.getRawBeanDefinition().setSource(auditingHandlerDefinition.getSource());

registerInfrastructureBeanWithId(builder.getBeanDefinition(), ReactiveAuditingEntityCallback.class.getName(),
registry);
}

}
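The registrar is only ever triggered through the annotation it reads. A sketch of the typical activation, where the auditor value and configuration class name are assumptions made for illustration:

```java
import reactor.core.publisher.Mono;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.domain.ReactiveAuditorAware;
import org.springframework.data.mongodb.config.EnableReactiveMongoAuditing;

@Configuration
@EnableReactiveMongoAuditing // imports the ReactiveMongoAuditingRegistrar shown above
class ReactiveAuditingConfigSketch {

	// Supplies the current auditor for @CreatedBy / @LastModifiedBy; a fixed value here for brevity.
	@Bean
	ReactiveAuditorAware<String> auditorAware() {
		return () -> Mono.just("system");
	}
}
```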
@@ -1,48 +0,0 @@
/*
* Copyright 2019-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;

import java.beans.PropertyEditorSupport;

import org.springframework.lang.Nullable;
import org.springframework.util.StringUtils;

import com.mongodb.ReadConcern;
import com.mongodb.ReadConcernLevel;

/**
* Parse a {@link String} to a {@link ReadConcern}. If it is a well know {@link String} as identified by the
* {@link ReadConcernLevel#fromString(String)}.
*
* @author Christoph Strobl
* @since 3.0
*/
public class ReadConcernPropertyEditor extends PropertyEditorSupport {

/*
* (non-Javadoc)
* @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
*/
@Override
public void setAsText(@Nullable String readConcernString) {

if (!StringUtils.hasText(readConcernString)) {
return;
}

setValue(new ReadConcern(ReadConcernLevel.fromString(readConcernString)));
}
}
@@ -21,8 +21,8 @@ import java.net.UnknownHostException;
import java.util.HashSet;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
@@ -43,8 +43,8 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
* A port is a number without a leading 0 at the end of the address that is proceeded by just a single :.
*/
private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";
private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address %s '%s'. Check your replica set configuration!";
private static final Log LOG = LogFactory.getLog(ServerAddressPropertyEditor.class);
private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address {} '{}'. Check your replica set configuration!";
private static final Logger LOG = LoggerFactory.getLogger(ServerAddressPropertyEditor.class);

/*
* (non-Javadoc)
@@ -88,18 +88,14 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
private ServerAddress parseServerAddress(String source) {

if (!StringUtils.hasText(source)) {
if(LOG.isWarnEnabled()) {
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source));
}
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
return null;
}

String[] hostAndPort = extractHostAddressAndPort(source.trim());

if (hostAndPort.length > 2) {
if(LOG.isWarnEnabled()) {
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source));
}
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
return null;
}

@@ -109,13 +105,9 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {

return port == null ? new ServerAddress(hostAddress) : new ServerAddress(hostAddress, port);
} catch (UnknownHostException e) {
if(LOG.isWarnEnabled()) {
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]));
}
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]);
} catch (NumberFormatException e) {
if(LOG.isWarnEnabled()) {
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]));
}
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]);
}

return null;

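To make the host/port split rule concrete, a small standalone sketch of the same pattern in use; the address value is hypothetical:

```java
import java.util.regex.Pattern;

class HostPortSplitSketch {

	// Same expression as HOST_PORT_SPLIT_PATTERN above:
	// split on the single ':' that is followed by a port without a leading zero.
	private static final Pattern HOST_PORT = Pattern.compile("(?<!:):(?=[123456789]\\d*$)");

	public static void main(String[] args) {
		String[] parts = HOST_PORT.split("localhost:27017");
		System.out.println(parts[0] + " -> " + parts[1]); // prints: localhost -> 27017
	}
}
```

An IPv6 literal such as "::1" contains no matching colon and therefore stays a single host token, which is why the editor later checks the number of split parts.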
@@ -1,45 +0,0 @@
/*
* Copyright 2020-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;

import java.beans.PropertyEditorSupport;

import org.bson.UuidRepresentation;
import org.springframework.lang.Nullable;
import org.springframework.util.StringUtils;

/**
* Parse a {@link String} to a {@link UuidRepresentation}.
*
* @author Christoph Strobl
* @since 3.0
*/
public class UUidRepresentationPropertyEditor extends PropertyEditorSupport {

/*
* (non-Javadoc)
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
*/
@Override
public void setAsText(@Nullable String value) {

if (!StringUtils.hasText(value)) {
return;
}

setValue(UuidRepresentation.valueOf(value));
}
}
@@ -34,7 +34,7 @@ import com.mongodb.WriteConcern;
public class WriteConcernPropertyEditor extends PropertyEditorSupport {

/**
* Parse a string to a {@link WriteConcern}.
* Parse a string to a List<ServerAddress>
*/
@Override
public void setAsText(@Nullable String writeConcernString) {
@@ -51,5 +51,6 @@ public class WriteConcernPropertyEditor extends PropertyEditorSupport {
// pass on the string to the constructor
setValue(new WriteConcern(writeConcernString));
}

}
}

@@ -15,6 +15,8 @@
*/
package org.springframework.data.mongodb.core;

import lombok.AllArgsConstructor;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -27,9 +29,7 @@ import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions.DomainTypeMapping;
import org.springframework.data.mongodb.core.aggregation.CountOperation;
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.convert.QueryMapper;
@@ -37,7 +37,6 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.util.Lazy;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
@@ -50,50 +49,34 @@ import org.springframework.util.ObjectUtils;
* @author Mark Paluch
* @since 2.1
*/
@AllArgsConstructor
class AggregationUtil {

QueryMapper queryMapper;
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
Lazy<AggregationOperationContext> untypedMappingContext;

AggregationUtil(QueryMapper queryMapper,
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
/**
* Prepare the {@link AggregationOperationContext} for a given aggregation by either returning the context itself it
* is not {@literal null}, create a {@link TypeBasedAggregationOperationContext} if the aggregation contains type
* information (is a {@link TypedAggregation}) or use the {@link Aggregation#DEFAULT_CONTEXT}.
*
* @param aggregation must not be {@literal null}.
* @param context can be {@literal null}.
* @return the root {@link AggregationOperationContext} to use.
*/
AggregationOperationContext prepareAggregationContext(Aggregation aggregation,
@Nullable AggregationOperationContext context) {

this.queryMapper = queryMapper;
this.mappingContext = mappingContext;
this.untypedMappingContext = Lazy
.of(() -> new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper));
}

AggregationOperationContext createAggregationContext(Aggregation aggregation, @Nullable Class<?> inputType) {

DomainTypeMapping domainTypeMapping = aggregation.getOptions().getDomainTypeMapping();

if (domainTypeMapping == DomainTypeMapping.NONE) {
return Aggregation.DEFAULT_CONTEXT;
if (context != null) {
return context;
}

if (!(aggregation instanceof TypedAggregation)) {

if(inputType == null) {
return untypedMappingContext.get();
}

if (domainTypeMapping == DomainTypeMapping.STRICT
&& !aggregation.getPipeline().containsUnionWith()) {
return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
}

return new RelaxedTypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
if (aggregation instanceof TypedAggregation) {
return new TypeBasedAggregationOperationContext(((TypedAggregation) aggregation).getInputType(), mappingContext,
queryMapper);
}

inputType = ((TypedAggregation<?>) aggregation).getInputType();
if (domainTypeMapping == DomainTypeMapping.STRICT
&& !aggregation.getPipeline().containsUnionWith()) {
return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
}

return new RelaxedTypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
return Aggregation.DEFAULT_CONTEXT;
}

/**
@@ -105,7 +88,7 @@ class AggregationUtil {
*/
List<Document> createPipeline(Aggregation aggregation, AggregationOperationContext context) {

if (ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) {
if (!ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) {
return aggregation.toPipeline(context);
}

@@ -132,6 +115,53 @@ class AggregationUtil {
return command;
}

/**
* Create a {@code $count} aggregation for {@link Query} and optionally a {@link Class entity class}.
*
* @param query must not be {@literal null}.
* @param entityClass can be {@literal null} if the {@link Query} object is empty.
* @return the {@link Aggregation} pipeline definition to run a {@code $count} aggregation.
*/
Aggregation createCountAggregation(Query query, @Nullable Class<?> entityClass) {

List<AggregationOperation> pipeline = computeCountAggregationPipeline(query, entityClass);

Aggregation aggregation = entityClass != null ? Aggregation.newAggregation(entityClass, pipeline)
: Aggregation.newAggregation(pipeline);
aggregation.withOptions(AggregationOptions.builder().collation(query.getCollation().orElse(null)).build());

return aggregation;
}

private List<AggregationOperation> computeCountAggregationPipeline(Query query, @Nullable Class<?> entityType) {

CountOperation count = Aggregation.count().as("totalEntityCount");
if (query.getQueryObject().isEmpty()) {
return Collections.singletonList(count);
}

Assert.notNull(entityType, "Entity type must not be null!");

Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(),
mappingContext.getPersistentEntity(entityType));

CriteriaDefinition criteria = new CriteriaDefinition() {

@Override
public Document getCriteriaObject() {
return mappedQuery;
}

@Nullable
@Override
public String getKey() {
return null;
}
};

return Arrays.asList(Aggregation.match(criteria), count);
}

private List<Document> mapAggregationPipeline(List<Document> pipeline) {

return pipeline.stream().map(val -> queryMapper.getMappedObject(val, Optional.empty()))

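The count pipeline assembled above boils down to an optional $match followed by $count. A hedged sketch of the equivalent pipeline built through the public Aggregation API; the field name and value are invented for illustration:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.query.Criteria;

class CountPipelineSketch {

	// Roughly what createCountAggregation(query, entityClass) produces for a non-empty query:
	// [{ $match: { status: "ACTIVE" } }, { $count: "totalEntityCount" }]
	static Aggregation countActive() {
		return newAggregation(
				match(Criteria.where("status").is("ACTIVE")),
				count().as("totalEntityCount"));
	}
}
```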
@@ -15,6 +15,8 @@
*/
package org.springframework.data.mongodb.core;

import lombok.EqualsAndHashCode;

import java.time.Instant;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;

@@ -25,7 +27,6 @@ import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.messaging.Message;
import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;

import com.mongodb.client.model.changestream.ChangeStreamDocument;
import com.mongodb.client.model.changestream.OperationType;
@@ -38,6 +39,7 @@ import com.mongodb.client.model.changestream.OperationType;
* @author Mark Paluch
* @since 2.1
*/
@EqualsAndHashCode
public class ChangeStreamEvent<T> {

@SuppressWarnings("rawtypes") //
@@ -185,8 +187,8 @@ public class ChangeStreamEvent<T> {
return CONVERTED_UPDATER.compareAndSet(this, null, result) ? result : CONVERTED_UPDATER.get(this);
}

throw new IllegalArgumentException(
String.format("No converter found capable of converting %s to %s", fullDocument.getClass(), targetType));
throw new IllegalArgumentException(String.format("No converter found capable of converting %s to %s",
fullDocument.getClass(), targetType));
}

/*
@@ -197,27 +199,4 @@ public class ChangeStreamEvent<T> {
public String toString() {
return "ChangeStreamEvent {" + "raw=" + raw + ", targetType=" + targetType + '}';
}

@Override
public boolean equals(Object o) {

if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;

ChangeStreamEvent<?> that = (ChangeStreamEvent<?>) o;

if (!ObjectUtils.nullSafeEquals(this.raw, that.raw)) {
return false;
}
return ObjectUtils.nullSafeEquals(this.targetType, that.targetType);
}

@Override
public int hashCode() {
int result = raw != null ? raw.hashCode() : 0;
result = 31 * result + ObjectUtils.nullSafeHashCode(targetType);
return result;
}
}

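The hunk above trades hand-written equals()/hashCode() for Lombok's annotation. A minimal sketch of the same trade-off on an invented two-field class, assuming Lombok is on the annotation processor path:

```java
import lombok.EqualsAndHashCode;

// With Lombok, the manual equals()/hashCode() pair shown in the hunk collapses to one annotation
// that considers every non-static field, here raw and targetType.
@EqualsAndHashCode
class EventKeySketch {

	private final Object raw;
	private final Class<?> targetType;

	EventKeySketch(Object raw, Class<?> targetType) {
		this.raw = raw;
		this.targetType = targetType;
	}
}
```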
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.EqualsAndHashCode;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.Arrays;
|
||||
import java.util.Optional;
|
||||
@@ -23,6 +25,7 @@ import org.bson.BsonDocument;
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -42,6 +45,7 @@ import com.mongodb.client.model.changestream.FullDocument;
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
@EqualsAndHashCode
|
||||
public class ChangeStreamOptions {
|
||||
|
||||
private @Nullable Object filter;
|
||||
@@ -152,44 +156,6 @@ public class ChangeStreamOptions {
|
||||
+ ObjectUtils.nullSafeClassName(timestamp));
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
return false;
|
||||
|
||||
ChangeStreamOptions that = (ChangeStreamOptions) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(this.filter, that.filter)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.resumeToken, that.resumeToken)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.fullDocumentLookup, that.fullDocumentLookup)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.collation, that.collation)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.resumeTimestamp, that.resumeTimestamp)) {
|
||||
return false;
|
||||
}
|
||||
return resume == that.resume;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = ObjectUtils.nullSafeHashCode(filter);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(resumeToken);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(fullDocumentLookup);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(collation);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(resumeTimestamp);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(resume);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
@@ -242,13 +208,13 @@ public class ChangeStreamOptions {
|
||||
|
||||
/**
|
||||
* Set the filter to apply.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Fields on aggregation expression root level are prefixed to map to fields contained in
|
||||
* {@link ChangeStreamDocument#getFullDocument() fullDocument}. However {@literal operationType}, {@literal ns},
|
||||
* {@literal documentKey} and {@literal fullDocument} are reserved words that will be omitted, and therefore taken
|
||||
* as given, during the mapping procedure. You may want to have a look at the
|
||||
* <a href="https://docs.mongodb.com/manual/reference/change-events/">structure of Change Events</a>.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to ensure filter expressions are
|
||||
* mapped to domain type fields.
|
||||
*
|
||||
|
||||
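A hedged usage sketch of the filter behaviour described in that javadoc; the field names and values are invented, only the reserved word handling is taken from the text above:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.ChangeStreamOptions;
import org.springframework.data.mongodb.core.query.Criteria;

class ChangeStreamFilterSketch {

	// "operationType" is a reserved word and matched as-is;
	// a root-level field such as "status" is prefixed onto fullDocument during mapping.
	static ChangeStreamOptions insertedActiveOnly() {
		return ChangeStreamOptions.builder()
				.filter(newAggregation(match(Criteria.where("operationType").is("insert")
						.and("status").is("ACTIVE"))))
				.build();
	}
}
```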
@@ -15,13 +15,12 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.data.mongodb.core.timeseries.GranularityDefinition;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.util.Optionals;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -45,7 +44,6 @@ public class CollectionOptions {
|
||||
private @Nullable Boolean capped;
|
||||
private @Nullable Collation collation;
|
||||
private ValidationOptions validationOptions;
|
||||
private @Nullable TimeSeriesOptions timeSeriesOptions;
|
||||
|
||||
/**
|
||||
* Constructs a new <code>CollectionOptions</code> instance.
|
||||
@@ -58,19 +56,17 @@ public class CollectionOptions {
|
||||
*/
|
||||
@Deprecated
|
||||
public CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped) {
|
||||
this(size, maxDocuments, capped, null, ValidationOptions.none(), null);
|
||||
this(size, maxDocuments, capped, null, ValidationOptions.none());
|
||||
}
|
||||
|
||||
private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped,
|
||||
@Nullable Collation collation, ValidationOptions validationOptions,
|
||||
@Nullable TimeSeriesOptions timeSeriesOptions) {
|
||||
@Nullable Collation collation, ValidationOptions validationOptions) {
|
||||
|
||||
this.maxDocuments = maxDocuments;
|
||||
this.size = size;
|
||||
this.capped = capped;
|
||||
this.collation = collation;
|
||||
this.validationOptions = validationOptions;
|
||||
this.timeSeriesOptions = timeSeriesOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -84,7 +80,7 @@ public class CollectionOptions {
|
||||
|
||||
Assert.notNull(collation, "Collation must not be null!");
|
||||
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null);
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -94,21 +90,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public static CollectionOptions empty() {
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick way to set up {@link CollectionOptions} for a Time Series collection. For more advanced settings use
|
||||
* {@link #timeSeries(TimeSeriesOptions)}.
|
||||
*
|
||||
* @param timeField The name of the property which contains the date in each time series document. Must not be
|
||||
* {@literal null}.
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @see #timeSeries(TimeSeriesOptions)
|
||||
* @since 3.3
|
||||
*/
|
||||
public static CollectionOptions timeSeries(String timeField) {
|
||||
return empty().timeSeries(TimeSeriesOptions.timeSeries(timeField));
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -119,7 +101,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions capped() {
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, null);
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -130,7 +112,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions maxDocuments(long maxDocuments) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -141,7 +123,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions size(long size) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -152,7 +134,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions collation(@Nullable Collation collation) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -272,20 +254,7 @@ public class CollectionOptions {
|
||||
public CollectionOptions validation(ValidationOptions validationOptions) {
|
||||
|
||||
Assert.notNull(validationOptions, "ValidationOptions must not be null!");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}.
|
||||
*
|
||||
* @param timeSeriesOptions must not be {@literal null}.
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) {
|
||||
|
||||
Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null!");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -336,16 +305,6 @@ public class CollectionOptions {
|
||||
return validationOptions.isEmpty() ? Optional.empty() : Optional.of(validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link TimeSeriesOptions} if available.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not specified.
|
||||
* @since 3.3
|
||||
*/
|
||||
public Optional<TimeSeriesOptions> getTimeSeriesOptions() {
|
||||
return Optional.ofNullable(timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encapsulation of ValidationOptions options.
|
||||
*
|
||||
@@ -353,6 +312,7 @@ public class CollectionOptions {
|
||||
* @author Andreas Zink
|
||||
* @since 2.1
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
public static class ValidationOptions {
|
||||
|
||||
private static final ValidationOptions NONE = new ValidationOptions(null, null, null);
|
||||
@@ -361,13 +321,6 @@ public class CollectionOptions {
|
||||
private final @Nullable ValidationLevel validationLevel;
|
||||
private final @Nullable ValidationAction validationAction;
|
||||
|
||||
public ValidationOptions(Validator validator, ValidationLevel validationLevel, ValidationAction validationAction) {
|
||||
|
||||
this.validator = validator;
|
||||
this.validationLevel = validationLevel;
|
||||
this.validationAction = validationAction;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an empty {@link ValidationOptions}.
|
||||
*
|
||||
@@ -428,7 +381,7 @@ public class CollectionOptions {
|
||||
/**
|
||||
* Get the {@code validationAction} to perform.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
* @return @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<ValidationAction> getValidationAction() {
|
||||
return Optional.ofNullable(validationAction);
|
||||
@@ -441,89 +394,4 @@ public class CollectionOptions {
|
||||
return !Optionals.isAnyPresent(getValidator(), getValidationAction(), getValidationLevel());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Options applicable to Time Series collections.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/core/timeseries-collections">https://docs.mongodb.com/manual/core/timeseries-collections</a>
|
||||
*/
|
||||
public static class TimeSeriesOptions {
|
||||
|
||||
private final String timeField;
|
||||
|
||||
private @Nullable final String metaField;
|
||||
|
||||
private final GranularityDefinition granularity;
|
||||
|
||||
private TimeSeriesOptions(String timeField, @Nullable String metaField, GranularityDefinition granularity) {
|
||||
|
||||
Assert.hasText(timeField, "Time field must not be empty or null!");
|
||||
|
||||
this.timeField = timeField;
|
||||
this.metaField = metaField;
|
||||
this.granularity = granularity;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link TimeSeriesOptions} using the given field as its {@literal timeField}. The one,
|
||||
* that contains the date in each time series document. <br />
|
||||
* {@link Field#name() Annotated fieldnames} will be considered during the mapping process.
|
||||
*
|
||||
* @param timeField must not be {@literal null}.
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
*/
|
||||
public static TimeSeriesOptions timeSeries(String timeField) {
|
||||
return new TimeSeriesOptions(timeField, null, Granularity.DEFAULT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the name of the field which contains metadata in each time series document. Should not be the {@literal id}
|
||||
* nor {@link TimeSeriesOptions#timeSeries(String)} timeField} nor point to an {@literal array} or
|
||||
* {@link java.util.Collection}. <br />
|
||||
* {@link Field#name() Annotated fieldnames} will be considered during the mapping process.
|
||||
*
|
||||
* @param metaField must not be {@literal null}.
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
*/
|
||||
public TimeSeriesOptions metaField(String metaField) {
|
||||
return new TimeSeriesOptions(timeField, metaField, granularity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Select the {@link GranularityDefinition} parameter to define how data in the time series collection is organized.
|
||||
* Select one that is closest to the time span between incoming measurements.
|
||||
*
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
* @see Granularity
|
||||
*/
|
||||
public TimeSeriesOptions granularity(GranularityDefinition granularity) {
|
||||
return new TimeSeriesOptions(timeField, metaField, granularity);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public String getTimeField() {
|
||||
return timeField;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return can be {@literal null}. Might be an {@literal empty} {@link String} as well, so maybe check via
|
||||
* {@link org.springframework.util.StringUtils#hasText(String)}.
|
||||
*/
|
||||
@Nullable
|
||||
public String getMetaField() {
|
||||
return metaField;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public GranularityDefinition getGranularity() {
|
||||
return granularity;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
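A short sketch of the time series options covered by the hunks above, combining the timeField, metaField and granularity knobs they document; the field names are placeholders:

```java
import org.springframework.data.mongodb.core.CollectionOptions;
import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions;
import org.springframework.data.mongodb.core.timeseries.Granularity;

class TimeSeriesOptionsSketch {

	// Equivalent to CollectionOptions.timeSeries("timestamp") with additional tuning,
	// as described by the javadoc in the hunks above.
	static CollectionOptions measurementCollection() {
		return CollectionOptions.empty()
				.timeSeries(TimeSeriesOptions.timeSeries("timestamp")
						.metaField("sensorId")
						.granularity(Granularity.MINUTES));
	}
}
```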
@@ -1,227 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Value object representing a count query. Count queries using {@code $near} or {@code $nearSphere} require a rewrite
|
||||
* to {@code $geoWithin}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
*/
|
||||
class CountQuery {
|
||||
|
||||
private Document source;
|
||||
|
||||
private CountQuery(Document source) {
|
||||
this.source = source;
|
||||
}
|
||||
|
||||
public static CountQuery of(Document source) {
|
||||
return new CountQuery(source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the query {@link Document} that can be used with {@code countDocuments()}. Potentially rewrites the query
|
||||
* to be usable with {@code countDocuments()}.
|
||||
*
|
||||
* @return the query {@link Document} that can be used with {@code countDocuments()}.
|
||||
*/
|
||||
public Document toQueryDocument() {
|
||||
|
||||
if (!requiresRewrite(source)) {
|
||||
return source;
|
||||
}
|
||||
|
||||
Document target = new Document();
|
||||
|
||||
for (Map.Entry<String, Object> entry : source.entrySet()) {
|
||||
|
||||
if (entry.getValue() instanceof Document && requiresRewrite(entry.getValue())) {
|
||||
|
||||
Document theValue = (Document) entry.getValue();
|
||||
target.putAll(createGeoWithin(entry.getKey(), theValue, source.get("$and")));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (entry.getValue() instanceof Collection && requiresRewrite(entry.getValue())) {
|
||||
|
||||
Collection<?> source = (Collection<?>) entry.getValue();
|
||||
|
||||
target.put(entry.getKey(), rewriteCollection(source));
|
||||
continue;
|
||||
}
|
||||
|
||||
if ("$and".equals(entry.getKey()) && target.containsKey("$and")) {
|
||||
// Expect $and to be processed with Document and createGeoWithin.
|
||||
continue;
|
||||
}
|
||||
|
||||
target.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
|
||||
return target;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param valueToInspect
|
||||
* @return {@code true} if the enclosing element needs to be rewritten.
|
||||
*/
|
||||
private boolean requiresRewrite(Object valueToInspect) {
|
||||
|
||||
if (valueToInspect instanceof Document) {
|
||||
return requiresRewrite((Document) valueToInspect);
|
||||
}
|
||||
|
||||
if (valueToInspect instanceof Collection) {
|
||||
return requiresRewrite((Collection) valueToInspect);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean requiresRewrite(Collection<?> collection) {
|
||||
|
||||
for (Object o : collection) {
|
||||
if (o instanceof Document && requiresRewrite((Document) o)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean requiresRewrite(Document document) {
|
||||
|
||||
if (containsNear(document)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
for (Object entry : document.values()) {
|
||||
|
||||
if (requiresRewrite(entry)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private Collection<Object> rewriteCollection(Collection<?> source) {
|
||||
|
||||
Collection<Object> rewrittenCollection = new ArrayList<>(source.size());
|
||||
|
||||
for (Object item : source) {
|
||||
if (item instanceof Document && requiresRewrite(item)) {
|
||||
rewrittenCollection.add(CountQuery.of((Document) item).toQueryDocument());
|
||||
} else {
|
||||
rewrittenCollection.add(item);
|
||||
}
|
||||
}
|
||||
|
||||
return rewrittenCollection;
|
||||
}
|
||||
|
||||
/**
|
||||
* Rewrite the near query for field {@code key} to {@code $geoWithin}.
|
||||
*
|
||||
* @param key the queried field.
|
||||
* @param source source {@link Document}.
|
||||
* @param $and potentially existing {@code $and} condition.
|
||||
* @return the rewritten query {@link Document}.
|
||||
*/
|
||||
private static Document createGeoWithin(String key, Document source, @Nullable Object $and) {
|
||||
|
||||
boolean spheric = source.containsKey("$nearSphere");
|
||||
Object $near = spheric ? source.get("$nearSphere") : source.get("$near");
|
||||
|
||||
Number maxDistance = source.containsKey("$maxDistance") ? (Number) source.get("$maxDistance") : Double.MAX_VALUE;
|
||||
List<Object> $centerMax = Arrays.asList(toCenterCoordinates($near), maxDistance);
|
||||
Document $geoWithinMax = new Document("$geoWithin",
|
||||
new Document(spheric ? "$centerSphere" : "$center", $centerMax));
|
||||
|
||||
if (!containsNearWithMinDistance(source)) {
|
||||
return new Document(key, $geoWithinMax);
|
||||
}
|
||||
|
||||
Number minDistance = (Number) source.get("$minDistance");
|
||||
List<Object> $centerMin = Arrays.asList(toCenterCoordinates($near), minDistance);
|
||||
Document $geoWithinMin = new Document("$geoWithin",
|
||||
new Document(spheric ? "$centerSphere" : "$center", $centerMin));
|
||||
|
||||
List<Document> criteria = new ArrayList<>();
|
||||
|
||||
if ($and != null) {
|
||||
if ($and instanceof Collection) {
|
||||
criteria.addAll((Collection) $and);
|
||||
} else {
|
||||
throw new IllegalArgumentException(
|
||||
"Cannot rewrite query as it contains an '$and' element that is not a Collection!: Offending element: "
|
||||
+ $and);
|
||||
}
|
||||
}
|
||||
|
||||
criteria.add(new Document("$nor", Collections.singletonList(new Document(key, $geoWithinMin))));
|
||||
criteria.add(new Document(key, $geoWithinMax));
|
||||
return new Document("$and", criteria);
|
||||
}
|
||||
|
||||
private static boolean containsNear(Document source) {
|
||||
return source.containsKey("$near") || source.containsKey("$nearSphere");
|
||||
}
|
||||
|
||||
private static boolean containsNearWithMinDistance(Document source) {
|
||||
|
||||
if (!containsNear(source)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return source.containsKey("$minDistance");
|
||||
}
|
||||
|
||||
private static Object toCenterCoordinates(Object value) {
|
||||
|
||||
if (ObjectUtils.isArray(value)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (value instanceof Point) {
|
||||
return Arrays.asList(((Point) value).getX(), ((Point) value).getY());
|
||||
}
|
||||
|
||||
if (value instanceof Document && ((Document) value).containsKey("x")) {
|
||||
|
||||
Document point = (Document) value;
|
||||
return Arrays.asList(point.get("x"), point.get("y"));
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
}
|
||||
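To make the rewrite performed by the class above concrete, a hedged sketch of its input and output shapes; the field name and coordinates are invented:

```java
import java.util.Arrays;

import org.bson.Document;

class CountQueryRewriteSketch {

	public static void main(String[] args) {

		// A $near query cannot be used with countDocuments(), so it gets rewritten to $geoWithin/$center.
		Document near = new Document("location",
				new Document("$near", Arrays.asList(0.5d, 0.5d)).append("$maxDistance", 1.5d));

		// Roughly the shape produced by CountQuery.of(near).toQueryDocument():
		Document rewritten = new Document("location",
				new Document("$geoWithin",
						new Document("$center", Arrays.asList(Arrays.asList(0.5d, 0.5d), 1.5d))));

		System.out.println(near.toJson());
		System.out.println(rewritten.toJson());
	}
}
```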
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.NonNull;
|
||||
import lombok.Value;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
@@ -24,13 +27,11 @@ import java.util.stream.Collectors;
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.context.ApplicationEventPublisher;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mongodb.BulkOperationException;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
|
||||
@@ -45,9 +46,7 @@ import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.util.Pair;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.MongoBulkWriteException;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.bulk.BulkWriteResult;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
@@ -63,8 +62,6 @@ import com.mongodb.client.model.*;
|
||||
* @author Minsu Kim
|
||||
* @author Jens Schauder
|
||||
* @author Michail Nikolaev
|
||||
* @author Roman Puchkovskiy
|
||||
* @author Jacob Botuck
|
||||
* @since 1.9
|
||||
*/
|
||||
class DefaultBulkOperations implements BulkOperations {
|
||||
@@ -74,6 +71,7 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
private final BulkOperationContext bulkOperationContext;
|
||||
private final List<SourceAwareWriteModelHolder> models = new ArrayList<>();
|
||||
|
||||
private PersistenceExceptionTranslator exceptionTranslator;
|
||||
private @Nullable WriteConcern defaultWriteConcern;
|
||||
|
||||
private BulkWriteOptions bulkOptions;
|
||||
@@ -97,9 +95,19 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
this.mongoOperations = mongoOperations;
|
||||
this.collectionName = collectionName;
|
||||
this.bulkOperationContext = bulkOperationContext;
|
||||
this.exceptionTranslator = new MongoExceptionTranslator();
|
||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link PersistenceExceptionTranslator} to be used. Defaults to {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @param exceptionTranslator can be {@literal null}.
|
||||
*/
|
||||
public void setExceptionTranslator(@Nullable PersistenceExceptionTranslator exceptionTranslator) {
|
||||
this.exceptionTranslator = exceptionTranslator == null ? new MongoExceptionTranslator() : exceptionTranslator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}.
|
||||
*
|
||||
@@ -292,7 +300,6 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
Assert.state(result != null, "Result must not be null.");
|
||||
|
||||
models.forEach(this::maybeEmitAfterSaveEvent);
|
||||
models.forEach(this::maybeInvokeAfterSaveCallback);
|
||||
|
||||
return result;
|
||||
} finally {
|
||||
@@ -306,26 +313,11 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
collection = collection.withWriteConcern(defaultWriteConcern);
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
return collection.bulkWrite( //
|
||||
models.stream() //
|
||||
.map(this::extractAndMapWriteModel) //
|
||||
.collect(Collectors.toList()), //
|
||||
bulkOptions);
|
||||
} catch (RuntimeException ex) {
|
||||
|
||||
if (ex instanceof MongoBulkWriteException) {
|
||||
|
||||
MongoBulkWriteException mongoBulkWriteException = (MongoBulkWriteException) ex;
|
||||
if (mongoBulkWriteException.getWriteConcernError() != null) {
|
||||
throw new DataIntegrityViolationException(ex.getMessage(), ex);
|
||||
}
|
||||
throw new BulkOperationException(ex.getMessage(), mongoBulkWriteException);
|
||||
}
|
||||
|
||||
throw ex;
|
||||
}
|
||||
return collection.bulkWrite( //
|
||||
models.stream() //
|
||||
.map(this::extractAndMapWriteModel) //
|
||||
.collect(Collectors.toList()), //
|
||||
bulkOptions);
|
||||
}
|
||||
|
||||
private WriteModel<Document> extractAndMapWriteModel(SourceAwareWriteModelHolder it) {
|
||||
@@ -429,52 +421,38 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
models.add(new SourceAwareWriteModelHolder(source, model));
|
||||
}
|
||||
|
||||
private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {
|
||||
private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder it) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), target, collectionName));
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {
|
||||
private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder it) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(it.getSource(), target, collectionName));
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeInvokeAfterSaveCallback(holder.getSource(), target);
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeInvokeAfterSaveCallback(holder.getSource(), target);
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(it.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {
|
||||
|
||||
if (bulkOperationContext.getEventPublisher() == null) {
|
||||
return event;
|
||||
if (null != bulkOperationContext.getEventPublisher()) {
|
||||
bulkOperationContext.getEventPublisher().publishEvent(event);
|
||||
}
|
||||
|
||||
bulkOperationContext.getEventPublisher().publishEvent(event);
|
||||
return event;
|
||||
}
|
||||
|
||||
@@ -497,16 +475,6 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
collectionName);
|
||||
}
|
||||
|
||||
private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(AfterSaveCallback.class, value, mappedDocument,
|
||||
collectionName);
|
||||
}
|
||||
|
||||
private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {
|
||||
|
||||
BulkWriteOptions options = new BulkWriteOptions();
|
||||
@@ -552,93 +520,15 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
static final class BulkOperationContext {
|
||||
@Value
|
||||
static class BulkOperationContext {
|
||||
|
||||
private final BulkMode bulkMode;
|
||||
private final Optional<? extends MongoPersistentEntity<?>> entity;
|
||||
private final QueryMapper queryMapper;
|
||||
private final UpdateMapper updateMapper;
|
||||
private final ApplicationEventPublisher eventPublisher;
|
||||
private final EntityCallbacks entityCallbacks;
|
||||
|
||||
BulkOperationContext(BulkOperations.BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
|
||||
QueryMapper queryMapper, UpdateMapper updateMapper, ApplicationEventPublisher eventPublisher,
|
||||
EntityCallbacks entityCallbacks) {
|
||||
|
||||
this.bulkMode = bulkMode;
|
||||
this.entity = entity;
|
||||
this.queryMapper = queryMapper;
|
||||
this.updateMapper = updateMapper;
|
||||
this.eventPublisher = eventPublisher;
|
||||
this.entityCallbacks = entityCallbacks;
|
||||
}
|
||||
|
||||
public BulkMode getBulkMode() {
|
||||
return this.bulkMode;
|
||||
}
|
||||
|
||||
public Optional<? extends MongoPersistentEntity<?>> getEntity() {
|
||||
return this.entity;
|
||||
}
|
||||
|
||||
public QueryMapper getQueryMapper() {
|
||||
return this.queryMapper;
|
||||
}
|
||||
|
||||
public UpdateMapper getUpdateMapper() {
|
||||
return this.updateMapper;
|
||||
}
|
||||
|
||||
public ApplicationEventPublisher getEventPublisher() {
|
||||
return this.eventPublisher;
|
||||
}
|
||||
|
||||
public EntityCallbacks getEntityCallbacks() {
|
||||
return this.entityCallbacks;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
return false;
|
||||
|
||||
BulkOperationContext that = (BulkOperationContext) o;
|
||||
|
||||
if (bulkMode != that.bulkMode)
|
||||
return false;
|
||||
if (!ObjectUtils.nullSafeEquals(this.entity, that.entity)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.queryMapper, that.queryMapper)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.updateMapper, that.updateMapper)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.eventPublisher, that.eventPublisher)) {
|
||||
return false;
|
||||
}
|
||||
return ObjectUtils.nullSafeEquals(this.entityCallbacks, that.entityCallbacks);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = bulkMode != null ? bulkMode.hashCode() : 0;
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(entity);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(queryMapper);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(updateMapper);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(eventPublisher);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(entityCallbacks);
|
||||
return result;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "DefaultBulkOperations.BulkOperationContext(bulkMode=" + this.getBulkMode() + ", entity="
|
||||
+ this.getEntity() + ", queryMapper=" + this.getQueryMapper() + ", updateMapper=" + this.getUpdateMapper()
|
||||
+ ", eventPublisher=" + this.getEventPublisher() + ", entityCallbacks=" + this.getEntityCallbacks() + ")";
|
||||
}
|
||||
@NonNull BulkMode bulkMode;
|
||||
@NonNull Optional<? extends MongoPersistentEntity<?>> entity;
|
||||
@NonNull QueryMapper queryMapper;
|
||||
@NonNull UpdateMapper updateMapper;
|
||||
ApplicationEventPublisher eventPublisher;
|
||||
EntityCallbacks entityCallbacks;
|
||||
}
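For context: Lombok's @Value, used in the hunk above, generates exactly the boilerplate that the explicit variant spells out by hand: private final fields, getters, an all-args constructor, equals/hashCode and toString. A minimal sketch of the annotation's effect (illustrative class, not part of the diff):

import lombok.NonNull;
import lombok.Value;

// @Value marks the class final, makes all fields private final and generates
// getters, an all-args constructor, equals(), hashCode() and toString().
@Value
class ExampleContext {

	@NonNull String collectionName; // @NonNull adds a null check to the generated constructor
	int batchSize;
}

// new ExampleContext("people", 100).getCollectionName() -> "people"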
|
||||
|
||||
/**
|
||||
@@ -647,50 +537,10 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
* @since 2.2
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private static final class SourceAwareWriteModelHolder {
|
||||
@Value
|
||||
private static class SourceAwareWriteModelHolder {
|
||||
|
||||
private final Object source;
|
||||
private final WriteModel<Document> model;
|
||||
|
||||
SourceAwareWriteModelHolder(Object source, WriteModel<Document> model) {
|
||||
|
||||
this.source = source;
|
||||
this.model = model;
|
||||
}
|
||||
|
||||
public Object getSource() {
|
||||
return this.source;
|
||||
}
|
||||
|
||||
public WriteModel<Document> getModel() {
|
||||
return this.model;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
return false;
|
||||
|
||||
SourceAwareWriteModelHolder that = (SourceAwareWriteModelHolder) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(this.source, that.source)) {
|
||||
return false;
|
||||
}
|
||||
return ObjectUtils.nullSafeEquals(this.model, that.model);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = ObjectUtils.nullSafeHashCode(model);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(source);
|
||||
return result;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "DefaultBulkOperations.SourceAwareWriteModelHolder(source=" + this.getSource() + ", model="
|
||||
+ this.getModel() + ")";
|
||||
}
|
||||
Object source;
|
||||
WriteModel<Document> model;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@ import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.index.IndexDefinition;
|
||||
import org.springframework.data.mongodb.core.index.IndexInfo;
|
||||
@@ -64,7 +64,7 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
* {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) {
|
||||
public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) {
|
||||
this(mongoDbFactory, collectionName, queryMapper, null);
|
||||
}
|
||||
|
||||
@@ -80,7 +80,7 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
* {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper,
|
||||
public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper,
|
||||
@Nullable Class<?> type) {
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!");
|
||||
|
||||
@@ -15,13 +15,13 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.index.IndexOperations;
|
||||
import org.springframework.data.mongodb.core.index.IndexOperationsProvider;
|
||||
|
||||
/**
|
||||
* {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDatabaseFactory}.
|
||||
* {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDbFactory}.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
@@ -29,14 +29,14 @@ import org.springframework.data.mongodb.core.index.IndexOperationsProvider;
|
||||
*/
|
||||
class DefaultIndexOperationsProvider implements IndexOperationsProvider {
|
||||
|
||||
private final MongoDatabaseFactory mongoDbFactory;
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
private final QueryMapper mapper;
|
||||
|
||||
/**
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
* @param mapper must not be {@literal null}.
|
||||
*/
|
||||
DefaultIndexOperationsProvider(MongoDatabaseFactory mongoDbFactory, QueryMapper mapper) {
|
||||
DefaultIndexOperationsProvider(MongoDbFactory mongoDbFactory, QueryMapper mapper) {
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.mapper = mapper;
|
||||
@@ -47,7 +47,7 @@ class DefaultIndexOperationsProvider implements IndexOperationsProvider {
|
||||
* @see org.springframework.data.mongodb.core.index.IndexOperationsProvider#reactiveIndexOps(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public IndexOperations indexOps(String collectionName, Class<?> type) {
|
||||
return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper, type);
|
||||
public IndexOperations indexOps(String collectionName) {
|
||||
return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
/*
 * Copyright 2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

/**
 * Encryption algorithms supported by MongoDB Client Side Field Level Encryption.
 *
 * @author Christoph Strobl
 * @since 3.3
 */
public final class EncryptionAlgorithms {

	public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic";
	public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Random = "AEAD_AES_256_CBC_HMAC_SHA_512-Random";

}
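The two constants above are meant to be referenced wherever an algorithm name is expected, for example on the @Encrypted mapping annotation that shows up later in this diff. A hedged usage sketch (the domain type is made up):

import org.springframework.data.mongodb.core.EncryptionAlgorithms;
import org.springframework.data.mongodb.core.mapping.Encrypted;

class Patient {

	// Deterministic encryption keeps equality queries on the encrypted field possible.
	@Encrypted(algorithm = EncryptionAlgorithms.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic)
	String ssn;
}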
|
||||
@@ -15,42 +15,35 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.mapping.IdentifierAccessor;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoWriter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.mongodb.core.mapping.TimeSeries;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import org.springframework.data.projection.EntityProjectionIntrospector;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.LinkedMultiValueMap;
|
||||
import org.springframework.util.MultiValueMap;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Common operations performed on an entity in the context of its mapping metadata.
|
||||
@@ -62,26 +55,12 @@ import org.springframework.util.StringUtils;
|
||||
* @see MongoTemplate
|
||||
* @see ReactiveMongoTemplate
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class EntityOperations {
|
||||
|
||||
private static final String ID_FIELD = "_id";
|
||||
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context;
|
||||
|
||||
private final EntityProjectionIntrospector introspector;
|
||||
|
||||
EntityOperations(MongoConverter converter) {
|
||||
this(converter.getMappingContext(), converter.getCustomConversions(), converter.getProjectionFactory());
|
||||
}
|
||||
|
||||
EntityOperations(MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context,
|
||||
CustomConversions conversions, ProjectionFactory projectionFactory) {
|
||||
this.context = context;
|
||||
this.introspector = EntityProjectionIntrospector.create(projectionFactory,
|
||||
EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy()
|
||||
.and(((target, underlyingType) -> !conversions.isSimpleType(target))),
|
||||
context);
|
||||
}
|
||||
private final @NonNull MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Entity} for the given bean.
|
||||
@@ -90,7 +69,7 @@ class EntityOperations {
|
||||
* @return new instance of {@link Entity}.
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
<T> Entity<T> forEntity(T entity) {
|
||||
public <T> Entity<T> forEntity(T entity) {
|
||||
|
||||
Assert.notNull(entity, "Bean must not be null!");
|
||||
|
||||
@@ -113,7 +92,7 @@ class EntityOperations {
|
||||
* @return new instance of {@link AdaptibleEntity}.
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
<T> AdaptibleEntity<T> forEntity(T entity, ConversionService conversionService) {
|
||||
public <T> AdaptibleEntity<T> forEntity(T entity, ConversionService conversionService) {
|
||||
|
||||
Assert.notNull(entity, "Bean must not be null!");
|
||||
Assert.notNull(conversionService, "ConversionService must not be null!");
|
||||
@@ -129,20 +108,6 @@ class EntityOperations {
|
||||
return AdaptibleMappedEntity.of(entity, context, conversionService);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param source can be {@literal null}.
|
||||
* @return {@literal true} if the given value is an {@literal array}, {@link Collection} or {@link Iterator}.
|
||||
* @since 3.2
|
||||
*/
|
||||
static boolean isCollectionLike(@Nullable Object source) {
|
||||
|
||||
if (source == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return ObjectUtils.isArray(source) || source instanceof Collection || source instanceof Iterator;
|
||||
}
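In other words the helper answers true for arrays, Collections and Iterators and false for anything else, including null. A quick illustration of the assumed behaviour (callable like this only from within the same package, since the method is package-private):

EntityOperations.isCollectionLike(new String[] { "luke" });          // true, array
EntityOperations.isCollectionLike(java.util.List.of("luke"));        // true, Collection
EntityOperations.isCollectionLike(java.util.List.of().iterator());   // true, Iterator
EntityOperations.isCollectionLike("luke");                           // false
EntityOperations.isCollectionLike(null);                             // false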
|
||||
|
||||
/**
|
||||
* @param entityClass should not be null.
|
||||
* @return the {@link MongoPersistentEntity#getCollection() collection name}.
|
||||
@@ -245,20 +210,6 @@ class EntityOperations {
|
||||
return UntypedOperations.instance();
|
||||
}
|
||||
|
||||
/**
|
||||
* Introspect the given {@link Class result type} in the context of the {@link Class entity type} whether the returned
|
||||
* type is a projection and what property paths are participating in the projection.
|
||||
*
|
||||
* @param resultType the type to project on. Must not be {@literal null}.
|
||||
* @param entityType the source domain type. Must not be {@literal null}.
|
||||
* @return the introspection result.
|
||||
* @since 3.4
|
||||
* @see EntityProjectionIntrospector#introspect(Class, Class)
|
||||
*/
|
||||
public <M, D> EntityProjection<M, D> introspectProjection(Class<M> resultType, Class<D> entityType) {
|
||||
return introspector.introspect(resultType, entityType);
|
||||
}
|
||||
|
||||
/**
|
||||
* A representation of information about an entity.
|
||||
*
|
||||
@@ -395,14 +346,11 @@ class EntityOperations {
|
||||
Number getVersion();
|
||||
}
|
||||
|
||||
@RequiredArgsConstructor
|
||||
private static class UnmappedEntity<T extends Map<String, Object>> implements AdaptibleEntity<T> {
|
||||
|
||||
private final T map;
|
||||
|
||||
protected UnmappedEntity(T map) {
|
||||
this.map = map;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getIdPropertyName()
|
||||
@@ -512,7 +460,7 @@ class EntityOperations {
|
||||
|
||||
private static class SimpleMappedEntity<T extends Map<String, Object>> extends UnmappedEntity<T> {
|
||||
|
||||
protected SimpleMappedEntity(T map) {
|
||||
SimpleMappedEntity(T map) {
|
||||
super(map);
|
||||
}
|
||||
|
||||
@@ -535,19 +483,12 @@ class EntityOperations {
|
||||
}
|
||||
}
|
||||
|
||||
@RequiredArgsConstructor(access = AccessLevel.PROTECTED)
|
||||
private static class MappedEntity<T> implements Entity<T> {
|
||||
|
||||
private final MongoPersistentEntity<?> entity;
|
||||
private final IdentifierAccessor idAccessor;
|
||||
private final PersistentPropertyAccessor<T> propertyAccessor;
|
||||
|
||||
protected MappedEntity(MongoPersistentEntity<?> entity, IdentifierAccessor idAccessor,
|
||||
PersistentPropertyAccessor<T> propertyAccessor) {
|
||||
|
||||
this.entity = entity;
|
||||
this.idAccessor = idAccessor;
|
||||
this.propertyAccessor = propertyAccessor;
|
||||
}
|
||||
private final @NonNull MongoPersistentEntity<?> entity;
|
||||
private final @NonNull IdentifierAccessor idAccessor;
|
||||
private final @NonNull PersistentPropertyAccessor<T> propertyAccessor;
|
||||
|
||||
private static <T> MappedEntity<T> of(T bean,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context) {
|
||||
@@ -812,36 +753,17 @@ class EntityOperations {
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation(Query query);
|
||||
|
||||
/**
|
||||
* Derive the applicable {@link CollectionOptions} for the given type.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
CollectionOptions getCollectionOptions();
|
||||
|
||||
/**
|
||||
* Map the fields of a given {@link TimeSeriesOptions} against the target domain type to consider potentially
|
||||
* annotated field names.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypedOperations} for generic entities that are not represented with {@link PersistentEntity} (e.g. custom
|
||||
* conversions).
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
enum UntypedOperations implements TypedOperations<Object> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
UntypedOperations() {}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public static <T> TypedOperations<T> instance() {
|
||||
return (TypedOperations) INSTANCE;
|
||||
@@ -869,16 +791,6 @@ class EntityOperations {
|
||||
|
||||
return query.getCollation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public CollectionOptions getCollectionOptions() {
|
||||
return CollectionOptions.empty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options) {
|
||||
return options;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -886,13 +798,10 @@ class EntityOperations {
|
||||
*
|
||||
* @param <T>
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static class TypedEntityOperations<T> implements TypedOperations<T> {
|
||||
|
||||
private final MongoPersistentEntity<T> entity;
|
||||
|
||||
protected TypedEntityOperations(MongoPersistentEntity<T> entity) {
|
||||
this.entity = entity;
|
||||
}
|
||||
private final @NonNull MongoPersistentEntity<T> entity;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -916,58 +825,6 @@ class EntityOperations {
|
||||
|
||||
return Optional.ofNullable(entity.getCollation());
|
||||
}
|
||||
|
||||
@Override
|
||||
public CollectionOptions getCollectionOptions() {
|
||||
|
||||
CollectionOptions collectionOptions = CollectionOptions.empty();
|
||||
if (entity.hasCollation()) {
|
||||
collectionOptions = collectionOptions.collation(entity.getCollation());
|
||||
}
|
||||
|
||||
if (entity.isAnnotationPresent(TimeSeries.class)) {
|
||||
|
||||
TimeSeries timeSeries = entity.getRequiredAnnotation(TimeSeries.class);
|
||||
|
||||
if (entity.getPersistentProperty(timeSeries.timeField()) == null) {
|
||||
throw new MappingException(String.format("Time series field '%s' does not exist in type %s",
|
||||
timeSeries.timeField(), entity.getName()));
|
||||
}
|
||||
|
||||
TimeSeriesOptions options = TimeSeriesOptions.timeSeries(timeSeries.timeField());
|
||||
if (StringUtils.hasText(timeSeries.metaField())) {
|
||||
|
||||
if (entity.getPersistentProperty(timeSeries.metaField()) == null) {
|
||||
throw new MappingException(
|
||||
String.format("Meta field '%s' does not exist in type %s", timeSeries.metaField(), entity.getName()));
|
||||
}
|
||||
|
||||
options = options.metaField(timeSeries.metaField());
|
||||
}
|
||||
if (!Granularity.DEFAULT.equals(timeSeries.granularity())) {
|
||||
options = options.granularity(timeSeries.granularity());
|
||||
}
|
||||
collectionOptions = collectionOptions.timeSeries(options);
|
||||
}
|
||||
|
||||
return collectionOptions;
|
||||
}
|
||||
|
||||
@Override
|
||||
public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions source) {
|
||||
|
||||
TimeSeriesOptions target = TimeSeriesOptions.timeSeries(mappedNameOrDefault(source.getTimeField()));
|
||||
|
||||
if (StringUtils.hasText(source.getMetaField())) {
|
||||
target = target.metaField(mappedNameOrDefault(source.getMetaField()));
|
||||
}
|
||||
return target.granularity(source.getGranularity());
|
||||
}
|
||||
|
||||
private String mappedNameOrDefault(String name) {
|
||||
MongoPersistentProperty persistentProperty = entity.getPersistentProperty(name);
|
||||
return persistentProperty != null ? persistentProperty.getFieldName() : name;
|
||||
}
|
||||
}
|
||||
|
||||
}
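getCollectionOptions() and mapTimeSeriesOptions(...) above derive the time-series collection settings from the @TimeSeries annotation, validate that the referenced properties exist and translate them to their mapped field names. A minimal sketch of a domain type they would act on (names are illustrative, not taken from the diff):

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.TimeSeries;
import org.springframework.data.mongodb.core.timeseries.Granularity;

@TimeSeries(collection = "measurements", timeField = "timestamp", metaField = "sensor", granularity = Granularity.MINUTES)
class Measurement {

	@Id String id;
	java.time.Instant timestamp; // must exist, otherwise getCollectionOptions() throws a MappingException
	@Field("s") String sensor;   // mapTimeSeriesOptions(...) resolves "sensor" to the mapped name "s"
	double value;
}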
|
||||
|
||||
@@ -90,8 +90,8 @@ public interface ExecutableAggregationOperation {
|
||||
 * Apply pipeline operations as specified and stream all matching elements. <br />
 * Returns a {@link CloseableIterator} that wraps the MongoDB {@link com.mongodb.client.FindIterable}.
 *
 * @return a {@link CloseableIterator} that wraps the MongoDB {@link com.mongodb.client.FindIterable} that needs to be closed.
 *         Never {@literal null}.
 * @return a {@link CloseableIterator} that wraps the MongoDB {@link com.mongodb.client.FindIterable} that needs
 *         to be closed. Never {@literal null}.
|
||||
*/
|
||||
CloseableIterator<T> stream();
|
||||
}
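Since the returned CloseableIterator keeps a server-side cursor open, callers are expected to close it. A hedged usage sketch (mongoOps, the collection and the types are illustrative; assumes static imports of Aggregation.newAggregation, Aggregation.match and Criteria.where):

// CloseableIterator also implements Closeable, so try-with-resources releases the cursor.
try (CloseableIterator<Jedi> jedis = mongoOps.aggregateAndReturn(Jedi.class)
		.inCollection("star-wars")
		.by(newAggregation(match(where("side").is("light"))))
		.stream()) {
	jedis.forEachRemaining(System.out::println);
}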
|
||||
|
||||
@@ -15,10 +15,16 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
@@ -29,13 +35,10 @@ import org.springframework.util.StringUtils;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableAggregationOperationSupport implements ExecutableAggregationOperation {
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
ExecutableAggregationOperationSupport(MongoTemplate template) {
|
||||
this.template = template;
|
||||
}
|
||||
private final @NonNull MongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -53,21 +56,15 @@ class ExecutableAggregationOperationSupport implements ExecutableAggregationOper
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
|
||||
static class ExecutableAggregationSupport<T>
|
||||
implements AggregationWithAggregation<T>, ExecutableAggregation<T>, TerminatingAggregation<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Class<T> domainType;
|
||||
private final Aggregation aggregation;
|
||||
private final String collection;
|
||||
|
||||
public ExecutableAggregationSupport(MongoTemplate template, Class<T> domainType, Aggregation aggregation,
|
||||
String collection) {
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.aggregation = aggregation;
|
||||
this.collection = collection;
|
||||
}
|
||||
@NonNull MongoTemplate template;
|
||||
@NonNull Class<T> domainType;
|
||||
@Nullable Aggregation aggregation;
|
||||
@Nullable String collection;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
||||
@@ -21,7 +21,6 @@ import java.util.stream.Stream;
|
||||
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -44,7 +43,7 @@ import com.mongodb.client.MongoCollection;
|
||||
* query(Human.class)
|
||||
* .inCollection("star-wars")
|
||||
* .as(Jedi.class)
|
||||
* .matching(where("firstname").is("luke"))
|
||||
* .matching(query(where("firstname").is("luke")))
|
||||
* .all();
|
||||
* </code>
|
||||
* </pre>
|
||||
@@ -118,18 +117,13 @@ public interface ExecutableFindOperation {
|
||||
/**
 * Stream all matching elements.
 *
 * @return a {@link Stream} that wraps the MongoDB {@link com.mongodb.client.FindIterable} that needs to be closed. Never
 *         {@literal null}.
 * @return a {@link Stream} that wraps the MongoDB {@link com.mongodb.client.FindIterable} that needs to be
 *         closed. Never {@literal null}.
 */
Stream<T> stream();

/**
 * Get the number of matching elements.
 * <br />
 * This method uses an {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) aggregation
 * execution} even for empty {@link Query queries}, which may have an impact on performance, but guarantees shard,
 * session and transaction compliance. In case an inaccurate count satisfies the application's needs, use
 * {@link MongoOperations#estimatedCount(String)} for empty queries instead.
 *
 * @return total number of matching elements.
 */
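In practice the fluent count() is exact but may be slower on large or sharded collections, while the estimated variant reads collection metadata and ignores any filter. A hedged comparison (mongoOps and the domain type are illustrative; assumes a static import of Criteria.where):

// Exact count via countDocuments: honours the criteria, sessions and transactions.
long lukes = mongoOps.query(Person.class)
		.matching(where("firstname").is("luke"))
		.count();

// Metadata-based estimate for the whole collection: fast, but no query filter applies.
long approx = mongoOps.estimatedCount("person");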
|
||||
@@ -176,18 +170,6 @@ public interface ExecutableFindOperation {
|
||||
*/
|
||||
TerminatingFind<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingFind}.
|
||||
* @throws IllegalArgumentException if criteria is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingFind<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
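The default method above simply wraps the criteria into a Query, so both forms below are equivalent. A short usage sketch (mongoOps and Person are illustrative; assumes static imports of Criteria.where and Query.query):

// Shortcut introduced in 3.0: pass the criteria directly ...
List<Person> lukes = mongoOps.query(Person.class)
		.matching(where("firstname").is("luke"))
		.all();

// ... which is the same as wrapping it in a Query yourself.
List<Person> sameLukes = mongoOps.query(Person.class)
		.matching(query(where("firstname").is("luke")))
		.all();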
|
||||
|
||||
/**
|
||||
* Set the filter query for the geoNear execution.
|
||||
*
|
||||
@@ -309,21 +291,9 @@ public interface ExecutableFindOperation {
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
*/
|
||||
TerminatingDistinct<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if criteria is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingDistinct<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -15,11 +15,17 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.dao.IncorrectResultSizeDataAccessException;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
@@ -41,15 +47,12 @@ import com.mongodb.client.FindIterable;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
ExecutableFindOperationSupport(MongoTemplate template) {
|
||||
this.template = template;
|
||||
}
|
||||
private final @NonNull MongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -68,23 +71,16 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
|
||||
static class ExecutableFindSupport<T>
|
||||
implements ExecutableFind<T>, FindWithCollection<T>, FindWithProjection<T>, FindWithQuery<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Class<?> domainType;
|
||||
private final Class<T> returnType;
|
||||
@Nullable private final String collection;
|
||||
private final Query query;
|
||||
|
||||
ExecutableFindSupport(MongoTemplate template, Class<?> domainType, Class<T> returnType,
|
||||
String collection, Query query) {
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.returnType = returnType;
|
||||
this.collection = collection;
|
||||
this.query = query;
|
||||
}
|
||||
@NonNull MongoTemplate template;
|
||||
@NonNull Class<?> domainType;
|
||||
Class<T> returnType;
|
||||
@Nullable String collection;
|
||||
Query query;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
||||
@@ -15,6 +15,11 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
|
||||
@@ -32,13 +37,10 @@ import com.mongodb.bulk.BulkWriteResult;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableInsertOperationSupport implements ExecutableInsertOperation {
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
ExecutableInsertOperationSupport(MongoTemplate template) {
|
||||
this.template = template;
|
||||
}
|
||||
private final @NonNull MongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -56,20 +58,14 @@ class ExecutableInsertOperationSupport implements ExecutableInsertOperation {
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
|
||||
static class ExecutableInsertSupport<T> implements ExecutableInsert<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Class<T> domainType;
|
||||
@Nullable private final String collection;
|
||||
@Nullable private final BulkMode bulkMode;
|
||||
|
||||
ExecutableInsertSupport(MongoTemplate template, Class<T> domainType, String collection, BulkMode bulkMode) {
|
||||
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.collection = collection;
|
||||
this.bulkMode = bulkMode;
|
||||
}
|
||||
@NonNull MongoTemplate template;
|
||||
@NonNull Class<T> domainType;
|
||||
@Nullable String collection;
|
||||
@Nullable BulkMode bulkMode;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
||||
@@ -19,7 +19,6 @@ import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
/**
|
||||
@@ -31,7 +30,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
* The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
|
||||
* via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the
|
||||
* collection name for the execution.
|
||||
*
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* mapReduce(Human.class)
|
||||
@@ -45,7 +44,6 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
public interface ExecutableMapReduceOperation {
|
||||
@@ -148,18 +146,6 @@ public interface ExecutableMapReduceOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingMapReduce<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingMapReduce}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingMapReduce<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
@@ -29,17 +32,12 @@ import org.springframework.util.StringUtils;
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
ExecutableMapReduceOperationSupport(MongoTemplate template) {
|
||||
|
||||
Assert.notNull(template, "Template must not be null!");
|
||||
this.template = template;
|
||||
}
|
||||
private final @NonNull MongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
||||
@@ -17,7 +17,6 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
@@ -120,18 +119,6 @@ public interface ExecutableRemoveOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingRemove<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingRemove}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingRemove<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -15,6 +15,11 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
@@ -31,15 +36,12 @@ import com.mongodb.client.result.DeleteResult;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableRemoveOperationSupport implements ExecutableRemoveOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final MongoTemplate tempate;
|
||||
|
||||
public ExecutableRemoveOperationSupport(MongoTemplate tempate) {
|
||||
this.tempate = tempate;
|
||||
}
|
||||
private final @NonNull MongoTemplate tempate;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -57,19 +59,14 @@ class ExecutableRemoveOperationSupport implements ExecutableRemoveOperation {
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
|
||||
static class ExecutableRemoveSupport<T> implements ExecutableRemove<T>, RemoveWithCollection<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Class<T> domainType;
|
||||
private final Query query;
|
||||
@Nullable private final String collection;
|
||||
|
||||
public ExecutableRemoveSupport(MongoTemplate template, Class<T> domainType, Query query, String collection) {
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.query = query;
|
||||
this.collection = collection;
|
||||
}
|
||||
@NonNull MongoTemplate template;
|
||||
@NonNull Class<T> domainType;
|
||||
Query query;
|
||||
@Nullable String collection;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
||||
@@ -17,11 +17,8 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.client.result.UpdateResult;
|
||||
@@ -89,7 +86,7 @@ public interface ExecutableUpdateOperation {
|
||||
|
||||
/**
|
||||
* Trigger
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* execution by calling one of the terminating methods.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
@@ -154,16 +151,13 @@ public interface ExecutableUpdateOperation {
|
||||
interface UpdateWithUpdate<T> {
|
||||
|
||||
/**
|
||||
* Set the {@link UpdateDefinition} to be applied.
|
||||
* Set the {@link Update} to be applied.
|
||||
*
|
||||
* @param update must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingUpdate}.
|
||||
* @throws IllegalArgumentException if update is {@literal null}.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
TerminatingUpdate<T> apply(UpdateDefinition update);
|
||||
TerminatingUpdate<T> apply(Update update);
|
||||
|
||||
/**
|
||||
* Specify {@code replacement} object.
|
||||
@@ -211,18 +205,6 @@ public interface ExecutableUpdateOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
UpdateWithUpdate<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link UpdateWithUpdate}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default UpdateWithUpdate<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
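UpdateWithUpdate.apply(...) accepts any UpdateDefinition, so a plain Update and an AggregationUpdate pipeline fit the same fluent chain. A hedged sketch (mongoOps, Person and the field names are illustrative; assumes a static import of Criteria.where):

// Classic modifier-style update ...
UpdateResult modified = mongoOps.update(Person.class)
		.matching(where("lastname").is("skywalker"))
		.apply(new Update().set("side", "light"))
		.all();

// ... or an aggregation pipeline update (MongoDB 4.2+); both are UpdateDefinitions.
UpdateResult piped = mongoOps.update(Person.class)
		.matching(where("lastname").is("skywalker"))
		.apply(AggregationUpdate.update().set("side").toValue("light"))
		.all();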
|
||||
|
||||
/**
|
||||
|
||||
@@ -15,8 +15,13 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
@@ -30,15 +35,12 @@ import com.mongodb.client.result.UpdateResult;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
ExecutableUpdateOperationSupport(MongoTemplate template) {
|
||||
this.template = template;
|
||||
}
|
||||
private final @NonNull MongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -56,41 +58,28 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
|
||||
static class ExecutableUpdateSupport<T>
|
||||
implements ExecutableUpdate<T>, UpdateWithCollection<T>, UpdateWithQuery<T>, TerminatingUpdate<T>,
|
||||
FindAndReplaceWithOptions<T>, TerminatingFindAndReplace<T>, FindAndReplaceWithProjection<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Class domainType;
|
||||
private final Query query;
|
||||
@Nullable private final UpdateDefinition update;
|
||||
@Nullable private final String collection;
|
||||
@Nullable private final FindAndModifyOptions findAndModifyOptions;
|
||||
@Nullable private final FindAndReplaceOptions findAndReplaceOptions;
|
||||
@Nullable private final Object replacement;
|
||||
private final Class<T> targetType;
|
||||
|
||||
ExecutableUpdateSupport(MongoTemplate template, Class domainType, Query query, UpdateDefinition update,
|
||||
String collection, FindAndModifyOptions findAndModifyOptions, FindAndReplaceOptions findAndReplaceOptions,
|
||||
Object replacement, Class<T> targetType) {
|
||||
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.query = query;
|
||||
this.update = update;
|
||||
this.collection = collection;
|
||||
this.findAndModifyOptions = findAndModifyOptions;
|
||||
this.findAndReplaceOptions = findAndReplaceOptions;
|
||||
this.replacement = replacement;
|
||||
this.targetType = targetType;
|
||||
}
|
||||
@NonNull MongoTemplate template;
|
||||
@NonNull Class domainType;
|
||||
Query query;
|
||||
@Nullable Update update;
|
||||
@Nullable String collection;
|
||||
@Nullable FindAndModifyOptions findAndModifyOptions;
|
||||
@Nullable FindAndReplaceOptions findAndReplaceOptions;
|
||||
@Nullable Object replacement;
|
||||
@NonNull Class<T> targetType;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#apply(org.springframework.data.mongodb.core.query.UpdateDefinition)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#apply(Update)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingUpdate<T> apply(UpdateDefinition update) {
|
||||
public TerminatingUpdate<T> apply(Update update) {
|
||||
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
/**
|
||||
* Options for
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>.
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>.
|
||||
* <br />
|
||||
* Defaults to
|
||||
* <dl>
|
||||
|
||||
@@ -115,10 +115,6 @@ abstract class IndexConverters {
|
||||
ops = ops.collation(fromDocument(indexOptions.get("collation", Document.class)));
|
||||
}
|
||||
|
||||
if (indexOptions.containsKey("wildcardProjection")) {
|
||||
ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class));
|
||||
}
|
||||
|
||||
return ops;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
@@ -24,6 +27,8 @@ import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.util.StreamUtils;
|
||||
|
||||
import com.mongodb.client.model.Filters;
|
||||
|
||||
/**
|
||||
 * A MongoDB document in its mapped state, i.e. after a source document has been mapped using the mapping information
 * of the entity the source document was supposed to represent.
|
||||
@@ -31,20 +36,13 @@ import org.springframework.data.util.StreamUtils;
|
||||
* @author Oliver Gierke
|
||||
* @since 2.1
|
||||
*/
|
||||
@RequiredArgsConstructor(staticName = "of")
|
||||
public class MappedDocument {
|
||||
|
||||
private static final String ID_FIELD = "_id";
|
||||
private static final Document ID_ONLY_PROJECTION = new Document(ID_FIELD, 1);
|
||||
|
||||
private final Document document;
|
||||
|
||||
private MappedDocument(Document document) {
|
||||
this.document = document;
|
||||
}
|
||||
|
||||
public static MappedDocument of(Document document) {
|
||||
return new MappedDocument(document);
|
||||
}
|
||||
private final @Getter Document document;
|
||||
|
||||
public static Document getIdOnlyProjection() {
|
||||
return ID_ONLY_PROJECTION;
|
||||
@@ -82,21 +80,13 @@ public class MappedDocument {
|
||||
}
|
||||
|
||||
public Bson getIdFilter() {
|
||||
return new Document(ID_FIELD, document.get(ID_FIELD));
|
||||
}
|
||||
|
||||
public Object get(String key) {
|
||||
return document.get(key);
|
||||
return Filters.eq(ID_FIELD, document.get(ID_FIELD));
|
||||
}
|
||||
|
||||
public UpdateDefinition updateWithoutId() {
|
||||
return new MappedUpdate(Update.fromDocument(document, ID_FIELD));
|
||||
}
|
||||
|
||||
public Document getDocument() {
|
||||
return this.document;
|
||||
}
|
||||
|
||||
/**
|
||||
* An {@link UpdateDefinition} that indicates that the {@link #getUpdateObject() update object} has already been
|
||||
* mapped to the specific domain type.
|
||||
@@ -156,14 +146,5 @@ public class MappedDocument {
|
||||
public List<ArrayFilter> getArrayFilters() {
|
||||
return delegate.getArrayFilters();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#hasArrayFilters()
|
||||
*/
|
||||
@Override
|
||||
public boolean hasArrayFilters() {
|
||||
return delegate.hasArrayFilters();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,20 +20,13 @@ import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Encrypted;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ArrayJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type;
|
||||
@@ -41,12 +34,10 @@ import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder;
|
||||
import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain
|
||||
@@ -61,7 +52,6 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
|
||||
private final MongoConverter converter;
|
||||
private final MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final Predicate<JsonSchemaPropertyContext> filter;
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link MappingMongoJsonSchemaCreator}.
|
||||
@@ -71,24 +61,10 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
@SuppressWarnings("unchecked")
|
||||
MappingMongoJsonSchemaCreator(MongoConverter converter) {
|
||||
|
||||
this(converter, (MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty>) converter.getMappingContext(),
|
||||
(property) -> true);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
MappingMongoJsonSchemaCreator(MongoConverter converter,
|
||||
MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
|
||||
Predicate<JsonSchemaPropertyContext> filter) {
|
||||
|
||||
Assert.notNull(converter, "Converter must not be null!");
|
||||
this.converter = converter;
|
||||
this.mappingContext = mappingContext;
|
||||
this.filter = filter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoJsonSchemaCreator filter(Predicate<JsonSchemaPropertyContext> filter) {
|
||||
return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter);
|
||||
this.mappingContext = (MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty>) converter
|
||||
.getMappingContext();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -101,29 +77,11 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(type);
|
||||
MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder();
|
||||
|
||||
{
|
||||
Encrypted encrypted = entity.findAnnotation(Encrypted.class);
|
||||
if (encrypted != null) {
|
||||
|
||||
Document encryptionMetadata = new Document();
|
||||
|
||||
Collection<Object> encryptionKeyIds = entity.getEncryptionKeyIds();
|
||||
if (!CollectionUtils.isEmpty(encryptionKeyIds)) {
|
||||
encryptionMetadata.append("keyId", encryptionKeyIds);
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(encrypted.algorithm())) {
|
||||
encryptionMetadata.append("algorithm", encrypted.algorithm());
|
||||
}
|
||||
|
||||
schemaBuilder.encryptionMetadata(encryptionMetadata);
|
||||
}
|
||||
}
|
||||
|
||||
List<JsonSchemaProperty> schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity);
|
||||
schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0]));
|
||||
|
||||
return schemaBuilder.build();
|
||||
|
||||
}
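The block above only contributes encryptionMetadata when the entity itself carries the annotation; property-level @Encrypted is handled further down. A hedged example of a type it would react to (key id and field are illustrative):

// Class-level @Encrypted feeds keyId/algorithm into the generated $jsonSchema encryption metadata,
// the annotated property is then marked as an encrypted field.
@Encrypted(keyId = "xKVup8B1Q+CkHaVRx+qa+g==", algorithm = EncryptionAlgorithms.AEAD_AES_256_CBC_HMAC_SHA_512_Random)
class Patient {

	@Encrypted
	String ssn;
}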
|
||||
|
||||
private List<JsonSchemaProperty> computePropertiesForEntity(List<MongoPersistentProperty> path,
|
||||
@@ -135,11 +93,6 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
|
||||
List<MongoPersistentProperty> currentPath = new ArrayList<>(path);
|
||||
|
||||
if (!filter.test(new PropertyContext(
|
||||
currentPath.stream().map(PersistentProperty::getName).collect(Collectors.joining(".")), nested))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (path.contains(nested)) { // cycle guard
|
||||
schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)),
|
||||
Object.class, false));
|
||||
@@ -161,88 +114,21 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
Class<?> rawTargetType = computeTargetType(property); // target type before conversion
|
||||
Class<?> targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type
|
||||
|
||||
if (!isCollection(property) && property.isEntity() && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) {
|
||||
if (property.isEntity() && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) {
|
||||
return createObjectSchemaPropertyForEntity(path, property, required);
|
||||
}
|
||||
|
||||
String fieldName = computePropertyFieldName(property);
|
||||
|
||||
JsonSchemaProperty schemaProperty;
|
||||
if (isCollection(property)) {
|
||||
schemaProperty = createArraySchemaProperty(fieldName, property, required);
|
||||
if (property.isCollectionLike()) {
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
} else if (property.isMap()) {
|
||||
schemaProperty = createSchemaProperty(fieldName, Type.objectType(), required);
|
||||
return createSchemaProperty(fieldName, Type.objectType(), required);
|
||||
} else if (ClassUtils.isAssignable(Enum.class, targetType)) {
|
||||
schemaProperty = createEnumSchemaProperty(fieldName, targetType, required);
|
||||
} else {
|
||||
schemaProperty = createSchemaProperty(fieldName, targetType, required);
|
||||
return createEnumSchemaProperty(fieldName, targetType, required);
|
||||
}
|
||||
|
||||
return applyEncryptionDataIfNecessary(property, schemaProperty);
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createArraySchemaProperty(String fieldName, MongoPersistentProperty property,
|
||||
boolean required) {
|
||||
|
||||
ArrayJsonSchemaProperty schemaProperty = JsonSchemaProperty.array(fieldName);
|
||||
|
||||
if (isSpecificType(property)) {
|
||||
schemaProperty = potentiallyEnhanceArraySchemaProperty(property, schemaProperty);
|
||||
}
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(schemaProperty, required);
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
private ArrayJsonSchemaProperty potentiallyEnhanceArraySchemaProperty(MongoPersistentProperty property,
|
||||
ArrayJsonSchemaProperty schemaProperty) {
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = mappingContext
        .getPersistentEntity(property.getTypeInformation().getRequiredComponentType());

if (persistentEntity != null) {

    List<JsonSchemaProperty> nestedProperties = computePropertiesForEntity(Collections.emptyList(), persistentEntity);

    if (nestedProperties.isEmpty()) {
        return schemaProperty;
    }

    return schemaProperty
            .items(JsonSchemaObject.object().properties(nestedProperties.toArray(new JsonSchemaProperty[0])));
}

if (ClassUtils.isAssignable(Enum.class, property.getActualType())) {

    List<Object> possibleValues = getPossibleEnumValues((Class<Enum>) property.getActualType());

    return schemaProperty
            .items(createSchemaObject(computeTargetType(property.getActualType(), possibleValues), possibleValues));
}

return schemaProperty.items(JsonSchemaObject.of(property.getActualType()));
}

private boolean isSpecificType(MongoPersistentProperty property) {
    return !ClassTypeInformation.OBJECT.equals(property.getTypeInformation().getActualType());
}

private JsonSchemaProperty applyEncryptionDataIfNecessary(MongoPersistentProperty property,
        JsonSchemaProperty schemaProperty) {

    Encrypted encrypted = property.findAnnotation(Encrypted.class);
    if (encrypted == null) {
        return schemaProperty;
    }

    EncryptedJsonSchemaProperty enc = new EncryptedJsonSchemaProperty(schemaProperty);
    if (StringUtils.hasText(encrypted.algorithm())) {
        enc = enc.algorithm(encrypted.algorithm());
    }
    if (!ObjectUtils.isEmpty(encrypted.keyId())) {
        enc = enc.keys(property.getEncryptionKeyIds());
    }
    return enc;
    return createSchemaProperty(fieldName, targetType, required);
}

private JsonSchemaProperty createObjectSchemaPropertyForEntity(List<MongoPersistentProperty> path,
@@ -256,12 +142,15 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
        target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required);
}

@SuppressWarnings({ "unchecked", "rawtypes" })
private JsonSchemaProperty createEnumSchemaProperty(String fieldName, Class<?> targetType, boolean required) {

    List<Object> possibleValues = getPossibleEnumValues((Class<Enum>) targetType);
    List<Object> possibleValues = new ArrayList<>();

    targetType = computeTargetType(targetType, possibleValues);
    for (Object enumValue : EnumSet.allOf((Class) targetType)) {
        possibleValues.add(converter.convertToMongoType(enumValue));
    }

    targetType = possibleValues.isEmpty() ? targetType : possibleValues.iterator().next().getClass();
    return createSchemaProperty(fieldName, targetType, required, possibleValues);
}

@@ -272,20 +161,14 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required,
        Collection<?> possibleValues) {

    TypedJsonSchemaObject schemaObject = createSchemaObject(type, possibleValues);

    return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required);
}

private TypedJsonSchemaObject createSchemaObject(Object type, Collection<?> possibleValues) {

    TypedJsonSchemaObject schemaObject = type instanceof Type ? JsonSchemaObject.of(Type.class.cast(type))
            : JsonSchemaObject.of(Class.class.cast(type));

    if (!CollectionUtils.isEmpty(possibleValues)) {
        schemaObject = schemaObject.possibleValues(possibleValues);
    }
    return schemaObject;

    return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required);
}

private String computePropertyFieldName(PersistentProperty property) {
@@ -316,53 +199,12 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
    return mongoProperty.getFieldType() != mongoProperty.getActualType() ? Object.class : mongoProperty.getFieldType();
}

private static Class<?> computeTargetType(Class<?> fallback, List<Object> possibleValues) {
    return possibleValues.isEmpty() ? fallback : possibleValues.iterator().next().getClass();
}

private <E extends Enum<E>> List<Object> getPossibleEnumValues(Class<E> targetType) {

    EnumSet<E> enumSet = EnumSet.allOf(targetType);
    List<Object> possibleValues = new ArrayList<>(enumSet.size());

    for (Object enumValue : enumSet) {
        possibleValues.add(converter.convertToMongoType(enumValue));
    }

    return possibleValues;
}

private static boolean isCollection(MongoPersistentProperty property) {
    return property.isCollectionLike() && !property.getType().equals(byte[].class);
}

static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) {
    return required ? JsonSchemaProperty.required(property) : property;
}

class PropertyContext implements JsonSchemaPropertyContext {

    private final String path;
    private final MongoPersistentProperty property;

    public PropertyContext(String path, MongoPersistentProperty property) {
        this.path = path;
        this.property = property;
    }

    @Override
    public String getPath() {
        return path;
    }

    @Override
    public MongoPersistentProperty getProperty() {
    if (!required) {
        return property;
    }

    @Override
    public <T> MongoPersistentEntity<T> resolveEntity(MongoPersistentProperty property) {
        return (MongoPersistentEntity<T>) mappingContext.getPersistentEntity(property);
    }
    return JsonSchemaProperty.required(property);
    }
}
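
For orientation, the reworked creator derives one JsonSchemaProperty per mapped property: nested entities become object properties, enums are restricted to their converted values, collections go through items(...), and fields annotated with @Encrypted are wrapped in encryption metadata. A minimal sketch of how a schema might be obtained from it (the Person type is hypothetical and used only for illustration; MongoJsonSchemaCreator.create(converter) and createSchemaFor(...) are the public entry points of this API):

    // Hypothetical domain type, for illustration only
    class Person {
        enum Gender { MALE, FEMALE }
        String name;
        Gender gender;
        @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
        String ssn;
    }

    // 'converter' is assumed to be an already configured MappingMongoConverter
    MongoJsonSchema schema = MongoJsonSchemaCreator.create(converter).createSchemaFor(Person.class);
    org.bson.Document document = schema.toDocument(); // { "$jsonSchema" : { "properties" : { "gender" : { "enum" : [ ... ] }, ... } } }
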
@@ -20,7 +20,7 @@ import org.springframework.jmx.export.annotation.ManagedOperation;
import org.springframework.jmx.export.annotation.ManagedResource;
import org.springframework.util.Assert;

import com.mongodb.client.MongoClient;
import com.mongodb.MongoClient;
import com.mongodb.client.MongoDatabase;

/**
@@ -34,13 +34,24 @@ import com.mongodb.client.MongoDatabase;
@ManagedResource(description = "Mongo Admin Operations")
public class MongoAdmin implements MongoAdminOperations {

    private final MongoClient mongoClient;
    private final Object mongoClient;

    /**
     * @param mongoClient
     * @deprecated since 2.2 in favor of {@link MongoAdmin#MongoAdmin(com.mongodb.client.MongoClient)}.
     */
    @Deprecated
    public MongoAdmin(MongoClient mongoClient) {

        Assert.notNull(mongoClient, "MongoClient must not be null!");
        this.mongoClient = mongoClient;
    }

    /**
     * @param client the underlying {@link com.mongodb.client.MongoClient} used for data access.
     * @since 2.2
     */
    public MongoAdmin(MongoClient client) {
    public MongoAdmin(com.mongodb.client.MongoClient client) {

        Assert.notNull(client, "Client must not be null!");
        this.mongoClient = client;
@@ -77,6 +88,11 @@ public class MongoAdmin implements MongoAdminOperations {
    }

    MongoDatabase getDB(String databaseName) {
        return mongoClient.getDatabase(databaseName);

        if (mongoClient instanceof MongoClient) {
            return ((MongoClient) mongoClient).getDatabase(databaseName);
        }

        return ((com.mongodb.client.MongoClient) mongoClient).getDatabase(databaseName);
    }
}
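
Because the field is now typed as Object, MongoAdmin accepts either the legacy com.mongodb.MongoClient or the new com.mongodb.client.MongoClient, and getDB(...) dispatches on the runtime type. A usage sketch against the new client API (connection string and database name are placeholders; createDatabase/dropDatabase are the operations declared on MongoAdminOperations):

    com.mongodb.client.MongoClient client = com.mongodb.client.MongoClients.create("mongodb://localhost:27017");
    MongoAdmin admin = new MongoAdmin(client);
    admin.createDatabase("inventory");                 // exposed via JMX as a @ManagedOperation
    MongoDatabase database = admin.getDB("inventory");
    admin.dropDatabase("inventory");
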
@@ -16,72 +16,70 @@
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.Arrays;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.SpringDataMongoDB;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ConnectionString;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientOptions;
|
||||
import com.mongodb.MongoCredential;
|
||||
import com.mongodb.ServerAddress;
|
||||
import com.mongodb.client.MongoClient;
|
||||
import com.mongodb.client.MongoClients;
|
||||
import com.mongodb.connection.ClusterSettings;
|
||||
import com.mongodb.connection.ConnectionPoolSettings;
|
||||
import com.mongodb.connection.ServerSettings;
|
||||
import com.mongodb.connection.SocketSettings;
|
||||
import com.mongodb.connection.SslSettings;
|
||||
import com.mongodb.event.ClusterListener;
|
||||
|
||||
/**
|
||||
* Convenient factory for configuring MongoDB.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
* @deprecated since 2.2 - There is no replacement for this {@link org.springframework.beans.factory.FactoryBean} at
|
||||
* this time. However moving forward the {@link org.springframework.beans.factory.FactoryBean} will be
|
||||
* suitable to provide instances of {@link com.mongodb.client.MongoClient}.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> implements PersistenceExceptionTranslator {
|
||||
|
||||
private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator();
|
||||
|
||||
private @Nullable MongoClientSettings mongoClientSettings;
|
||||
private @Nullable MongoClientOptions mongoClientOptions;
|
||||
private @Nullable String host;
|
||||
private @Nullable Integer port;
|
||||
private @Nullable List<MongoCredential> credential = null;
|
||||
private @Nullable ConnectionString connectionString;
|
||||
private @Nullable String replicaSet = null;
|
||||
private List<ServerAddress> replicaSetSeeds = Collections.emptyList();
|
||||
private List<MongoCredential> credentials = Collections.emptyList();
|
||||
|
||||
private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR;
|
||||
|
||||
/**
|
||||
* Set the {@link MongoClientSettings} to be used when creating {@link MongoClient}.
|
||||
* Set the {@link MongoClientOptions} to be used when creating {@link MongoClient}.
|
||||
*
|
||||
* @param mongoClientOptions
|
||||
*/
|
||||
public void setMongoClientSettings(@Nullable MongoClientSettings mongoClientOptions) {
|
||||
this.mongoClientSettings = mongoClientOptions;
|
||||
public void setMongoClientOptions(@Nullable MongoClientOptions mongoClientOptions) {
|
||||
this.mongoClientOptions = mongoClientOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the list of credentials to be used when creating {@link MongoClient}.
|
||||
*
|
||||
* @param credential can be {@literal null}.
|
||||
* @param credentials can be {@literal null}.
|
||||
*/
|
||||
public void setCredential(@Nullable MongoCredential[] credential) {
|
||||
this.credential = Arrays.asList(credential);
|
||||
public void setCredentials(@Nullable MongoCredential[] credentials) {
|
||||
this.credentials = filterNonNullElementsAsList(credentials);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the list of {@link ServerAddress} to build up a replica set for.
|
||||
*
|
||||
* @param replicaSetSeeds can be {@literal null}.
|
||||
*/
|
||||
public void setReplicaSetSeeds(@Nullable ServerAddress[] replicaSetSeeds) {
|
||||
this.replicaSetSeeds = filterNonNullElementsAsList(replicaSetSeeds);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -102,14 +100,6 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
|
||||
this.port = port;
|
||||
}
|
||||
|
||||
public void setConnectionString(@Nullable ConnectionString connectionString) {
|
||||
this.connectionString = connectionString;
|
||||
}
|
||||
|
||||
public void setReplicaSet(@Nullable String replicaSet) {
|
||||
this.replicaSet = replicaSet;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link PersistenceExceptionTranslator} to use.
|
||||
*
|
||||
@@ -142,198 +132,12 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
|
||||
*/
|
||||
@Override
|
||||
protected MongoClient createInstance() throws Exception {
|
||||
return createMongoClient(computeClientSetting());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create {@link MongoClientSettings} based on configuration and priority (lower is better).
|
||||
* <ol>
|
||||
* <li>{@link MongoClientFactoryBean#mongoClientSettings}</li>
|
||||
* <li>{@link MongoClientFactoryBean#connectionString}</li>
|
||||
* <li>default {@link MongoClientSettings}</li>
|
||||
* </ol>
|
||||
*
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClientSettings computeClientSetting() {
|
||||
|
||||
if (connectionString != null && (StringUtils.hasText(host) || port != null)) {
|
||||
throw new IllegalStateException("ConnectionString and host/port configuration exclude one another!");
|
||||
if (mongoClientOptions == null) {
|
||||
mongoClientOptions = MongoClientOptions.builder().build();
|
||||
}
|
||||
|
||||
ConnectionString connectionString = this.connectionString != null ? this.connectionString
|
||||
: new ConnectionString(String.format("mongodb://%s:%s", getOrDefault(host, ServerAddress.defaultHost()),
|
||||
getOrDefault(port, "" + ServerAddress.defaultPort())));
|
||||
|
||||
Builder builder = MongoClientSettings.builder().applyConnectionString(connectionString);
|
||||
builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY);
|
||||
|
||||
if (mongoClientSettings != null) {
|
||||
|
||||
MongoClientSettings defaultSettings = MongoClientSettings.builder().build();
|
||||
|
||||
SslSettings sslSettings = mongoClientSettings.getSslSettings();
|
||||
ClusterSettings clusterSettings = mongoClientSettings.getClusterSettings();
|
||||
ConnectionPoolSettings connectionPoolSettings = mongoClientSettings.getConnectionPoolSettings();
|
||||
SocketSettings socketSettings = mongoClientSettings.getSocketSettings();
|
||||
ServerSettings serverSettings = mongoClientSettings.getServerSettings();
|
||||
|
||||
builder = builder //
|
||||
.applicationName(computeSettingsValue(defaultSettings.getApplicationName(),
|
||||
mongoClientSettings.getApplicationName(), connectionString.getApplicationName())) //
|
||||
.applyToSslSettings(settings -> {
|
||||
|
||||
applySettings(settings::enabled, computeSettingsValue(SslSettings::isEnabled,
|
||||
defaultSettings.getSslSettings(), sslSettings, connectionString.getSslEnabled()));
|
||||
applySettings(settings::invalidHostNameAllowed, (computeSettingsValue(SslSettings::isInvalidHostNameAllowed,
|
||||
defaultSettings.getSslSettings(), sslSettings, connectionString.getSslInvalidHostnameAllowed())));
|
||||
settings.context(sslSettings.getContext());
|
||||
}).applyToClusterSettings(settings -> {
|
||||
|
||||
applySettings(settings::hosts,
|
||||
computeSettingsValue(ClusterSettings::getHosts, defaultSettings.getClusterSettings(), clusterSettings,
|
||||
connectionString.getHosts().stream().map(ServerAddress::new).collect(Collectors.toList())));
|
||||
|
||||
applySettings(settings::requiredReplicaSetName,
|
||||
computeSettingsValue(ClusterSettings::getRequiredReplicaSetName, defaultSettings.getClusterSettings(),
|
||||
clusterSettings, connectionString.getRequiredReplicaSetName()));
|
||||
|
||||
applySettings(settings::srvHost, computeSettingsValue(ClusterSettings::getSrvHost,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
|
||||
applySettings(settings::mode, computeSettingsValue(ClusterSettings::getMode,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
|
||||
applySettings(it -> settings.localThreshold(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ClusterSettings it) -> it.getLocalThreshold(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getClusterSettings(), clusterSettings, connectionString.getLocalThreshold()));
|
||||
|
||||
applySettings(settings::requiredClusterType, computeSettingsValue(ClusterSettings::getRequiredClusterType,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
applySettings(it -> settings.serverSelectionTimeout(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ClusterSettings it) -> it.getServerSelectionTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getClusterSettings(), clusterSettings,
|
||||
connectionString.getServerSelectionTimeout()));
|
||||
|
||||
applySettings(settings::serverSelector, computeSettingsValue(ClusterSettings::getServerSelector,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
List<ClusterListener> clusterListeners = computeSettingsValue(ClusterSettings::getClusterListeners,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null);
|
||||
if (clusterListeners != null) {
|
||||
clusterListeners.forEach(settings::addClusterListener);
|
||||
}
|
||||
}) //
|
||||
.applyToConnectionPoolSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.maintenanceFrequency(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaintenanceFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null));
|
||||
|
||||
applySettings(it -> settings.maxConnectionIdleTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionIdleTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxConnectionIdleTime()));
|
||||
|
||||
applySettings(it -> settings.maxConnectionLifeTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionLifeTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxConnectionLifeTime()));
|
||||
|
||||
applySettings(it -> settings.maxWaitTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxWaitTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxWaitTime()));
|
||||
|
||||
applySettings(it -> settings.maintenanceInitialDelay(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue(
|
||||
(ConnectionPoolSettings it) -> it.getMaintenanceInitialDelay(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null));
|
||||
|
||||
applySettings(settings::minSize,
|
||||
computeSettingsValue(ConnectionPoolSettings::getMinSize, defaultSettings.getConnectionPoolSettings(),
|
||||
connectionPoolSettings, connectionString.getMinConnectionPoolSize()));
|
||||
applySettings(settings::maxSize,
|
||||
computeSettingsValue(ConnectionPoolSettings::getMaxSize, defaultSettings.getConnectionPoolSettings(),
|
||||
connectionPoolSettings, connectionString.getMaxConnectionPoolSize()));
|
||||
}) //
|
||||
.applyToSocketSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.connectTimeout(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((SocketSettings it) -> it.getConnectTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getSocketSettings(), socketSettings, connectionString.getConnectTimeout()));
|
||||
|
||||
applySettings(it -> settings.readTimeout(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((SocketSettings it) -> it.getReadTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getSocketSettings(), socketSettings, connectionString.getSocketTimeout()));
|
||||
applySettings(settings::receiveBufferSize, computeSettingsValue(SocketSettings::getReceiveBufferSize,
|
||||
defaultSettings.getSocketSettings(), socketSettings, null));
|
||||
applySettings(settings::sendBufferSize, computeSettingsValue(SocketSettings::getSendBufferSize,
|
||||
defaultSettings.getSocketSettings(), socketSettings, null));
|
||||
}) //
|
||||
.applyToServerSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.minHeartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ServerSettings it) -> it.getMinHeartbeatFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getServerSettings(), serverSettings, null));
|
||||
|
||||
applySettings(it -> settings.heartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ServerSettings it) -> it.getHeartbeatFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getServerSettings(), serverSettings, connectionString.getHeartbeatFrequency()));
|
||||
settings.applySettings(serverSettings);
|
||||
}) //
|
||||
.autoEncryptionSettings(mongoClientSettings.getAutoEncryptionSettings()) //
|
||||
.codecRegistry(mongoClientSettings.getCodecRegistry()); //
|
||||
|
||||
applySettings(builder::readConcern, computeSettingsValue(defaultSettings.getReadConcern(),
|
||||
mongoClientSettings.getReadConcern(), connectionString.getReadConcern()));
|
||||
applySettings(builder::writeConcern, computeSettingsValue(defaultSettings.getWriteConcern(),
|
||||
mongoClientSettings.getWriteConcern(), connectionString.getWriteConcern()));
|
||||
applySettings(builder::readPreference, computeSettingsValue(defaultSettings.getReadPreference(),
|
||||
mongoClientSettings.getReadPreference(), connectionString.getReadPreference()));
|
||||
applySettings(builder::retryReads, computeSettingsValue(defaultSettings.getRetryReads(),
|
||||
mongoClientSettings.getRetryReads(), connectionString.getRetryReads()));
|
||||
applySettings(builder::retryWrites, computeSettingsValue(defaultSettings.getRetryWrites(),
|
||||
mongoClientSettings.getRetryWrites(), connectionString.getRetryWritesValue()));
|
||||
applySettings(builder::uuidRepresentation,
|
||||
computeSettingsValue(null, mongoClientSettings.getUuidRepresentation(), UuidRepresentation.JAVA_LEGACY));
|
||||
}
|
||||
|
||||
if (!CollectionUtils.isEmpty(credential)) {
|
||||
builder = builder.credential(credential.iterator().next());
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(replicaSet)) {
|
||||
builder.applyToClusterSettings((settings) -> {
|
||||
settings.requiredReplicaSetName(replicaSet);
|
||||
});
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
private <T> void applySettings(Consumer<T> settingsBuilder, @Nullable T value) {
|
||||
|
||||
if (ObjectUtils.isEmpty(value)) {
|
||||
return;
|
||||
}
|
||||
settingsBuilder.accept(value);
|
||||
}
|
||||
|
||||
private <S, T> T computeSettingsValue(Function<S, T> function, S defaultValueHolder, S settingsValueHolder,
|
||||
@Nullable T connectionStringValue) {
|
||||
return computeSettingsValue(function.apply(defaultValueHolder), function.apply(settingsValueHolder),
|
||||
connectionStringValue);
|
||||
}
|
||||
|
||||
private <T> T computeSettingsValue(T defaultValue, T fromSettings, T fromConnectionString) {
|
||||
|
||||
boolean fromSettingsIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromSettings);
|
||||
boolean fromConnectionStringIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromConnectionString);
|
||||
|
||||
if (!fromSettingsIsDefault) {
|
||||
return fromSettings;
|
||||
}
|
||||
return !fromConnectionStringIsDefault ? fromConnectionString : defaultValue;
|
||||
return createMongoClient();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -348,11 +152,43 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
|
||||
}
|
||||
}
|
||||
|
||||
private MongoClient createMongoClient(MongoClientSettings settings) throws UnknownHostException {
|
||||
return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
|
||||
private MongoClient createMongoClient() throws UnknownHostException {
|
||||
|
||||
if (!CollectionUtils.isEmpty(replicaSetSeeds)) {
|
||||
return new MongoClient(replicaSetSeeds, credentials, mongoClientOptions);
|
||||
}
|
||||
|
||||
return new MongoClient(createConfiguredOrDefaultServerAddress(), credentials, mongoClientOptions);
|
||||
}
|
||||
|
||||
private String getOrDefault(Object value, String defaultValue) {
|
||||
return !StringUtils.isEmpty(value) ? value.toString() : defaultValue;
|
||||
private ServerAddress createConfiguredOrDefaultServerAddress() throws UnknownHostException {
|
||||
|
||||
ServerAddress defaultAddress = new ServerAddress();
|
||||
|
||||
return new ServerAddress(StringUtils.hasText(host) ? host : defaultAddress.getHost(),
|
||||
port != null ? port.intValue() : defaultAddress.getPort());
|
||||
}
|
||||
|
||||
/**
 * Returns the given array as {@link List} with all {@literal null} elements removed.
 *
 * @param elements the elements to filter, can be {@literal null}.
 * @return a new unmodifiable {@link List} from the given elements without {@literal null}s.
 */
private static <T> List<T> filterNonNullElementsAsList(@Nullable T[] elements) {

    if (elements == null) {
        return Collections.emptyList();
    }

    List<T> candidateElements = new ArrayList<T>();

    for (T element : elements) {
        if (element != null) {
            candidateElements.add(element);
        }
    }

    return Collections.unmodifiableList(candidateElements);
}
}
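
The net effect of the rewrite is a clear precedence chain: an explicitly supplied MongoClientSettings overrides values coming from the ConnectionString, which in turn overrides host/port and driver defaults, and combining a ConnectionString with host/port now fails fast. A configuration sketch (connection string and settings values are placeholders):

    MongoClientFactoryBean factoryBean = new MongoClientFactoryBean();
    factoryBean.setConnectionString(new ConnectionString("mongodb://localhost:27017/?replicaSet=rs0"));
    factoryBean.setMongoClientSettings(MongoClientSettings.builder().retryWrites(false).build()); // wins over the connection string
    factoryBean.afterPropertiesSet();
    MongoClient client = factoryBean.getObject(); // com.mongodb.client.MongoClient built from the merged settings
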
@@ -0,0 +1,338 @@
|
||||
/*
|
||||
* Copyright 2015-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import javax.net.SocketFactory;
|
||||
import javax.net.ssl.SSLSocketFactory;
|
||||
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.AutoEncryptionSettings;
|
||||
import com.mongodb.DBDecoderFactory;
|
||||
import com.mongodb.DBEncoderFactory;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientOptions;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
/**
|
||||
* A factory bean for construction of a {@link MongoClientOptions} instance.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
* @deprecated since 2.2 - There is no replacement for this {@link org.springframework.beans.factory.FactoryBean}.
|
||||
* However moving forward there will be a dedicated factory bean for {@link com.mongodb.MongoClientSettings}
|
||||
* replacing {@link MongoClientOptions}.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClientOptions> {
|
||||
|
||||
private static final MongoClientOptions DEFAULT_MONGO_OPTIONS = MongoClientOptions.builder().build();
|
||||
|
||||
// TODO: Mongo Driver 4 - use application name instead of description if not available
|
||||
private @Nullable String description = DEFAULT_MONGO_OPTIONS.getApplicationName();
|
||||
private int minConnectionsPerHost = DEFAULT_MONGO_OPTIONS.getMinConnectionsPerHost();
|
||||
private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.getConnectionsPerHost();
|
||||
private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS
|
||||
.getThreadsAllowedToBlockForConnectionMultiplier();
|
||||
private int maxWaitTime = DEFAULT_MONGO_OPTIONS.getMaxWaitTime();
|
||||
private int maxConnectionIdleTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionIdleTime();
|
||||
private int maxConnectionLifeTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionLifeTime();
|
||||
private int connectTimeout = DEFAULT_MONGO_OPTIONS.getConnectTimeout();
|
||||
private int socketTimeout = DEFAULT_MONGO_OPTIONS.getSocketTimeout();
|
||||
|
||||
// TODO: Mongo Driver 4 - check if available
|
||||
private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.isSocketKeepAlive();
|
||||
private @Nullable ReadPreference readPreference = DEFAULT_MONGO_OPTIONS.getReadPreference();
|
||||
private DBDecoderFactory dbDecoderFactory = DEFAULT_MONGO_OPTIONS.getDbDecoderFactory();
|
||||
private DBEncoderFactory dbEncoderFactory = DEFAULT_MONGO_OPTIONS.getDbEncoderFactory();
|
||||
private @Nullable WriteConcern writeConcern = DEFAULT_MONGO_OPTIONS.getWriteConcern();
|
||||
private @Nullable SocketFactory socketFactory = DEFAULT_MONGO_OPTIONS.getSocketFactory();
|
||||
private boolean cursorFinalizerEnabled = DEFAULT_MONGO_OPTIONS.isCursorFinalizerEnabled();
|
||||
|
||||
// TODO: Mongo Driver 4 - remove this option
|
||||
private boolean alwaysUseMBeans = DEFAULT_MONGO_OPTIONS.isAlwaysUseMBeans();
|
||||
private int heartbeatFrequency = DEFAULT_MONGO_OPTIONS.getHeartbeatFrequency();
|
||||
private int minHeartbeatFrequency = DEFAULT_MONGO_OPTIONS.getMinHeartbeatFrequency();
|
||||
private int heartbeatConnectTimeout = DEFAULT_MONGO_OPTIONS.getHeartbeatConnectTimeout();
|
||||
private int heartbeatSocketTimeout = DEFAULT_MONGO_OPTIONS.getHeartbeatSocketTimeout();
|
||||
private String requiredReplicaSetName = DEFAULT_MONGO_OPTIONS.getRequiredReplicaSetName();
|
||||
private int serverSelectionTimeout = DEFAULT_MONGO_OPTIONS.getServerSelectionTimeout();
|
||||
|
||||
private boolean ssl;
|
||||
private @Nullable SSLSocketFactory sslSocketFactory;
|
||||
private @Nullable AutoEncryptionSettings autoEncryptionSettings;
|
||||
|
||||
/**
|
||||
* Set the {@link MongoClient} description.
|
||||
*
|
||||
* @param description
|
||||
*/
|
||||
// TODO: Mongo Driver 4 - deprecate that one and add application name
|
||||
public void setDescription(@Nullable String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the minimum number of connections per host.
|
||||
*
|
||||
* @param minConnectionsPerHost
|
||||
*/
|
||||
public void setMinConnectionsPerHost(int minConnectionsPerHost) {
|
||||
this.minConnectionsPerHost = minConnectionsPerHost;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the number of connections allowed per host. Will block if run out. Default is 10. System property
|
||||
* {@code MONGO.POOLSIZE} can override
|
||||
*
|
||||
* @param connectionsPerHost
|
||||
*/
|
||||
public void setConnectionsPerHost(int connectionsPerHost) {
|
||||
this.connectionsPerHost = connectionsPerHost;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the multiplier for connectionsPerHost for # of threads that can block. Default is 5. If connectionsPerHost is
|
||||
* 10, and threadsAllowedToBlockForConnectionMultiplier is 5, then 50 threads can block more than that and an
|
||||
* exception will be thrown.
|
||||
*
|
||||
* @param threadsAllowedToBlockForConnectionMultiplier
|
||||
*/
|
||||
public void setThreadsAllowedToBlockForConnectionMultiplier(int threadsAllowedToBlockForConnectionMultiplier) {
|
||||
this.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the max wait time of a blocking thread for a connection. Default is 120,000 ms (2 minutes).
|
||||
*
|
||||
* @param maxWaitTime
|
||||
*/
|
||||
public void setMaxWaitTime(int maxWaitTime) {
|
||||
this.maxWaitTime = maxWaitTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* The maximum idle time for a pooled connection.
|
||||
*
|
||||
* @param maxConnectionIdleTime
|
||||
*/
|
||||
public void setMaxConnectionIdleTime(int maxConnectionIdleTime) {
|
||||
this.maxConnectionIdleTime = maxConnectionIdleTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the maximum life time for a pooled connection.
|
||||
*
|
||||
* @param maxConnectionLifeTime
|
||||
*/
|
||||
public void setMaxConnectionLifeTime(int maxConnectionLifeTime) {
|
||||
this.maxConnectionLifeTime = maxConnectionLifeTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the connect timeout in milliseconds. 0 is default and infinite.
|
||||
*
|
||||
* @param connectTimeout
|
||||
*/
|
||||
public void setConnectTimeout(int connectTimeout) {
|
||||
this.connectTimeout = connectTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the socket timeout. 0 is default and infinite.
|
||||
*
|
||||
* @param socketTimeout
|
||||
*/
|
||||
public void setSocketTimeout(int socketTimeout) {
|
||||
this.socketTimeout = socketTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false.
|
||||
*
|
||||
* @param socketKeepAlive
|
||||
*/
|
||||
public void setSocketKeepAlive(boolean socketKeepAlive) {
|
||||
this.socketKeepAlive = socketKeepAlive;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ReadPreference}.
|
||||
*
|
||||
* @param readPreference
|
||||
*/
|
||||
public void setReadPreference(@Nullable ReadPreference readPreference) {
|
||||
this.readPreference = readPreference;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link WriteConcern} that will be the default value used when asking the {@link MongoDbFactory} for a DB
|
||||
* object.
|
||||
*
|
||||
* @param writeConcern
|
||||
*/
|
||||
public void setWriteConcern(@Nullable WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketFactory
|
||||
*/
|
||||
public void setSocketFactory(@Nullable SocketFactory socketFactory) {
|
||||
this.socketFactory = socketFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the frequency that the driver will attempt to determine the current state of each server in the cluster.
|
||||
*
|
||||
* @param heartbeatFrequency
|
||||
*/
|
||||
public void setHeartbeatFrequency(int heartbeatFrequency) {
|
||||
this.heartbeatFrequency = heartbeatFrequency;
|
||||
}
|
||||
|
||||
/**
|
||||
* In the event that the driver has to frequently re-check a server's availability, it will wait at least this long
|
||||
* since the previous check to avoid wasted effort.
|
||||
*
|
||||
* @param minHeartbeatFrequency
|
||||
*/
|
||||
public void setMinHeartbeatFrequency(int minHeartbeatFrequency) {
|
||||
this.minHeartbeatFrequency = minHeartbeatFrequency;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the connect timeout for connections used for the cluster heartbeat.
|
||||
*
|
||||
* @param heartbeatConnectTimeout
|
||||
*/
|
||||
public void setHeartbeatConnectTimeout(int heartbeatConnectTimeout) {
|
||||
this.heartbeatConnectTimeout = heartbeatConnectTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the socket timeout for connections used for the cluster heartbeat.
|
||||
*
|
||||
* @param heartbeatSocketTimeout
|
||||
*/
|
||||
public void setHeartbeatSocketTimeout(int heartbeatSocketTimeout) {
|
||||
this.heartbeatSocketTimeout = heartbeatSocketTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the name of the replica set.
|
||||
*
|
||||
* @param requiredReplicaSetName
|
||||
*/
|
||||
public void setRequiredReplicaSetName(String requiredReplicaSetName) {
|
||||
this.requiredReplicaSetName = requiredReplicaSetName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Controls whether the driver should use an SSL connection. Defaults to {@literal false}.
|
||||
*
|
||||
* @param ssl
|
||||
*/
|
||||
public void setSsl(boolean ssl) {
|
||||
this.ssl = ssl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SSLSocketFactory} to use for the {@literal SSL} connection. If none is configured here,
|
||||
* {@link SSLSocketFactory#getDefault()} will be used.
|
||||
*
|
||||
* @param sslSocketFactory
|
||||
*/
|
||||
public void setSslSocketFactory(@Nullable SSLSocketFactory sslSocketFactory) {
|
||||
|
||||
this.sslSocketFactory = sslSocketFactory;
|
||||
this.ssl = sslSocketFactory != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@literal server selection timeout} in msec for a 3.x MongoDB Java driver. If not set the default value of
|
||||
* 30 sec will be used. A value of 0 means that it will timeout immediately if no server is available. A negative
|
||||
* value means to wait indefinitely.
|
||||
*
|
||||
* @param serverSelectionTimeout in msec.
|
||||
*/
|
||||
public void setServerSelectionTimeout(int serverSelectionTimeout) {
|
||||
this.serverSelectionTimeout = serverSelectionTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link AutoEncryptionSettings} to be used.
|
||||
*
|
||||
* @param autoEncryptionSettings can be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public void setAutoEncryptionSettings(@Nullable AutoEncryptionSettings autoEncryptionSettings) {
|
||||
this.autoEncryptionSettings = autoEncryptionSettings;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance()
|
||||
*/
|
||||
@SuppressWarnings("ConstantConditions")
|
||||
@Override
|
||||
protected MongoClientOptions createInstance() throws Exception {
|
||||
|
||||
SocketFactory socketFactoryToUse = ssl
|
||||
? (sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory.getDefault())
|
||||
: this.socketFactory;
|
||||
|
||||
return MongoClientOptions.builder() //
|
||||
.alwaysUseMBeans(this.alwaysUseMBeans) //
|
||||
.connectionsPerHost(this.connectionsPerHost) //
|
||||
.connectTimeout(connectTimeout) //
|
||||
.cursorFinalizerEnabled(cursorFinalizerEnabled) //
|
||||
.dbDecoderFactory(dbDecoderFactory) //
|
||||
.dbEncoderFactory(dbEncoderFactory) //
|
||||
.applicationName(description) // TODO: Mongo Driver 4 - use application name if description not available
|
||||
.heartbeatConnectTimeout(heartbeatConnectTimeout) //
|
||||
.heartbeatFrequency(heartbeatFrequency) //
|
||||
.heartbeatSocketTimeout(heartbeatSocketTimeout) //
|
||||
.maxConnectionIdleTime(maxConnectionIdleTime) //
|
||||
.maxConnectionLifeTime(maxConnectionLifeTime) //
|
||||
.maxWaitTime(maxWaitTime) //
|
||||
.minConnectionsPerHost(minConnectionsPerHost) //
|
||||
.minHeartbeatFrequency(minHeartbeatFrequency) //
|
||||
.readPreference(readPreference) //
|
||||
.requiredReplicaSetName(requiredReplicaSetName) //
|
||||
.serverSelectionTimeout(serverSelectionTimeout) //
|
||||
.sslEnabled(ssl) //
|
||||
.autoEncryptionSettings(autoEncryptionSettings) //
|
||||
.socketFactory(socketFactoryToUse) // TODO: Mongo Driver 4 -
|
||||
.socketKeepAlive(socketKeepAlive) // TODO: Mongo Driver 4 - remove if not available
|
||||
.socketTimeout(socketTimeout) //
|
||||
.threadsAllowedToBlockForConnectionMultiplier(threadsAllowedToBlockForConnectionMultiplier) //
|
||||
.writeConcern(writeConcern).build();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
|
||||
*/
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientOptions.class;
|
||||
}
|
||||
}
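
The deprecated options factory assembles a legacy MongoClientOptions from individual bean properties. A minimal programmatic sketch (the values are arbitrary):

    MongoClientOptionsFactoryBean optionsFactoryBean = new MongoClientOptionsFactoryBean();
    optionsFactoryBean.setConnectionsPerHost(50);
    optionsFactoryBean.setSocketTimeout(2000);
    optionsFactoryBean.setSsl(true); // falls back to SSLSocketFactory.getDefault() unless a factory is set
    optionsFactoryBean.afterPropertiesSet();
    MongoClientOptions options = optionsFactoryBean.getObject();
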
@@ -1,499 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import javax.net.ssl.SSLContext;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.AutoEncryptionSettings;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.ServerAddress;
|
||||
import com.mongodb.ServerApi;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.connection.ClusterConnectionMode;
|
||||
import com.mongodb.connection.ClusterType;
|
||||
import com.mongodb.connection.StreamFactoryFactory;
|
||||
|
||||
/**
|
||||
* A factory bean for construction of a {@link MongoClientSettings} instance to be used with a MongoDB driver.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
*/
|
||||
public class MongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoClientSettings> {
|
||||
|
||||
private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build();
|
||||
|
||||
private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry();
|
||||
private StreamFactoryFactory streamFactoryFactory = DEFAULT_MONGO_SETTINGS.getStreamFactoryFactory();
|
||||
|
||||
private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference();
|
||||
private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern();
|
||||
private @Nullable Boolean retryReads = null;
|
||||
|
||||
private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern();
|
||||
private @Nullable Boolean retryWrites = null;
|
||||
|
||||
private @Nullable String applicationName = null;
|
||||
|
||||
private @Nullable UuidRepresentation uUidRepresentation = null;
|
||||
|
||||
// --> Socket Settings
|
||||
|
||||
private int socketConnectTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings()
|
||||
.getConnectTimeout(TimeUnit.MILLISECONDS);
|
||||
private int socketReadTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReadTimeout(TimeUnit.MILLISECONDS);
|
||||
private int socketReceiveBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReceiveBufferSize();
|
||||
private int socketSendBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getSendBufferSize();
|
||||
|
||||
// --> Cluster Settings
|
||||
|
||||
private @Nullable String clusterSrvHost = DEFAULT_MONGO_SETTINGS.getClusterSettings().getSrvHost();
|
||||
private List<ServerAddress> clusterHosts = Collections.emptyList();
|
||||
private @Nullable ClusterConnectionMode clusterConnectionMode = null;
|
||||
private ClusterType custerRequiredClusterType = DEFAULT_MONGO_SETTINGS.getClusterSettings().getRequiredClusterType();
|
||||
private String clusterRequiredReplicaSetName = DEFAULT_MONGO_SETTINGS.getClusterSettings()
|
||||
.getRequiredReplicaSetName();
|
||||
private long clusterLocalThresholdMS = DEFAULT_MONGO_SETTINGS.getClusterSettings()
|
||||
.getLocalThreshold(TimeUnit.MILLISECONDS);
|
||||
private long clusterServerSelectionTimeoutMS = DEFAULT_MONGO_SETTINGS.getClusterSettings()
|
||||
.getServerSelectionTimeout(TimeUnit.MILLISECONDS);
|
||||
|
||||
// --> ConnectionPoolSettings
|
||||
|
||||
private int poolMaxSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMaxSize();
|
||||
private int poolMinSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMinSize();
|
||||
private long poolMaxWaitTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaxWaitTime(TimeUnit.MILLISECONDS);
|
||||
private long poolMaxConnectionLifeTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaxConnectionLifeTime(TimeUnit.MILLISECONDS);
|
||||
private long poolMaxConnectionIdleTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaxConnectionIdleTime(TimeUnit.MILLISECONDS);
|
||||
private long poolMaintenanceInitialDelayMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaintenanceInitialDelay(TimeUnit.MILLISECONDS);
|
||||
private long poolMaintenanceFrequencyMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaintenanceFrequency(TimeUnit.MILLISECONDS);
|
||||
|
||||
// --> SSL Settings
|
||||
|
||||
private boolean sslEnabled = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled();
|
||||
private boolean sslInvalidHostNameAllowed = DEFAULT_MONGO_SETTINGS.getSslSettings().isInvalidHostNameAllowed();
|
||||
private String sslProvider = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled()
|
||||
? DEFAULT_MONGO_SETTINGS.getSslSettings().getContext().getProvider().getName()
|
||||
: "";
|
||||
|
||||
// encryption and retry
|
||||
|
||||
private @Nullable AutoEncryptionSettings autoEncryptionSettings;
|
||||
private @Nullable ServerApi serverApi;
|
||||
|
||||
/**
|
||||
* @param socketConnectTimeoutMS in msec
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#connectTimeout(int, TimeUnit)
|
||||
*/
|
||||
public void setSocketConnectTimeoutMS(int socketConnectTimeoutMS) {
|
||||
this.socketConnectTimeoutMS = socketConnectTimeoutMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketReadTimeoutMS in msec
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#readTimeout(int, TimeUnit)
|
||||
*/
|
||||
public void setSocketReadTimeoutMS(int socketReadTimeoutMS) {
|
||||
this.socketReadTimeoutMS = socketReadTimeoutMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketReceiveBufferSize
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#receiveBufferSize(int)
|
||||
*/
|
||||
public void setSocketReceiveBufferSize(int socketReceiveBufferSize) {
|
||||
this.socketReceiveBufferSize = socketReceiveBufferSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketSendBufferSize
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#sendBufferSize(int)
|
||||
*/
|
||||
public void setSocketSendBufferSize(int socketSendBufferSize) {
|
||||
this.socketSendBufferSize = socketSendBufferSize;
|
||||
}
|
||||
|
||||
// --> Server Settings
|
||||
|
||||
private long serverHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings()
|
||||
.getHeartbeatFrequency(TimeUnit.MILLISECONDS);
|
||||
private long serverMinHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings()
|
||||
.getMinHeartbeatFrequency(TimeUnit.MILLISECONDS);
|
||||
|
||||
/**
|
||||
* @param serverHeartbeatFrequencyMS in msec
|
||||
* @see com.mongodb.connection.ServerSettings.Builder#heartbeatFrequency(long, TimeUnit)
|
||||
*/
|
||||
public void setServerHeartbeatFrequencyMS(long serverHeartbeatFrequencyMS) {
|
||||
this.serverHeartbeatFrequencyMS = serverHeartbeatFrequencyMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param serverMinHeartbeatFrequencyMS in msec
|
||||
* @see com.mongodb.connection.ServerSettings.Builder#minHeartbeatFrequency(long, TimeUnit)
|
||||
*/
|
||||
public void setServerMinHeartbeatFrequencyMS(long serverMinHeartbeatFrequencyMS) {
|
||||
this.serverMinHeartbeatFrequencyMS = serverMinHeartbeatFrequencyMS;
|
||||
}
|
||||
|
||||
// --> Cluster Settings
|
||||
|
||||
/**
|
||||
* @param clusterSrvHost
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#srvHost(String)
|
||||
*/
|
||||
public void setClusterSrvHost(String clusterSrvHost) {
|
||||
this.clusterSrvHost = clusterSrvHost;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterHosts
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#hosts(List)
|
||||
*/
|
||||
public void setClusterHosts(ServerAddress[] clusterHosts) {
|
||||
this.clusterHosts = Arrays.asList(clusterHosts);
|
||||
}
|
||||
|
||||
/**
|
||||
* ????
|
||||
*
|
||||
* @param clusterConnectionMode
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#mode(ClusterConnectionMode)
|
||||
*/
|
||||
public void setClusterConnectionMode(ClusterConnectionMode clusterConnectionMode) {
|
||||
this.clusterConnectionMode = clusterConnectionMode;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param custerRequiredClusterType
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#requiredClusterType(ClusterType)
|
||||
*/
|
||||
public void setCusterRequiredClusterType(ClusterType custerRequiredClusterType) {
|
||||
this.custerRequiredClusterType = custerRequiredClusterType;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterRequiredReplicaSetName
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#requiredReplicaSetName(String)
|
||||
*/
|
||||
public void setClusterRequiredReplicaSetName(String clusterRequiredReplicaSetName) {
|
||||
this.clusterRequiredReplicaSetName = clusterRequiredReplicaSetName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterLocalThresholdMS in msec
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#localThreshold(long, TimeUnit)
|
||||
*/
|
||||
public void setClusterLocalThresholdMS(long clusterLocalThresholdMS) {
|
||||
this.clusterLocalThresholdMS = clusterLocalThresholdMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterServerSelectionTimeoutMS in msec
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#serverSelectionTimeout(long, TimeUnit)
|
||||
*/
|
||||
public void setClusterServerSelectionTimeoutMS(long clusterServerSelectionTimeoutMS) {
|
||||
this.clusterServerSelectionTimeoutMS = clusterServerSelectionTimeoutMS;
|
||||
}
|
||||
|
||||
// --> ConnectionPoolSettings
|
||||
|
||||
/**
|
||||
* @param poolMaxSize
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxSize(int)
|
||||
*/
|
||||
public void setPoolMaxSize(int poolMaxSize) {
|
||||
this.poolMaxSize = poolMaxSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMinSize
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#minSize(int)
|
||||
*/
|
||||
public void setPoolMinSize(int poolMinSize) {
|
||||
this.poolMinSize = poolMinSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaxWaitTimeMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxWaitTime(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaxWaitTimeMS(long poolMaxWaitTimeMS) {
|
||||
this.poolMaxWaitTimeMS = poolMaxWaitTimeMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaxConnectionLifeTimeMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionLifeTime(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaxConnectionLifeTimeMS(long poolMaxConnectionLifeTimeMS) {
|
||||
this.poolMaxConnectionLifeTimeMS = poolMaxConnectionLifeTimeMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaxConnectionIdleTimeMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionIdleTime(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaxConnectionIdleTimeMS(long poolMaxConnectionIdleTimeMS) {
|
||||
this.poolMaxConnectionIdleTimeMS = poolMaxConnectionIdleTimeMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaintenanceInitialDelayMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceInitialDelay(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaintenanceInitialDelayMS(long poolMaintenanceInitialDelayMS) {
|
||||
this.poolMaintenanceInitialDelayMS = poolMaintenanceInitialDelayMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaintenanceFrequencyMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceFrequency(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaintenanceFrequencyMS(long poolMaintenanceFrequencyMS) {
|
||||
this.poolMaintenanceFrequencyMS = poolMaintenanceFrequencyMS;
|
||||
}
|
||||
|
||||
// --> SSL Settings
|
||||
|
||||
/**
|
||||
* @param sslEnabled
|
||||
* @see com.mongodb.connection.SslSettings.Builder#enabled(boolean)
|
||||
*/
|
||||
public void setSslEnabled(Boolean sslEnabled) {
|
||||
this.sslEnabled = sslEnabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param sslInvalidHostNameAllowed
|
||||
* @see com.mongodb.connection.SslSettings.Builder#invalidHostNameAllowed(boolean)
|
||||
*/
|
||||
public void setSslInvalidHostNameAllowed(Boolean sslInvalidHostNameAllowed) {
|
||||
this.sslInvalidHostNameAllowed = sslInvalidHostNameAllowed;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param sslProvider
|
||||
* @see com.mongodb.connection.SslSettings.Builder#context(SSLContext)
|
||||
* @see SSLContext#getInstance(String)
|
||||
*/
|
||||
public void setSslProvider(String sslProvider) {
|
||||
this.sslProvider = sslProvider;
|
||||
}
|
||||
|
||||
// encryption and retry
|
||||
|
||||
/**
|
||||
* @param applicationName
|
||||
* @see MongoClientSettings.Builder#applicationName(String)
|
||||
*/
|
||||
public void setApplicationName(@Nullable String applicationName) {
|
||||
this.applicationName = applicationName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param retryReads
|
||||
* @see MongoClientSettings.Builder#retryReads(boolean)
|
||||
*/
|
||||
public void setRetryReads(@Nullable Boolean retryReads) {
|
||||
this.retryReads = retryReads;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param readConcern
|
||||
* @see MongoClientSettings.Builder#readConcern(ReadConcern)
|
||||
*/
|
||||
public void setReadConcern(ReadConcern readConcern) {
|
||||
this.readConcern = readConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param writeConcern
|
||||
* @see MongoClientSettings.Builder#writeConcern(WriteConcern)
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param retryWrites
|
||||
* @see MongoClientSettings.Builder#retryWrites(boolean)
|
||||
*/
|
||||
public void setRetryWrites(@Nullable Boolean retryWrites) {
|
||||
this.retryWrites = retryWrites;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param readPreference
|
||||
* @see MongoClientSettings.Builder#readPreference(ReadPreference)
|
||||
*/
|
||||
public void setReadPreference(ReadPreference readPreference) {
|
||||
this.readPreference = readPreference;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param streamFactoryFactory
|
||||
* @see MongoClientSettings.Builder#streamFactoryFactory(StreamFactoryFactory)
|
||||
*/
|
||||
public void setStreamFactoryFactory(StreamFactoryFactory streamFactoryFactory) {
|
||||
this.streamFactoryFactory = streamFactoryFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param codecRegistry
|
||||
* @see MongoClientSettings.Builder#codecRegistry(CodecRegistry)
|
||||
*/
|
||||
public void setCodecRegistry(CodecRegistry codecRegistry) {
|
||||
this.codecRegistry = codecRegistry;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param uUidRepresentation
|
||||
*/
|
||||
public void setuUidRepresentation(@Nullable UuidRepresentation uUidRepresentation) {
|
||||
this.uUidRepresentation = uUidRepresentation;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param autoEncryptionSettings can be {@literal null}.
|
||||
* @see MongoClientSettings.Builder#autoEncryptionSettings(AutoEncryptionSettings)
|
||||
*/
|
||||
public void setAutoEncryptionSettings(@Nullable AutoEncryptionSettings autoEncryptionSettings) {
|
||||
this.autoEncryptionSettings = autoEncryptionSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param serverApi can be {@literal null}.
|
||||
* @see MongoClientSettings.Builder#serverApi(ServerApi)
|
||||
* @since 3.3
|
||||
*/
|
||||
public void setServerApi(@Nullable ServerApi serverApi) {
|
||||
this.serverApi = serverApi;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientSettings.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MongoClientSettings createInstance() {
|
||||
|
||||
Builder builder = MongoClientSettings.builder() //
|
||||
.readPreference(readPreference) //
|
||||
.writeConcern(writeConcern) //
|
||||
.readConcern(readConcern) //
|
||||
.codecRegistry(codecRegistry) //
|
||||
.applicationName(applicationName) //
|
||||
.autoEncryptionSettings(autoEncryptionSettings) //
|
||||
.applyToClusterSettings((settings) -> {
|
||||
|
||||
settings.serverSelectionTimeout(clusterServerSelectionTimeoutMS, TimeUnit.MILLISECONDS);
|
||||
if (clusterConnectionMode != null) {
|
||||
settings.mode(clusterConnectionMode);
|
||||
}
|
||||
settings.requiredReplicaSetName(clusterRequiredReplicaSetName);
|
||||
|
||||
if (!CollectionUtils.isEmpty(clusterHosts)) {
|
||||
settings.hosts(clusterHosts);
|
||||
}
|
||||
settings.localThreshold(clusterLocalThresholdMS, TimeUnit.MILLISECONDS);
|
||||
// settings.maxWaitQueueSize(clusterMaxWaitQueueSize);
|
||||
settings.requiredClusterType(custerRequiredClusterType);
|
||||
|
||||
if (StringUtils.hasText(clusterSrvHost)) {
|
||||
settings.srvHost(clusterSrvHost);
|
||||
}
|
||||
}) //
|
||||
.applyToConnectionPoolSettings((settings) -> {
|
||||
|
||||
settings.minSize(poolMinSize);
|
||||
settings.maxSize(poolMaxSize);
|
||||
settings.maxConnectionIdleTime(poolMaxConnectionIdleTimeMS, TimeUnit.MILLISECONDS);
|
||||
settings.maxWaitTime(poolMaxWaitTimeMS, TimeUnit.MILLISECONDS);
|
||||
settings.maxConnectionLifeTime(poolMaxConnectionLifeTimeMS, TimeUnit.MILLISECONDS);
|
||||
// settings.maxWaitQueueSize(poolMaxWaitQueueSize);
|
||||
settings.maintenanceFrequency(poolMaintenanceFrequencyMS, TimeUnit.MILLISECONDS);
|
||||
settings.maintenanceInitialDelay(poolMaintenanceInitialDelayMS, TimeUnit.MILLISECONDS);
|
||||
}) //
|
||||
.applyToServerSettings((settings) -> {
|
||||
|
||||
settings.minHeartbeatFrequency(serverMinHeartbeatFrequencyMS, TimeUnit.MILLISECONDS);
|
||||
settings.heartbeatFrequency(serverHeartbeatFrequencyMS, TimeUnit.MILLISECONDS);
|
||||
}) //
|
||||
.applyToSocketSettings((settings) -> {
|
||||
|
||||
settings.connectTimeout(socketConnectTimeoutMS, TimeUnit.MILLISECONDS);
|
||||
settings.readTimeout(socketReadTimeoutMS, TimeUnit.MILLISECONDS);
|
||||
settings.receiveBufferSize(socketReceiveBufferSize);
|
||||
settings.sendBufferSize(socketSendBufferSize);
|
||||
}) //
|
||||
.applyToSslSettings((settings) -> {
|
||||
|
||||
settings.enabled(sslEnabled);
|
||||
if (sslEnabled) {
|
||||
|
||||
settings.invalidHostNameAllowed(sslInvalidHostNameAllowed);
|
||||
try {
|
||||
settings.context(
|
||||
StringUtils.hasText(sslProvider) ? SSLContext.getInstance(sslProvider) : SSLContext.getDefault());
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
throw new IllegalArgumentException(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (streamFactoryFactory != null) {
|
||||
builder = builder.streamFactoryFactory(streamFactoryFactory);
|
||||
}
|
||||
if (retryReads != null) {
|
||||
builder = builder.retryReads(retryReads);
|
||||
}
|
||||
if (retryWrites != null) {
|
||||
builder = builder.retryWrites(retryWrites);
|
||||
}
|
||||
if (uUidRepresentation != null) {
|
||||
builder = builder.uuidRepresentation(uUidRepresentation);
|
||||
}
|
||||
if (serverApi != null) {
|
||||
builder = builder.serverApi(serverApi);
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
}
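
For comparison, the MongoClientSettingsFactoryBean in this hunk flattens the same knobs into bean properties on top of MongoClientSettings.Builder; a typical programmatic use would look like this (hosts, pool size and retry flag are placeholders):

    MongoClientSettingsFactoryBean settingsFactoryBean = new MongoClientSettingsFactoryBean();
    settingsFactoryBean.setClusterHosts(new ServerAddress[] { new ServerAddress("localhost", 27017) });
    settingsFactoryBean.setPoolMaxSize(64);
    settingsFactoryBean.setRetryWrites(true);
    settingsFactoryBean.afterPropertiesSet();
    MongoClientSettings settings = settingsFactoryBean.getObject();
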
@@ -16,9 +16,10 @@
package org.springframework.data.mongodb.core;

import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

import com.mongodb.WriteConcernResult;
import com.mongodb.WriteResult;

/**
 * Mongo-specific {@link DataIntegrityViolationException}.
@@ -29,18 +30,18 @@ public class MongoDataIntegrityViolationException extends DataIntegrityViolation

	private static final long serialVersionUID = -186980521176764046L;

	private final WriteConcernResult writeResult;
	private final WriteResult writeResult;
	private final MongoActionOperation actionOperation;

	/**
	 * Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteConcernResult}.
	 * Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteResult}.
	 *
	 * @param message the exception message
	 * @param writeResult the {@link WriteConcernResult} that causes the exception, must not be {@literal null}.
	 * @param writeResult the {@link WriteResult} that causes the exception, must not be {@literal null}.
	 * @param actionOperation the {@link MongoActionOperation} that caused the exception, must not be {@literal null}.
	 */
	public MongoDataIntegrityViolationException(String message, WriteConcernResult writeResult,
			MongoActionOperation actionOperation) {
	public MongoDataIntegrityViolationException(String message, WriteResult writeResult,
			MongoActionOperation actionOperation) {

		super(message);

@@ -52,11 +53,11 @@ public class MongoDataIntegrityViolationException extends DataIntegrityViolation
	}

	/**
	 * Returns the {@link WriteConcernResult} that caused the exception.
	 * Returns the {@link WriteResult} that caused the exception.
	 *
	 * @return the writeResult
	 */
	public WriteConcernResult getWriteResult() {
	public WriteResult getWriteResult() {
		return writeResult;
	}
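
For reference, a hedged caller-side sketch (not part of the diff) of how this exception might be handled; Person and template are placeholder names, and it assumes a template configured to raise write-concern failures as exceptions:

// Hypothetical usage; the WriteConcernResult carried by the exception describes the rejected write.
try {
	template.insert(new Person("Alice"));
} catch (MongoDataIntegrityViolationException e) {
	System.err.println("Write rejected: " + e.getWriteResult());
	throw e;
}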
@@ -1,298 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.SessionAwareMethodInterceptor;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as
|
||||
* database name and exception translator.
|
||||
* <br />
|
||||
* Not intended to be used directly.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @param <C> Client type.
|
||||
* @since 3.0
|
||||
* @see SimpleMongoClientDatabaseFactory
|
||||
*/
|
||||
public abstract class MongoDatabaseFactorySupport<C> implements MongoDatabaseFactory {
|
||||
|
||||
private final C mongoClient;
|
||||
private final String databaseName;
|
||||
private final boolean mongoInstanceCreated;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
private @Nullable WriteConcern writeConcern;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoDatabaseFactorySupport} object given {@code mongoClient}, {@code databaseName},
|
||||
* {@code mongoInstanceCreated} and {@link PersistenceExceptionTranslator}.
|
||||
*
|
||||
* @param mongoClient must not be {@literal null}.
|
||||
* @param databaseName must not be {@literal null} or empty.
|
||||
* @param mongoInstanceCreated {@literal true} if the client instance was created by a subclass of
|
||||
* {@link MongoDatabaseFactorySupport} to close the client on {@link #destroy()}.
|
||||
* @param exceptionTranslator must not be {@literal null}.
|
||||
*/
|
||||
protected MongoDatabaseFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated,
|
||||
PersistenceExceptionTranslator exceptionTranslator) {
|
||||
|
||||
Assert.notNull(mongoClient, "MongoClient must not be null!");
|
||||
Assert.hasText(databaseName, "Database name must not be empty!");
|
||||
Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"),
|
||||
"Database name must not contain slashes, dots, spaces, quotes, or dollar signs!");
|
||||
|
||||
this.mongoClient = mongoClient;
|
||||
this.databaseName = databaseName;
|
||||
this.mongoInstanceCreated = mongoInstanceCreated;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created.
|
||||
*
|
||||
* @param writeConcern the writeConcern to set
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase()
|
||||
*/
|
||||
public MongoDatabase getMongoDatabase() throws DataAccessException {
|
||||
return getMongoDatabase(getDefaultDatabaseName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
|
||||
|
||||
Assert.hasText(dbName, "Database name must not be empty!");
|
||||
|
||||
MongoDatabase db = doGetMongoDatabase(dbName);
|
||||
|
||||
if (writeConcern == null) {
|
||||
return db;
|
||||
}
|
||||
|
||||
return db.withWriteConcern(writeConcern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the actual {@link MongoDatabase} from the client.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
protected abstract MongoDatabase doGetMongoDatabase(String dbName);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.DisposableBean#destroy()
|
||||
*/
|
||||
public void destroy() throws Exception {
|
||||
if (mongoInstanceCreated) {
|
||||
closeClient();
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return this.exceptionTranslator;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.Session)
|
||||
*/
|
||||
public MongoDatabaseFactory withSession(ClientSession session) {
|
||||
return new MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the client instance.
|
||||
*/
|
||||
protected abstract void closeClient();
|
||||
|
||||
/**
|
||||
* @return the Mongo client object.
|
||||
*/
|
||||
protected C getMongoClient() {
|
||||
return mongoClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the database name.
|
||||
*/
|
||||
protected String getDefaultDatabaseName() {
|
||||
return databaseName;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ClientSession} bound {@link MongoDatabaseFactory} decorating the database with a
|
||||
* {@link SessionAwareMethodInterceptor}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
static final class ClientSessionBoundMongoDbFactory implements MongoDatabaseFactory {
|
||||
|
||||
private final ClientSession session;
|
||||
private final MongoDatabaseFactory delegate;
|
||||
|
||||
public ClientSessionBoundMongoDbFactory(ClientSession session, MongoDatabaseFactory delegate) {
|
||||
this.session = session;
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase()
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getMongoDatabase() throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getMongoDatabase());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getMongoDatabase(dbName));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
@Override
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return delegate.getExceptionTranslator();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
*/
|
||||
@Override
|
||||
public ClientSession getSession(ClientSessionOptions options) {
|
||||
return delegate.getSession(options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.ClientSession)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabaseFactory withSession(ClientSession session) {
|
||||
return delegate.withSession(session);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#isTransactionActive()
|
||||
*/
|
||||
@Override
|
||||
public boolean isTransactionActive() {
|
||||
return session != null && session.hasActiveTransaction();
|
||||
}
|
||||
|
||||
private MongoDatabase proxyMongoDatabase(MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoCollection<?> proxyCollection(com.mongodb.session.ClientSession session,
|
||||
MongoCollection<?> collection) {
|
||||
return createProxyInstance(session, collection, MongoCollection.class);
|
||||
}
|
||||
|
||||
private <T> T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class<T> targetType) {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
factory.setTarget(target);
|
||||
factory.setInterfaces(targetType);
|
||||
factory.setOpaque(true);
|
||||
|
||||
factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class,
|
||||
this::proxyDatabase, MongoCollection.class, this::proxyCollection));
|
||||
|
||||
return targetType.cast(factory.getProxy(target.getClass().getClassLoader()));
|
||||
}
|
||||
|
||||
public ClientSession getSession() {
|
||||
return this.session;
|
||||
}
|
||||
|
||||
public MongoDatabaseFactory getDelegate() {
|
||||
return this.delegate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
return false;
|
||||
|
||||
ClientSessionBoundMongoDbFactory that = (ClientSessionBoundMongoDbFactory) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(this.session, that.session)) {
|
||||
return false;
|
||||
}
|
||||
return ObjectUtils.nullSafeEquals(this.delegate, that.delegate);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = ObjectUtils.nullSafeHashCode(this.session);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(this.delegate);
|
||||
return result;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session=" + this.getSession() + ", delegate="
|
||||
+ this.getDelegate() + ")";
|
||||
}
|
||||
}
|
||||
}
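
For reference, a minimal usage sketch (not part of the diff) of a factory built on this support class; "test" and "other" are placeholder database names and the client connects to a local server by default:

// Illustrative only: SimpleMongoClientDatabaseFactory is the subclass referenced in the javadoc above.
SimpleMongoClientDatabaseFactory factory =
		new SimpleMongoClientDatabaseFactory(com.mongodb.client.MongoClients.create(), "test");
factory.setWriteConcern(com.mongodb.WriteConcern.MAJORITY); // applied to every MongoDatabase handed out

MongoDatabase defaultDb = factory.getMongoDatabase();      // uses the configured default database
MongoDatabase otherDb = factory.getMongoDatabase("other"); // explicit database name

// Binding a ClientSession returns the session-aware decorator shown above.
ClientSession session = factory.getSession(ClientSessionOptions.builder().build());
MongoDatabaseFactory sessionBound = factory.withSession(session);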
@@ -15,23 +15,44 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.Value;
|
||||
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.SessionAwareMethodInterceptor;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as
|
||||
* database name and exception translator.
|
||||
* <br />
|
||||
* Common base class for usage with both {@link com.mongodb.client.MongoClients} and {@link com.mongodb.MongoClient}
|
||||
* defining common properties such as database name and exception translator.
|
||||
* <p/>
|
||||
* Not intended to be used directly.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @param <C> Client type.
|
||||
* @since 2.1
|
||||
* @see SimpleMongoClientDatabaseFactory
|
||||
* @deprecated since 3.0, use {@link MongoDatabaseFactorySupport} instead.
|
||||
* @see SimpleMongoDbFactory
|
||||
* @see SimpleMongoClientDbFactory
|
||||
*/
|
||||
@Deprecated
|
||||
public abstract class MongoDbFactorySupport<C> extends MongoDatabaseFactorySupport<C> {
|
||||
public abstract class MongoDbFactorySupport<C> implements MongoDbFactory {
|
||||
|
||||
private final C mongoClient;
|
||||
private final String databaseName;
|
||||
private final boolean mongoInstanceCreated;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
private @Nullable WriteConcern writeConcern;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoDbFactorySupport} object given {@code mongoClient}, {@code databaseName},
|
||||
@@ -45,6 +66,207 @@ public abstract class MongoDbFactorySupport<C> extends MongoDatabaseFactorySuppo
|
||||
*/
|
||||
protected MongoDbFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated,
|
||||
PersistenceExceptionTranslator exceptionTranslator) {
|
||||
super(mongoClient, databaseName, mongoInstanceCreated, exceptionTranslator);
|
||||
|
||||
Assert.notNull(mongoClient, "MongoClient must not be null!");
|
||||
Assert.hasText(databaseName, "Database name must not be empty!");
|
||||
Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"),
|
||||
"Database name must not contain slashes, dots, spaces, quotes, or dollar signs!");
|
||||
|
||||
this.mongoClient = mongoClient;
|
||||
this.databaseName = databaseName;
|
||||
this.mongoInstanceCreated = mongoInstanceCreated;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created.
|
||||
*
|
||||
* @param writeConcern the writeConcern to set
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb()
|
||||
*/
|
||||
public MongoDatabase getDb() throws DataAccessException {
|
||||
return getDb(databaseName);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
|
||||
Assert.hasText(dbName, "Database name must not be empty!");
|
||||
|
||||
MongoDatabase db = doGetMongoDatabase(dbName);
|
||||
|
||||
if (writeConcern == null) {
|
||||
return db;
|
||||
}
|
||||
|
||||
return db.withWriteConcern(writeConcern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the actual {@link MongoDatabase} from the client.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
protected abstract MongoDatabase doGetMongoDatabase(String dbName);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.DisposableBean#destroy()
|
||||
*/
|
||||
public void destroy() throws Exception {
|
||||
if (mongoInstanceCreated) {
|
||||
closeClient();
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return this.exceptionTranslator;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.Session)
|
||||
*/
|
||||
public MongoDbFactory withSession(ClientSession session) {
|
||||
return new MongoDbFactorySupport.ClientSessionBoundMongoDbFactory(session, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the client instance.
|
||||
*/
|
||||
protected abstract void closeClient();
|
||||
|
||||
/**
|
||||
* @return the Mongo client object.
|
||||
*/
|
||||
protected C getMongoClient() {
|
||||
return mongoClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the database name.
|
||||
*/
|
||||
protected String getDefaultDatabaseName() {
|
||||
return databaseName;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ClientSession} bound {@link MongoDbFactory} decorating the database with a
|
||||
* {@link SessionAwareMethodInterceptor}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@Value
|
||||
static class ClientSessionBoundMongoDbFactory implements MongoDbFactory {
|
||||
|
||||
ClientSession session;
|
||||
MongoDbFactory delegate;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb()
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb() throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getDb());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getDb(dbName));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
@Override
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return delegate.getExceptionTranslator();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getLegacyDb()
|
||||
*/
|
||||
@Override
|
||||
public DB getLegacyDb() {
|
||||
return delegate.getLegacyDb();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
*/
|
||||
@Override
|
||||
public ClientSession getSession(ClientSessionOptions options) {
|
||||
return delegate.getSession(options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.ClientSession)
|
||||
*/
|
||||
@Override
|
||||
public MongoDbFactory withSession(ClientSession session) {
|
||||
return delegate.withSession(session);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#isTransactionActive()
|
||||
*/
|
||||
@Override
|
||||
public boolean isTransactionActive() {
|
||||
return session != null && session.hasActiveTransaction();
|
||||
}
|
||||
|
||||
private MongoDatabase proxyMongoDatabase(MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoCollection<?> proxyCollection(com.mongodb.session.ClientSession session,
|
||||
MongoCollection<?> collection) {
|
||||
return createProxyInstance(session, collection, MongoCollection.class);
|
||||
}
|
||||
|
||||
private <T> T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class<T> targetType) {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
factory.setTarget(target);
|
||||
factory.setInterfaces(targetType);
|
||||
factory.setOpaque(true);
|
||||
|
||||
factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class,
|
||||
this::proxyDatabase, MongoCollection.class, this::proxyCollection));
|
||||
|
||||
return targetType.cast(factory.getProxy(target.getClass().getClassLoader()));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -21,7 +21,6 @@ import java.util.HashSet;
import java.util.Set;

import org.bson.BsonInvalidOperationException;

import org.springframework.dao.DataAccessException;
import org.springframework.dao.DataAccessResourceFailureException;
import org.springframework.dao.DataIntegrityViolationException;
@@ -30,6 +29,7 @@ import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.dao.InvalidDataAccessResourceUsageException;
import org.springframework.dao.PermissionDeniedDataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.data.mongodb.ClientSessionException;
import org.springframework.data.mongodb.MongoTransactionException;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
@@ -37,10 +37,10 @@ import org.springframework.data.mongodb.util.MongoDbErrorCodes;
import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;

import com.mongodb.BulkWriteException;
import com.mongodb.MongoBulkWriteException;
import com.mongodb.MongoException;
import com.mongodb.MongoServerException;
import com.mongodb.MongoSocketException;
import com.mongodb.bulk.BulkWriteError;

/**
@@ -51,7 +51,6 @@ import com.mongodb.bulk.BulkWriteError;
 * @author Oliver Gierke
 * @author Michal Vich
 * @author Christoph Strobl
 * @author Brice Vandeputte
 */
public class MongoExceptionTranslator implements PersistenceExceptionTranslator {

@@ -81,10 +80,6 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
			throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex);
		}

		if (ex instanceof MongoSocketException) {
			return new DataAccessResourceFailureException(ex.getMessage(), ex);
		}

		String exception = ClassUtils.getShortName(ClassUtils.getUserClass(ex.getClass()));

		if (DUPLICATE_KEY_EXCEPTIONS.contains(exception)) {
@@ -117,6 +112,10 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
			return new DataIntegrityViolationException(ex.getMessage(), ex);
		}

		if (ex instanceof BulkWriteException) {
			return new BulkOperationException(ex.getMessage(), (BulkWriteException) ex);
		}

		// All other MongoExceptions
		if (ex instanceof MongoException) {

@@ -136,7 +135,6 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
		} else if (MongoDbErrorCodes.isTransactionFailureCode(code)) {
			return new MongoTransactionException(ex.getMessage(), ex);
		}

		return new UncategorizedMongoDbException(ex.getMessage(), ex);
	}
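
For reference, a hedged sketch (not part of the diff) of how the translator is typically applied around a raw driver call; collection and document are placeholder names:

// Illustrative only: translating a driver exception into Spring's DataAccessException hierarchy.
PersistenceExceptionTranslator translator = new MongoExceptionTranslator();
try {
	collection.insertOne(document); // some driver call that may fail
} catch (RuntimeException ex) {
	DataAccessException translated = translator.translateExceptionIfPossible(ex);
	throw translated != null ? translated : ex;
}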
@@ -15,23 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.mapping.Encrypted;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.mongodb.core.mapping.Unwrapped.Nullable;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -40,7 +24,6 @@ import org.springframework.util.Assert;
|
||||
* following mapping rules.
|
||||
* <p>
|
||||
* <strong>Required Properties</strong>
|
||||
* </p>
|
||||
* <ul>
|
||||
* <li>Properties of primitive type</li>
|
||||
* </ul>
|
||||
@@ -62,8 +45,7 @@ import org.springframework.util.Assert;
|
||||
* {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into
|
||||
* {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more
|
||||
* specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation.
|
||||
|
||||
* {@link Encrypted} properties will contain {@literal encrypt} information.
|
||||
* </p>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
@@ -78,88 +60,6 @@ public interface MongoJsonSchemaCreator {
|
||||
*/
|
||||
MongoJsonSchema createSchemaFor(Class<?> type);
|
||||
|
||||
/**
|
||||
* Filter matching {@link JsonSchemaProperty properties}.
|
||||
*
|
||||
* @param filter the {@link Predicate} to evaluate for inclusion. Must not be {@literal null}.
|
||||
* @return new instance of {@link MongoJsonSchemaCreator}.
|
||||
* @since 3.3
|
||||
*/
|
||||
MongoJsonSchemaCreator filter(Predicate<JsonSchemaPropertyContext> filter);
|
||||
|
||||
/**
|
||||
* The context in which a specific {@link #getProperty()} is encountered during schema creation.
|
||||
*
|
||||
* @since 3.3
|
||||
*/
|
||||
interface JsonSchemaPropertyContext {
|
||||
|
||||
/**
|
||||
* The path to a given field/property in dot notation.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
String getPath();
|
||||
|
||||
/**
|
||||
* The current property.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
MongoPersistentProperty getProperty();
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoPersistentEntity} for a given property.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @param <T>
|
||||
* @return {@literal null} if the property is not an entity. It is nevertheless recommend to check
|
||||
* {@link PersistentProperty#isEntity()} first.
|
||||
*/
|
||||
@Nullable
|
||||
<T> MongoPersistentEntity<T> resolveEntity(MongoPersistentProperty property);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* A filter {@link Predicate} that matches {@link Encrypted encrypted properties} and those having nested ones.
|
||||
*
|
||||
* @return new instance of {@link Predicate}.
|
||||
* @since 3.3
|
||||
*/
|
||||
static Predicate<JsonSchemaPropertyContext> encryptedOnly() {
|
||||
|
||||
return new Predicate<JsonSchemaPropertyContext>() {
|
||||
|
||||
// cycle guard
|
||||
private final Set<MongoPersistentProperty> seen = new HashSet<>();
|
||||
|
||||
@Override
|
||||
public boolean test(JsonSchemaPropertyContext context) {
|
||||
return extracted(context.getProperty(), context);
|
||||
}
|
||||
|
||||
private boolean extracted(MongoPersistentProperty property, JsonSchemaPropertyContext context) {
|
||||
if (property.isAnnotationPresent(Encrypted.class)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!property.isEntity() || seen.contains(property)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
seen.add(property);
|
||||
|
||||
for (MongoPersistentProperty nested : context.resolveEntity(property)) {
|
||||
if (extracted(nested, context)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
	/**
	 * Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given
	 * {@link MongoConverter}.
@@ -172,41 +72,4 @@ public interface MongoJsonSchemaCreator {
		Assert.notNull(mongoConverter, "MongoConverter must not be null!");
		return new MappingMongoJsonSchemaCreator(mongoConverter);
	}

	/**
	 * Creates a new {@link MongoJsonSchemaCreator} that is aware of type mappings and potential
	 * {@link org.springframework.data.spel.spi.EvaluationContextExtension extensions}.
	 *
	 * @param mappingContext must not be {@literal null}.
	 * @return new instance of {@link MongoJsonSchemaCreator}.
	 * @since 3.3
	 */
	static MongoJsonSchemaCreator create(MappingContext mappingContext) {

		MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
		converter.setCustomConversions(MongoCustomConversions.create(config -> {}));
		converter.afterPropertiesSet();

		return create(converter);
	}

	/**
	 * Creates a new {@link MongoJsonSchemaCreator} that does not consider potential extensions - suitable for testing. We
	 * recommend to use {@link #create(MappingContext)}.
	 *
	 * @return new instance of {@link MongoJsonSchemaCreator}.
	 * @since 3.3
	 */
	static MongoJsonSchemaCreator create() {

		MongoMappingContext mappingContext = new MongoMappingContext();
		mappingContext.setSimpleTypeHolder(MongoSimpleTypes.HOLDER);
		mappingContext.afterPropertiesSet();

		MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
		converter.setCustomConversions(MongoCustomConversions.create(config -> {}));
		converter.afterPropertiesSet();

		return create(converter);
	}
}
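
For reference, a short usage sketch (not part of the diff) combining the create(), filter() and encryptedOnly() members shown above; Person is a placeholder domain type:

// Full schema for a domain type.
MongoJsonSchema fullSchema = MongoJsonSchemaCreator.create().createSchemaFor(Person.class);

// Restrict the generated schema to @Encrypted properties using the predicate defined above.
MongoJsonSchema encryptedOnlySchema = MongoJsonSchemaCreator.create()
		.filter(MongoJsonSchemaCreator.encryptedOnly())
		.createSchemaFor(Person.class);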
@@ -27,7 +27,6 @@ import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
@@ -41,13 +40,13 @@ import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.Cursor;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
@@ -58,7 +57,7 @@ import com.mongodb.client.result.UpdateResult;
|
||||
* Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but
|
||||
* a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK
|
||||
* proxy).
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>NOTE:</strong> Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
|
||||
* specific documentation to learn more about <a href="https://docs.mongodb.com/manual/core/transactions/">Multi
|
||||
* Document Transactions</a>.
|
||||
@@ -125,7 +124,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes a {@link DbCallback} translating any exceptions as necessary.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not
|
||||
@@ -138,7 +137,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link CollectionCallback} on the entity collection of the specified class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
@@ -151,7 +150,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link CollectionCallback} on the collection of the given name.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be
|
||||
@@ -176,7 +175,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession}
|
||||
* provided by the given {@link Supplier} to each and every command issued against MongoDB.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use the
|
||||
* {@link SessionScoped#execute(SessionCallback, Consumer)} hook to potentially close the {@link ClientSession}.
|
||||
*
|
||||
@@ -212,7 +211,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoOperations}.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
@@ -223,10 +222,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link Query} on the entity collection of the specified {@code entityType} backed by a Mongo DB
|
||||
* {@link com.mongodb.client.FindIterable}.
|
||||
* {@link Cursor}.
|
||||
* <p>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} that needs to
|
||||
* be closed.
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed.
|
||||
*
|
||||
* @param query the query class that specifies the criteria used to find a record and also an optional fields
|
||||
* specification. Must not be {@literal null}.
|
||||
@@ -239,10 +237,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link Query} on the entity collection of the specified {@code entityType} and collection backed
|
||||
* by a Mongo DB {@link com.mongodb.client.FindIterable}.
|
||||
* by a Mongo DB {@link Cursor}.
|
||||
* <p>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} that needs to
|
||||
* be closed.
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed.
|
||||
*
|
||||
* @param query the query class that specifies the criteria used to find a record and also an optional fields
|
||||
* specification. Must not be {@literal null}.
|
||||
@@ -300,7 +297,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* is created on first interaction with the server. Collections can be explicitly created via
|
||||
* {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class)
|
||||
* exists} first.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection. Must not be {@literal null}.
|
||||
@@ -310,7 +307,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a name indicated by the entity class exists.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param entityClass class that determines the name of the collection. Must not be {@literal null}.
|
||||
@@ -320,7 +317,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a given name exists.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection. Must not be {@literal null}.
|
||||
@@ -330,7 +327,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Drop the collection with the name indicated by the entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}.
|
||||
@@ -339,7 +336,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Drop the collection with the given name.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection to drop/delete.
|
||||
@@ -361,7 +358,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
IndexOperations indexOps(Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.client.MongoDatabase} level.
|
||||
* Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.DB} level.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 1.7
|
||||
@@ -403,10 +400,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Query for a list of objects of type T from the collection used by the entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
|
||||
* to map objects since the test for class type is done in the client and not on the server.
|
||||
*
|
||||
@@ -417,10 +414,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Query for a list of objects of type T from the specified collection.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
|
||||
* to map objects since the test for class type is done in the client and not on the server.
|
||||
*
|
||||
@@ -519,11 +516,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> AggregationResults<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* <p>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class. The name of the inputCollection is
|
||||
* derived from the inputType of the aggregation.
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
|
||||
* results will be mapped to the given entity class. The name of the inputCollection is derived from the inputType of
|
||||
* the aggregation.
|
||||
* <p>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
@@ -538,12 +535,12 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, String collectionName, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <br />
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class and are returned as stream. The name
|
||||
* of the inputCollection is derived from the inputType of the aggregation.
|
||||
* <br />
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
|
||||
* results will be mapped to the given entity class and are returned as stream. The name of the inputCollection is
|
||||
* derived from the inputType of the aggregation.
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
*
|
||||
@@ -556,11 +553,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <br />
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class.
|
||||
* <br />
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
|
||||
* results will be mapped to the given entity class.
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
*
|
||||
@@ -575,11 +572,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(Aggregation aggregation, Class<?> inputType, Class<O> outputType);
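
For reference, a hedged sketch (not part of the diff) of streaming an aggregation through this method; Order, OrderSummary, the pipeline and the field names are placeholders, and template stands for any MongoOperations instance:

// Illustrative only: the CloseableIterator must be closed, hence try-with-resources.
Aggregation aggregation = Aggregation.newAggregation(
		Aggregation.match(Criteria.where("status").is("OPEN")),
		Aggregation.group("customerId").count().as("orders"));

try (CloseableIterator<OrderSummary> it = template.aggregateStream(aggregation, Order.class, OrderSummary.class)) {
	it.forEachRemaining(summary -> System.out.println(summary));
}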
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <br />
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class.
|
||||
* <br />
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
|
||||
* results will be mapped to the given entity class.
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
*
|
||||
@@ -702,10 +699,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the
|
||||
* specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -720,10 +717,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
|
||||
* type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -768,10 +765,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -784,10 +781,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a List of the specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -881,83 +878,71 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify </a>
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @return the converted object that was updated before it was updated or {@literal null}, if not found.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
@Nullable
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, Class<T> entityClass);
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify </a>
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object that was updated before it was updated or {@literal null}, if not found.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
@Nullable
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, Class<T> entityClass, String collectionName);
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify </a>
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents.
|
||||
* @param update the {@link Update} to apply on matching documents.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information.
|
||||
* @param entityClass the parametrized type.
|
||||
* @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
|
||||
* {@link FindAndModifyOptions#isReturnNew()} this will either be the object as it was before the update or as
|
||||
* it is after the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
@Nullable
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
	/**
	 * Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify </a>
	 * Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
	 * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
	 * {@link FindAndModifyOptions} into account.
	 *
	 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
	 *          fields specification. Must not be {@literal null}.
	 * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
	 * @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
	 * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
	 * @param entityClass the parametrized type. Must not be {@literal null}.
	 * @param collectionName the collection to query. Must not be {@literal null}.
	 * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
	 *         {@link FindAndModifyOptions#isReturnNew()} this will either be the object as it was before the update or as
	 *         it is after the update.
	 * @since 3.0
	 * @see Update
	 * @see AggregationUpdate
	 */
	@Nullable
	<T> T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass,
	<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
			String collectionName);
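
For reference, a hedged call sketch (not part of the diff) matching the overload declared above; Person, the "people" collection and the field names are placeholders, and template stands for any MongoOperations instance:

// Illustrative only: increments a counter and returns the document as it looks after the update.
Person updated = template.findAndModify(
		Query.query(Criteria.where("name").is("alice")),
		new Update().inc("visits", 1),
		FindAndModifyOptions.options().returnNew(true),
		Person.class,
		"people");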
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
|
||||
* document. <br />
|
||||
* The collection name is derived from the {@literal replacement} type. <br />
|
||||
@@ -977,7 +962,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
|
||||
* document.<br />
|
||||
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
|
||||
@@ -997,7 +982,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1018,7 +1003,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1041,7 +1026,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1066,7 +1051,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1094,7 +1079,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1120,9 +1105,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the
|
||||
* specified type. The first document that matches the query is returned and also removed from the collection in the
|
||||
* database.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -1137,10 +1122,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
|
||||
* type. The first document that matches the query is returned and also removed from the collection in the database.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -1160,12 +1145,6 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the application's needs, use
|
||||
* {@link #estimatedCount(Class)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1182,12 +1161,6 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the application's needs, use
|
||||
* {@link #estimatedCount(String)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
@@ -1196,35 +1169,6 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*/
|
||||
long count(Query query, String collectionName);
|
||||
|
||||
/**
|
||||
* Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type},
|
||||
* based on collection statistics.
|
||||
* <br />
|
||||
* Please make sure to read the MongoDB reference documentation about limitations, e.g. on sharded clusters or inside
|
||||
* transactions.
|
||||
*
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return the estimated number of documents.
|
||||
* @since 3.1
|
||||
*/
|
||||
default long estimatedCount(Class<?> entityClass) {
|
||||
|
||||
Assert.notNull(entityClass, "Entity class must not be null!");
|
||||
return estimatedCount(getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
/**
|
||||
* Estimate the number of documents in the given collection based on collection statistics.
|
||||
* <br />
|
||||
* Please make sure to read the MongoDB reference documentation about limitations, e.g. on sharded clusters or inside
|
||||
* transactions.
|
||||
*
|
||||
* @param collectionName must not be {@literal null}.
|
||||
* @return the estimated number of documents.
|
||||
* @since 3.1
|
||||
*/
|
||||
long estimatedCount(String collectionName);
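/*
 * Usage sketch: estimatedCount is the cheap, statistics-based alternative to count for empty
 * queries. The "template" reference, Person type and field name are hypothetical.
 *
 * long roughTotal   = template.estimatedCount(Person.class);             // collection metadata, fast
 * long exactMatches = template.count(
 *     Query.query(Criteria.where("active").is(true)), Person.class);     // countDocuments, accurate
 */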
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}. <br />
|
||||
@@ -1232,12 +1176,6 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the application's needs, use
|
||||
* {@link #estimatedCount(String)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1249,39 +1187,34 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Insert the object into the collection for the entity type of the object to save.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <br />
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
* <br />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the inserted object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> T insert(T objectToSave);
|
||||
|
||||
/**
|
||||
* Insert the object into the specified collection.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
* <br />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
* @return the inserted object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> T insert(T objectToSave, String collectionName);
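/*
 * Usage sketch: insert stores a new document and populates a String id from the generated
 * ObjectId. The "template" reference, Person type and "people" collection are hypothetical.
 *
 * Person person = new Person("Ada", "Lovelace");        // id is null before the insert
 * Person saved  = template.insert(person, "people");    // explicit collection name
 * // saved.getId() now holds the generated ObjectId rendered as a String
 */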
|
||||
|
||||
@@ -1315,42 +1248,37 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Save the object to the collection for the entity type of the object to save. This will perform an insert if the
|
||||
* object is not already present, that is an 'upsert'.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <br />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> T save(T objectToSave);
|
||||
|
||||
/**
|
||||
* Save the object to the specified collection. This will perform an insert if the object is not already present, that
|
||||
* is an 'upsert'.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API.
|
||||
* See <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type Conversion</a> for more details.
|
||||
* <br />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> T save(T objectToSave, String collectionName);
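/*
 * Usage sketch: unlike insert, save performs an upsert, so re-saving an object that already has
 * an id overwrites the stored document. Person and the "people" collection are hypothetical.
 *
 * Person person = template.insert(new Person("Ada", "Lovelace"), "people");
 * person.setLastname("Byron");
 * template.save(person, "people");                      // updates the existing document in place
 */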
|
||||
|
||||
@@ -1358,111 +1286,99 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult upsert(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
UpdateResult upsert(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support. <br />
|
||||
* domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult upsert(Query query, UpdateDefinition update, String collectionName);
|
||||
UpdateResult upsert(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document.
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult upsert(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
UpdateResult upsert(Query query, Update update, Class<?> entityClass, String collectionName);
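/*
 * Usage sketch: upsert updates the first matching document or inserts a new one combining query
 * and update. The "template" reference, Person type and field names are hypothetical.
 *
 * UpdateResult result = template.upsert(
 *     Query.query(Criteria.where("email").is("ada@example.org")),
 *     new Update().setOnInsert("joined", new Date()).set("lastLogin", new Date()),
 *     Person.class, "people");
 * boolean inserted = result.getUpsertedId() != null;    // null when an existing document was updated
 */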
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the collection of the entity class that matches the query document with
|
||||
* the provided update document.
|
||||
* the provided update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateFirst(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
UpdateResult updateFirst(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support. <br />
|
||||
* domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, Class, String)} instead.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateFirst(Query query, UpdateDefinition update, String collectionName);
|
||||
UpdateResult updateFirst(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateFirst(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
UpdateResult updateFirst(Query query, Update update, Class<?> entityClass, String collectionName);
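/*
 * Usage sketch: updateFirst modifies only the first match, updateMulti all matches; neither
 * honors Query sorting, as the notes above point out. The "template" reference, Person type and
 * field names are hypothetical.
 *
 * UpdateResult first = template.updateFirst(
 *     Query.query(Criteria.where("status").is("NEW")),
 *     Update.update("status", "PROCESSING"), Person.class, "people");
 *
 * UpdateResult all = template.updateMulti(
 *     Query.query(Criteria.where("status").is("NEW")),
 *     Update.update("status", "PROCESSING"), Person.class, "people");
 */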
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
|
||||
@@ -1470,34 +1386,27 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateMulti(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
UpdateResult updateMulti(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the specified collection that matches the query document criteria with the
|
||||
* provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support.
|
||||
* domain type information. Use {@link #updateMulti(Query, Update, Class, String)} to get full type specific support.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateMulti(Query query, UpdateDefinition update, String collectionName);
|
||||
UpdateResult updateMulti(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
|
||||
@@ -1505,16 +1414,13 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateMulti(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
UpdateResult updateMulti(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Remove the given object from the collection by {@literal id} and (if applicable) its
|
||||
|
||||
@@ -1,92 +0,0 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.beans.factory.FactoryBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.ServerApi;
|
||||
import com.mongodb.ServerApi.Builder;
|
||||
import com.mongodb.ServerApiVersion;
|
||||
|
||||
/**
|
||||
* {@link FactoryBean} for creating {@link ServerApi} using the {@link ServerApi.Builder}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public class MongoServerApiFactoryBean implements FactoryBean<ServerApi> {
|
||||
|
||||
private String version;
|
||||
private @Nullable Boolean deprecationErrors;
|
||||
private @Nullable Boolean strict;
|
||||
|
||||
/**
|
||||
* @param version the version string either as the enum name or the server version value.
|
||||
* @see ServerApiVersion
|
||||
*/
|
||||
public void setVersion(String version) {
|
||||
this.version = version;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param deprecationErrors whether use of deprecated server API features should raise errors.
|
||||
* @see ServerApi.Builder#deprecationErrors(boolean)
|
||||
*/
|
||||
public void setDeprecationErrors(@Nullable Boolean deprecationErrors) {
|
||||
this.deprecationErrors = deprecationErrors;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param strict whether to enable strict mode, rejecting commands that are not part of the declared API version.
|
||||
* @see ServerApi.Builder#strict(boolean)
|
||||
*/
|
||||
public void setStrict(@Nullable Boolean strict) {
|
||||
this.strict = strict;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public ServerApi getObject() throws Exception {
|
||||
|
||||
Builder builder = ServerApi.builder().version(version());
|
||||
|
||||
if (deprecationErrors != null) {
|
||||
builder = builder.deprecationErrors(deprecationErrors);
|
||||
}
|
||||
if (strict != null) {
|
||||
builder = builder.strict(strict);
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return ServerApi.class;
|
||||
}
|
||||
|
||||
private ServerApiVersion version() {
|
||||
try {
|
||||
// lookup by name, e.g. 'V1'
|
||||
return ObjectUtils.caseInsensitiveValueOf(ServerApiVersion.values(), version);
|
||||
} catch (IllegalArgumentException e) {
|
||||
// or just the version number, e.g. '1'
|
||||
return ServerApiVersion.findByValue(version);
|
||||
}
|
||||
}
|
||||
}
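/*
 * Usage sketch: configuring the factory bean programmatically. The version string may be the
 * enum name ("V1") or the bare value ("1"), as handled by version() above.
 *
 * MongoServerApiFactoryBean factory = new MongoServerApiFactoryBean();
 * factory.setVersion("V1");
 * factory.setStrict(true);
 * ServerApi serverApi = factory.getObject();   // ServerApi targeting version V1 in strict mode
 */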
|
||||
File diff suppressed because it is too large
@@ -15,63 +15,62 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.bson.Document;
|
||||
import lombok.AccessLevel;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mapping.SimplePropertyHandler;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.PersistentPropertyTranslator;
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import org.springframework.data.util.Predicates;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.projection.ProjectionInformation;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
* Common operations performed on properties of an entity, like extracting field information for projection creation.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
@RequiredArgsConstructor(access = AccessLevel.PACKAGE)
|
||||
class PropertyOperations {
|
||||
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
|
||||
PropertyOperations(MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
this.mappingContext = mappingContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* For cases where {@code fields} is {@link Document#isEmpty() empty} include only fields that are required for
|
||||
* creating the projection (target) type if the {@code EntityProjection} is a {@literal DTO projection} or a
|
||||
* creating the projection (target) type if the {@code targetType} is a {@literal DTO projection} or a
|
||||
* {@literal closed interface projection}.
|
||||
*
|
||||
* @param projection must not be {@literal null}.
|
||||
* @param projectionFactory must not be {@literal null}.
|
||||
* @param fields must not be {@literal null}.
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @param targetType must not be {@literal null}.
|
||||
* @return {@link Document} with fields to be included.
|
||||
*/
|
||||
Document computeMappedFieldsForProjection(EntityProjection<?, ?> projection,
|
||||
Document fields) {
|
||||
Document computeFieldsForProjection(ProjectionFactory projectionFactory, Document fields, Class<?> domainType,
|
||||
Class<?> targetType) {
|
||||
|
||||
if (!projection.isClosedProjection()) {
|
||||
if (!fields.isEmpty() || ClassUtils.isAssignable(domainType, targetType)) {
|
||||
return fields;
|
||||
}
|
||||
|
||||
Document projectedFields = new Document();
|
||||
|
||||
if (projection.getMappedType().getType().isInterface()) {
|
||||
projection.forEach(it -> {
|
||||
projectedFields.put(it.getPropertyPath().getSegment(), 1);
|
||||
});
|
||||
if (targetType.isInterface()) {
|
||||
|
||||
ProjectionInformation projectionInformation = projectionFactory.getProjectionInformation(targetType);
|
||||
|
||||
if (projectionInformation.isClosed()) {
|
||||
projectionInformation.getInputProperties().forEach(it -> projectedFields.append(it.getName(), 1));
|
||||
}
|
||||
} else {
|
||||
|
||||
// DTO projections use merged metadata between domain type and result type
|
||||
PersistentPropertyTranslator translator = PersistentPropertyTranslator.create(
|
||||
mappingContext.getRequiredPersistentEntity(projection.getDomainType()),
|
||||
Predicates.negate(MongoPersistentProperty::hasExplicitFieldName));
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = mappingContext
|
||||
.getRequiredPersistentEntity(projection.getMappedType());
|
||||
for (MongoPersistentProperty property : persistentEntity) {
|
||||
projectedFields.put(translator.translate(property).getFieldName(), 1);
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(targetType);
|
||||
if (entity != null) {
|
||||
entity.doWithProperties(
|
||||
(SimplePropertyHandler) persistentProperty -> projectedFields.append(persistentProperty.getName(), 1));
|
||||
}
|
||||
}
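/*
 * Usage sketch: for a closed interface projection the computed fields document contains only the
 * projected properties. Person, PersonSummary and the "template" reference are hypothetical.
 *
 * interface PersonSummary { String getFirstname(); }
 *
 * // With an empty fields object on the query, the mapped projection becomes { "firstname" : 1 }.
 * List<PersonSummary> summaries = template.query(Person.class)
 *     .as(PersonSummary.class)
 *     .matching(Query.query(Criteria.where("lastname").is("Lovelace")))
 *     .all();
 */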
|
||||
|
||||
|
||||
@@ -1,927 +0,0 @@
|
||||
/*
|
||||
* Copyright 2020-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.Codec;
|
||||
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.CodecRegistryProvider;
|
||||
import org.springframework.data.mongodb.MongoExpression;
|
||||
import org.springframework.data.mongodb.core.MappedDocument.MappedUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationExpression;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.ShardKey;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.client.model.CountOptions;
|
||||
import com.mongodb.client.model.DeleteOptions;
|
||||
import com.mongodb.client.model.ReplaceOptions;
|
||||
import com.mongodb.client.model.UpdateOptions;
|
||||
|
||||
/**
|
||||
* {@link QueryOperations} centralizes common operations required before an operation is actually ready to be executed.
|
||||
* This involves mapping {@link Query queries} into their respective MongoDB representation, computing execution options
|
||||
* for {@literal count}, {@literal remove}, and other methods.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
*/
|
||||
class QueryOperations {
|
||||
|
||||
private final QueryMapper queryMapper;
|
||||
private final UpdateMapper updateMapper;
|
||||
private final EntityOperations entityOperations;
|
||||
private final PropertyOperations propertyOperations;
|
||||
private final CodecRegistryProvider codecRegistryProvider;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final AggregationUtil aggregationUtil;
|
||||
private final Map<Class<?>, Document> mappedShardKey = new ConcurrentHashMap<>(1);
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link QueryOperations}.
|
||||
*
|
||||
* @param queryMapper must not be {@literal null}.
|
||||
* @param updateMapper must not be {@literal null}.
|
||||
* @param entityOperations must not be {@literal null}.
|
||||
* @param propertyOperations must not be {@literal null}.
|
||||
* @param codecRegistryProvider must not be {@literal null}.
|
||||
*/
|
||||
QueryOperations(QueryMapper queryMapper, UpdateMapper updateMapper, EntityOperations entityOperations,
|
||||
PropertyOperations propertyOperations, CodecRegistryProvider codecRegistryProvider) {
|
||||
|
||||
this.queryMapper = queryMapper;
|
||||
this.updateMapper = updateMapper;
|
||||
this.entityOperations = entityOperations;
|
||||
this.propertyOperations = propertyOperations;
|
||||
this.codecRegistryProvider = codecRegistryProvider;
|
||||
this.mappingContext = queryMapper.getMappingContext();
|
||||
this.aggregationUtil = new AggregationUtil(queryMapper, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link QueryContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link QueryContext}.
|
||||
*/
|
||||
QueryContext createQueryContext(Query query) {
|
||||
return new QueryContext(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DistinctQueryContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link DistinctQueryContext}.
|
||||
*/
|
||||
DistinctQueryContext distinctQueryContext(Query query, String fieldName) {
|
||||
return new DistinctQueryContext(query, fieldName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link CountContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link CountContext}.
|
||||
*/
|
||||
CountContext countQueryContext(Query query) {
|
||||
return new CountContext(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting multiple documents.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert a new document when no existing document is found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateContext(UpdateDefinition updateDefinition, Query query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, true, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting a single document.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert a new document when no existing document is found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Query query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, false, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting a single document.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert a new document when no existing document is found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Document query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, false, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param replacement the {@link MappedDocument mapped replacement} document.
|
||||
* @param upsert use {@literal true} to insert the {@literal replacement} when no existing document is found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext replaceSingleContext(MappedDocument replacement, boolean upsert) {
|
||||
return new UpdateContext(replacement, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance removing all matching documents.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link QueryContext}.
|
||||
*/
|
||||
DeleteContext deleteQueryContext(Query query) {
|
||||
return new DeleteContext(query, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance removing only the first matching document.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link QueryContext}.
|
||||
*/
|
||||
DeleteContext deleteSingleContext(Query query) {
|
||||
return new DeleteContext(query, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link AggregationDefinition} for the given {@link Aggregation}.
|
||||
*
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @param inputType fallback mapping type in case of untyped aggregation. Can be {@literal null}.
|
||||
* @return new instance of {@link AggregationDefinition}.
|
||||
* @since 3.2
|
||||
*/
|
||||
AggregationDefinition createAggregation(Aggregation aggregation, @Nullable Class<?> inputType) {
|
||||
return new AggregationDefinition(aggregation, inputType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link AggregationDefinition} for the given {@link Aggregation}.
|
||||
*
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @param aggregationOperationContext the {@link AggregationOperationContext} to use. Can be {@literal null}.
|
||||
* @return new instance of {@link AggregationDefinition}.
|
||||
* @since 3.2
|
||||
*/
|
||||
AggregationDefinition createAggregation(Aggregation aggregation,
|
||||
@Nullable AggregationOperationContext aggregationOperationContext) {
|
||||
return new AggregationDefinition(aggregation, aggregationOperationContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link QueryContext} encapsulates common tasks required to convert a {@link Query} into its MongoDB document
|
||||
* representation, mapping field names, as well as determining and applying {@link Collation collations}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class QueryContext {
|
||||
|
||||
private final Query query;
|
||||
|
||||
/**
|
||||
* Create a new {@link QueryContext} instance from the given {@literal query} (can be either a {@link Query} or a
|
||||
* plain {@link Document}).
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
*/
|
||||
private QueryContext(@Nullable Query query) {
|
||||
this.query = query != null ? query : new Query();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Query getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the raw {@link Query#getQueryObject() unmapped document} from the {@link Query}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Document getQueryObject() {
|
||||
return query.getQueryObject();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped MongoDB query representation.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param entityLookup the {@link Function lookup} used to provide the {@link MongoPersistentEntity} for the
|
||||
* given {@literal domainType}.
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
<T> Document getMappedQuery(@Nullable Class<T> domainType,
|
||||
Function<Class<T>, MongoPersistentEntity<?>> entityLookup) {
|
||||
return getMappedQuery(domainType == null ? null : entityLookup.apply(domainType));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped MongoDB query representation.
|
||||
*
|
||||
* @param entity the Entity to map field names to. Can be {@literal null}.
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
<T> Document getMappedQuery(@Nullable MongoPersistentEntity<T> entity) {
|
||||
return queryMapper.getMappedObject(getQueryObject(), entity);
|
||||
}
|
||||
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity,
|
||||
EntityProjection<?, ?> projection) {
|
||||
|
||||
Document fields = evaluateFields(entity);
|
||||
|
||||
if (entity == null) {
|
||||
return fields;
|
||||
}
|
||||
|
||||
Document mappedFields;
|
||||
if (!fields.isEmpty()) {
|
||||
mappedFields = queryMapper.getMappedFields(fields, entity);
|
||||
} else {
|
||||
mappedFields = propertyOperations.computeMappedFieldsForProjection(projection, fields);
|
||||
mappedFields = queryMapper.addMetaAttributes(mappedFields, entity);
|
||||
}
|
||||
|
||||
if (entity.hasTextScoreProperty() && mappedFields.containsKey(entity.getTextScoreProperty().getFieldName())
|
||||
&& !query.getQueryObject().containsKey("$text")) {
|
||||
mappedFields.remove(entity.getTextScoreProperty().getFieldName());
|
||||
}
|
||||
|
||||
if (mappedFields.isEmpty()) {
|
||||
return BsonUtils.EMPTY_DOCUMENT;
|
||||
}
|
||||
|
||||
return mappedFields;
|
||||
}
|
||||
|
||||
private Document evaluateFields(@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
Document fields = query.getFieldsObject();
|
||||
|
||||
if (fields.isEmpty()) {
|
||||
return BsonUtils.EMPTY_DOCUMENT;
|
||||
}
|
||||
|
||||
Document evaluated = new Document();
|
||||
|
||||
for (Entry<String, Object> entry : fields.entrySet()) {
|
||||
|
||||
if (entry.getValue() instanceof MongoExpression) {
|
||||
|
||||
AggregationOperationContext ctx = entity == null ? Aggregation.DEFAULT_CONTEXT
|
||||
: new RelaxedTypeBasedAggregationOperationContext(entity.getType(), mappingContext, queryMapper);
|
||||
|
||||
evaluated.put(entry.getKey(), AggregationExpression.from((MongoExpression) entry.getValue()).toDocument(ctx));
|
||||
} else {
|
||||
evaluated.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
return evaluated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped {@link Query#getSortObject() sort} option.
|
||||
*
|
||||
* @param entity the Entity to map field names to. Can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Document getMappedSort(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return queryMapper.getMappedSort(query.getSortObject(), entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply the {@link com.mongodb.client.model.Collation} extracted from the {@link Query}, if present, or fall back to
|
||||
* the {@literal domain types} default {@link org.springframework.data.mongodb.core.mapping.Document#collation()
|
||||
* collation}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param consumer must not be {@literal null}.
|
||||
*/
|
||||
void applyCollation(@Nullable Class<?> domainType, Consumer<com.mongodb.client.model.Collation> consumer) {
|
||||
getCollation(domainType).ifPresent(consumer::accept);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link com.mongodb.client.model.Collation} extracted from the {@link Query}, if present, or fall back to
|
||||
* the {@literal domain types} default {@link org.springframework.data.mongodb.core.mapping.Document#collation()
|
||||
* collation}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Optional<com.mongodb.client.model.Collation> getCollation(@Nullable Class<?> domainType) {
|
||||
|
||||
return entityOperations.forType(domainType).getCollation(query) //
|
||||
.map(Collation::toMongoCollation);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal distinct} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class DistinctQueryContext extends QueryContext {
|
||||
|
||||
private final String fieldName;
|
||||
|
||||
/**
|
||||
* Create a new {@link DistinctQueryContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
* @param fieldName must not be {@literal null}.
|
||||
*/
|
||||
private DistinctQueryContext(@Nullable Object query, String fieldName) {
|
||||
|
||||
super(query instanceof Document ? new BasicQuery((Document) query) : (Query) query);
|
||||
this.fieldName = fieldName;
|
||||
}
|
||||
|
||||
@Override
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity,
|
||||
EntityProjection<?, ?> projection) {
|
||||
return getMappedFields(entity);
|
||||
}
|
||||
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return queryMapper.getMappedFields(new Document(fieldName, 1), entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the mapped field name to project to.
|
||||
*
|
||||
* @param entity can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
String getMappedFieldName(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return getMappedFields(entity).keySet().iterator().next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the MongoDB native representation of the given {@literal type}.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
<T> Class<T> getDriverCompatibleClass(Class<T> type) {
|
||||
|
||||
return codecRegistryProvider.getCodecFor(type) //
|
||||
.map(Codec::getEncoderClass) //
|
||||
.orElse((Class<T>) BsonValue.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the most specific read target type based on the user {@literal requestedTargetType} and the property type
|
||||
* based on meta information extracted from the {@literal domainType}.
|
||||
*
|
||||
* @param requestedTargetType must not be {@literal null}.
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Class<?> getMostSpecificConversionTargetType(Class<?> requestedTargetType, Class<?> domainType) {
|
||||
|
||||
Class<?> conversionTargetType = requestedTargetType;
|
||||
try {
|
||||
|
||||
Class<?> propertyType = PropertyPath.from(fieldName, domainType).getLeafProperty().getLeafType();
|
||||
|
||||
// use the more specific type but favor the user-requested type over the property one
|
||||
if (ClassUtils.isAssignable(requestedTargetType, propertyType)) {
|
||||
conversionTargetType = propertyType;
|
||||
}
|
||||
} catch (PropertyReferenceException e) {
|
||||
// just don't care about it as we default to Object.class anyway.
|
||||
}
|
||||
|
||||
return conversionTargetType;
|
||||
}
|
||||
}
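/*
 * Usage sketch: getMostSpecificConversionTargetType narrows the requested type to the mapped
 * property type where possible. Assuming a hypothetical Person.age field of type Integer:
 *
 * List<Object> ages = template.findDistinct(new Query(), "age", Person.class, Object.class);
 * // Object.class is narrowed to Integer internally, so the distinct values come back as Integers.
 */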
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal count} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class CountContext extends QueryContext {
|
||||
|
||||
/**
|
||||
* Creates a new {@link CountContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
*/
|
||||
CountContext(@Nullable Query query) {
|
||||
super(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link CountOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
CountOptions getCountOptions(@Nullable Class<?> domainType) {
|
||||
return getCountOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link CountOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
CountOptions getCountOptions(@Nullable Class<?> domainType, @Nullable Consumer<CountOptions> callback) {
|
||||
|
||||
CountOptions options = new CountOptions();
|
||||
Query query = getQuery();
|
||||
|
||||
applyCollation(domainType, options::collation);
|
||||
|
||||
if (query.getLimit() > 0) {
|
||||
options.limit(query.getLimit());
|
||||
}
|
||||
if (query.getSkip() > 0) {
|
||||
options.skip((int) query.getSkip());
|
||||
}
|
||||
if (StringUtils.hasText(query.getHint())) {
|
||||
|
||||
String hint = query.getHint();
|
||||
if (BsonUtils.isJsonDocument(hint)) {
|
||||
options.hint(BsonUtils.parse(hint, codecRegistryProvider));
|
||||
} else {
|
||||
options.hintString(hint);
|
||||
}
|
||||
}
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
}
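/*
 * Usage sketch: limit, skip and hint set on the Query are carried over into the computed
 * CountOptions and therefore bound the server-side count. The "template" reference, Person type
 * and index name are hypothetical.
 *
 * Query query = Query.query(Criteria.where("status").is("ACTIVE"))
 *     .skip(20).limit(10).withHint("status_idx");
 * long count = template.count(query, Person.class);   // counts at most 10 documents after skipping 20
 */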
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal delete} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class DeleteContext extends QueryContext {
|
||||
|
||||
private final boolean multi;
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
* @param multi use {@literal true} to remove all matching documents, {@literal false} for just the first one.
|
||||
*/
|
||||
DeleteContext(@Nullable Query query, boolean multi) {
|
||||
|
||||
super(query);
|
||||
this.multi = multi;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link DeleteOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
DeleteOptions getDeleteOptions(@Nullable Class<?> domainType) {
|
||||
return getDeleteOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link DeleteOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
DeleteOptions getDeleteOptions(@Nullable Class<?> domainType, @Nullable Consumer<DeleteOptions> callback) {
|
||||
|
||||
DeleteOptions options = new DeleteOptions();
|
||||
applyCollation(domainType, options::collation);
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if all matching documents shall be deleted.
|
||||
*/
|
||||
boolean isMulti() {
|
||||
return multi;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * A {@link QueryContext} that encapsulates common tasks required when running {@literal updates}.
 */
class UpdateContext extends QueryContext {

	private final boolean multi;
	private final boolean upsert;
	private final @Nullable UpdateDefinition update;
	private final @Nullable MappedDocument mappedDocument;

	/**
	 * Create a new {@link UpdateContext} instance.
	 *
	 * @param update must not be {@literal null}.
	 * @param query must not be {@literal null}.
	 * @param multi use {@literal true} to update all matching documents.
	 * @param upsert use {@literal true} to insert a new document if none match.
	 */
	UpdateContext(UpdateDefinition update, Document query, boolean multi, boolean upsert) {
		this(update, new BasicQuery(query), multi, upsert);
	}

	/**
	 * Create a new {@link UpdateContext} instance.
	 *
	 * @param update must not be {@literal null}.
	 * @param query can be {@literal null}.
	 * @param multi use {@literal true} to update all matching documents.
	 * @param upsert use {@literal true} to insert a new document if none match.
	 */
	UpdateContext(UpdateDefinition update, @Nullable Query query, boolean multi, boolean upsert) {

		super(query);

		this.multi = multi;
		this.upsert = upsert;
		this.update = update;
		this.mappedDocument = null;
	}

	UpdateContext(MappedDocument update, boolean upsert) {

		super(new BasicQuery(BsonUtils.asDocument(update.getIdFilter())));
		this.multi = false;
		this.upsert = upsert;
		this.mappedDocument = update;
		this.update = null;
	}

	/**
	 * Get the {@link UpdateOptions} applicable for the {@link Query}.
	 *
	 * @param domainType must not be {@literal null}.
	 * @return never {@literal null}.
	 */
	UpdateOptions getUpdateOptions(@Nullable Class<?> domainType) {
		return getUpdateOptions(domainType, null);
	}

	/**
	 * Get the {@link UpdateOptions} applicable for the {@link Query}.
	 *
	 * @param domainType can be {@literal null}.
	 * @param callback a callback to modify the generated options. Can be {@literal null}.
	 * @return
	 */
	UpdateOptions getUpdateOptions(@Nullable Class<?> domainType, @Nullable Consumer<UpdateOptions> callback) {

		UpdateOptions options = new UpdateOptions();
		options.upsert(upsert);

		if (update != null && update.hasArrayFilters()) {
			options
					.arrayFilters(update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()));
		}

		applyCollation(domainType, options::collation);

		if (callback != null) {
			callback.accept(options);
		}

		return options;
	}

	/**
	 * Get the {@link ReplaceOptions} applicable for the {@link Query}.
	 *
	 * @param domainType must not be {@literal null}.
	 * @return never {@literal null}.
	 */
	ReplaceOptions getReplaceOptions(@Nullable Class<?> domainType) {
		return getReplaceOptions(domainType, null);
	}

	/**
	 * Get the {@link ReplaceOptions} applicable for the {@link Query}.
	 *
	 * @param domainType can be {@literal null}.
	 * @param callback a callback to modify the generated options. Can be {@literal null}.
	 * @return
	 */
	ReplaceOptions getReplaceOptions(@Nullable Class<?> domainType, @Nullable Consumer<ReplaceOptions> callback) {

		UpdateOptions updateOptions = getUpdateOptions(domainType);

		ReplaceOptions options = new ReplaceOptions();
		options.collation(updateOptions.getCollation());
		options.upsert(updateOptions.isUpsert());

		if (callback != null) {
			callback.accept(options);
		}

		return options;
	}

	@Override
	<T> Document getMappedQuery(@Nullable MongoPersistentEntity<T> domainType) {

		Document mappedQuery = super.getMappedQuery(domainType);

		if (multi && update.isIsolated() && !mappedQuery.containsKey("$isolated")) {
			mappedQuery.put("$isolated", 1);
		}

		return mappedQuery;
	}

	<T> Document applyShardKey(MongoPersistentEntity<T> domainType, Document filter, @Nullable Document existing) {

		Document shardKeySource = existing != null ? existing
				: mappedDocument != null ? mappedDocument.getDocument() : getMappedUpdate(domainType);

		Document filterWithShardKey = new Document(filter);
		getMappedShardKeyFields(domainType)
				.forEach(key -> filterWithShardKey.putIfAbsent(key, BsonUtils.resolveValue(shardKeySource, key)));

		return filterWithShardKey;
	}

	boolean requiresShardKey(Document filter, @Nullable MongoPersistentEntity<?> domainType) {

		return !multi && domainType != null && domainType.isSharded() && !shardedById(domainType)
				&& !filter.keySet().containsAll(getMappedShardKeyFields(domainType));
	}

	/**
	 * @return {@literal true} if the {@link MongoPersistentEntity#getShardKey() shard key} is the entity's
	 *         {@literal id} property.
	 * @since 3.0
	 */
	private boolean shardedById(MongoPersistentEntity<?> domainType) {

		ShardKey shardKey = domainType.getShardKey();
		if (shardKey.size() != 1) {
			return false;
		}

		String key = shardKey.getPropertyNames().iterator().next();
		if ("_id".equals(key)) {
			return true;
		}

		MongoPersistentProperty idProperty = domainType.getIdProperty();
		return idProperty != null && idProperty.getName().equals(key);
	}

	Set<String> getMappedShardKeyFields(MongoPersistentEntity<?> entity) {
		return getMappedShardKey(entity).keySet();
	}

	Document getMappedShardKey(MongoPersistentEntity<?> entity) {
		return mappedShardKey.computeIfAbsent(entity.getType(),
				key -> queryMapper.getMappedFields(entity.getShardKey().getDocument(), entity));
	}

	/**
	 * Get the already mapped aggregation pipeline to use with an {@link #isAggregationUpdate()}.
	 *
	 * @param domainType must not be {@literal null}.
	 * @return never {@literal null}.
	 */
	List<Document> getUpdatePipeline(@Nullable Class<?> domainType) {

		Class<?> type = domainType != null ? domainType : Object.class;

		AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type, mappingContext,
				queryMapper);
		return aggregationUtil.createPipeline((AggregationUpdate) update, context);
	}

	/**
	 * Get the already mapped update {@link Document}.
	 *
	 * @param entity
	 * @return
	 */
	Document getMappedUpdate(@Nullable MongoPersistentEntity<?> entity) {

		if (update != null) {
			return update instanceof MappedUpdate ? update.getUpdateObject()
					: updateMapper.getMappedObject(update.getUpdateObject(), entity);
		}
		return mappedDocument.getDocument();
	}

	/**
	 * Increase a potential {@link MongoPersistentEntity#getVersionProperty() version property} prior to update if not
	 * already done in the actual {@link UpdateDefinition}.
	 *
	 * @param persistentEntity can be {@literal null}.
	 */
	void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity<?> persistentEntity) {

		if (persistentEntity != null && persistentEntity.hasVersionProperty()) {

			String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName();
			if (!update.modifies(versionFieldName)) {
				update.inc(versionFieldName);
			}
		}
	}

	/**
	 * @return {@literal true} if the update holds an aggregation pipeline.
	 */
	boolean isAggregationUpdate() {
		return update instanceof AggregationUpdate;
	}

	/**
	 * @return {@literal true} if all matching documents should be updated.
	 */
	boolean isMulti() {
		return multi;
	}
}

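To make the shard-key handling above concrete, here is a rough standalone sketch: for a non-multi update on a sharded entity whose filter does not yet carry every shard key field, the missing fields are copied over from the replacement/update document. The shard key ("country", "userId") and the documents are invented for illustration, and the real code resolves values via BsonUtils.resolveValue so that dotted paths work too.

import java.util.Set;
import org.bson.Document;

class ShardKeyFilterSketch {

	// simplified applyShardKey: copy missing shard key fields from the source document into the filter
	static Document withShardKey(Document filter, Document source, Set<String> shardKeyFields) {

		Document filterWithShardKey = new Document(filter);
		shardKeyFields.forEach(key -> filterWithShardKey.putIfAbsent(key, source.get(key)));
		return filterWithShardKey;
	}

	public static void main(String[] args) {

		Set<String> shardKeyFields = Set.of("country", "userId"); // assumed shard key of the entity
		Document filter = new Document("_id", 42); // filter derived from the id only
		Document replacement = new Document("_id", 42).append("country", "AT").append("userId", "u-1");

		// -> Document{{_id=42, country=AT, userId=u-1}}
		System.out.println(withShardKey(filter, replacement, shardKeyFields));
	}
}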
/**
 * A value object that encapsulates common tasks required when running {@literal aggregations}.
 *
 * @since 3.2
 */
class AggregationDefinition {

	private final Aggregation aggregation;
	private final Lazy<AggregationOperationContext> aggregationOperationContext;
	private final Lazy<List<Document>> pipeline;
	private final @Nullable Class<?> inputType;

	/**
	 * Creates a new instance of {@link AggregationDefinition} extracting the input type from either the
	 * {@link org.springframework.data.mongodb.core.aggregation.Aggregation} in case of a {@link TypedAggregation} or
	 * the given {@literal aggregationOperationContext} if present. <br />
	 * Creates a new {@link AggregationOperationContext} if none given, based on the {@link Aggregation} input type and
	 * the desired {@link AggregationOptions#getDomainTypeMapping() domain type mapping}. <br />
	 * Pipelines are mapped on first access of {@link #getAggregationPipeline()} and cached for reuse.
	 *
	 * @param aggregation the source aggregation.
	 * @param aggregationOperationContext can be {@literal null}.
	 */
	AggregationDefinition(Aggregation aggregation, @Nullable AggregationOperationContext aggregationOperationContext) {

		this.aggregation = aggregation;

		if (aggregation instanceof TypedAggregation) {
			this.inputType = ((TypedAggregation<?>) aggregation).getInputType();
		} else if (aggregationOperationContext instanceof TypeBasedAggregationOperationContext) {
			this.inputType = ((TypeBasedAggregationOperationContext) aggregationOperationContext).getType();
		} else {
			this.inputType = null;
		}

		this.aggregationOperationContext = Lazy.of(() -> aggregationOperationContext != null ? aggregationOperationContext
				: aggregationUtil.createAggregationContext(aggregation, getInputType()));
		this.pipeline = Lazy.of(() -> aggregationUtil.createPipeline(this.aggregation, getAggregationOperationContext()));
	}

	/**
	 * Creates a new instance of {@link AggregationDefinition} extracting the input type from either the
	 * {@link org.springframework.data.mongodb.core.aggregation.Aggregation} in case of a {@link TypedAggregation} or
	 * the given {@literal inputType} if present. <br />
	 * Creates a new {@link AggregationOperationContext} based on the {@link Aggregation} input type and the desired
	 * {@link AggregationOptions#getDomainTypeMapping() domain type mapping}. <br />
	 * Pipelines are mapped on first access of {@link #getAggregationPipeline()} and cached for reuse.
	 *
	 * @param aggregation the source aggregation.
	 * @param inputType can be {@literal null}.
	 */
	AggregationDefinition(Aggregation aggregation, @Nullable Class<?> inputType) {

		this.aggregation = aggregation;

		if (aggregation instanceof TypedAggregation) {
			this.inputType = ((TypedAggregation<?>) aggregation).getInputType();
		} else {
			this.inputType = inputType;
		}

		this.aggregationOperationContext = Lazy
				.of(() -> aggregationUtil.createAggregationContext(aggregation, getInputType()));
		this.pipeline = Lazy.of(() -> aggregationUtil.createPipeline(this.aggregation, getAggregationOperationContext()));
	}

	/**
	 * Obtain the already mapped pipeline.
	 *
	 * @return never {@literal null}.
	 */
	List<Document> getAggregationPipeline() {
		return pipeline.get();
	}

	/**
	 * @return {@literal true} if the last aggregation stage is either {@literal $out} or {@literal $merge}.
	 * @see AggregationPipeline#isOutOrMerge()
	 */
	boolean isOutOrMerge() {
		return aggregation.getPipeline().isOutOrMerge();
	}

	/**
	 * Obtain the {@link AggregationOperationContext} used for mapping the pipeline.
	 *
	 * @return never {@literal null}.
	 */
	AggregationOperationContext getAggregationOperationContext() {
		return aggregationOperationContext.get();
	}

	/**
	 * @return the input type to map the pipeline against. Can be {@literal null}.
	 */
	@Nullable
	Class<?> getInputType() {
		return inputType;
	}
}
}

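A standalone sketch of the lazy pipeline mapping described above, using a plain TypedAggregation and the default context instead of the template-managed one; Person and its fields are assumptions, so treat this as an approximation of the behaviour rather than the class itself. The pipeline is only rendered on the first get() and the rendered stages are reused afterwards.

import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import static org.springframework.data.mongodb.core.query.Criteria.where;

import java.util.List;
import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.util.Lazy;

class LazyPipelineSketch {

	static class Person {
		String lastname;
		int age;
	}

	public static void main(String[] args) {

		TypedAggregation<Person> aggregation = newAggregation(Person.class, //
				match(where("age").gt(21)), //
				group("lastname").count().as("count"));

		// nothing is rendered yet; the supplier runs on the first get() and the result is cached
		Lazy<List<Document>> pipeline = Lazy.of(() -> aggregation.toPipeline(DEFAULT_CONTEXT));

		List<Document> stages = pipeline.get(); // [{ "$match" : { "age" : { "$gt" : 21 } } }, { "$group" : ... }]
	}
}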
@@ -15,6 +15,10 @@
 */
package org.springframework.data.mongodb.core;

import lombok.AccessLevel;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import reactor.core.publisher.Flux;

import org.springframework.data.mongodb.core.aggregation.Aggregation;

@@ -58,22 +62,15 @@ class ReactiveAggregationOperationSupport implements ReactiveAggregationOperatio
		return new ReactiveAggregationSupport<>(template, domainType, null, null);
	}

	@RequiredArgsConstructor
	@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
	static class ReactiveAggregationSupport<T>
			implements AggregationOperationWithAggregation<T>, ReactiveAggregation<T>, TerminatingAggregationOperation<T> {

		private final ReactiveMongoTemplate template;
		private final Class<T> domainType;
		private final Aggregation aggregation;
		private final String collection;

		ReactiveAggregationSupport(ReactiveMongoTemplate template, Class<T> domainType, Aggregation aggregation,
				String collection) {

			this.template = template;
			this.domainType = domainType;
			this.aggregation = aggregation;
			this.collection = collection;
		}
		@NonNull ReactiveMongoTemplate template;
		@NonNull Class<T> domainType;
		Aggregation aggregation;
		String collection;

		/*
		 * (non-Javadoc)

@@ -71,7 +71,7 @@ public interface ReactiveChangeStreamOperation {
	/**
	 * Start listening to changes. The stream will not be completed unless the {@link org.reactivestreams.Subscription}
	 * is {@link org.reactivestreams.Subscription#cancel() canceled}.
	 * <br />
	 * <p />
	 * However, the stream may become dead, or invalid, if all watched collections, databases are dropped.
	 */
	Flux<ChangeStreamEvent<T>> listen();

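A possible usage sketch for the listen() contract described above, assuming a ReactiveMongoTemplate named template and a made-up Person domain type: the resulting Flux keeps emitting change events until the subscription is disposed (or the watched collection/database is dropped).

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import reactor.core.Disposable;

class ChangeStreamListenSketch {

	static class Person {}

	static Disposable listenToPeople(ReactiveMongoTemplate template) {

		// emits ChangeStreamEvent<Person> until dispose() is called on the returned subscription
		return template.changeStream(Person.class) //
				.watchCollection("person") //
				.listen() //
				.subscribe(event -> System.out.println(event.getBody()));
	}
}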
@@ -19,7 +19,6 @@ import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import org.springframework.data.geo.GeoResult;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;

@@ -39,14 +38,13 @@ import org.springframework.data.mongodb.core.query.Query;
 *     query(Human.class)
 *         .inCollection("star-wars")
 *         .as(Jedi.class)
 *         .matching(where("firstname").is("luke"))
 *         .matching(query(where("firstname").is("luke")))
 *         .all();
 * </code>
 * </pre>
 *
 * @author Mark Paluch
 * @author Christoph Strobl
 * @author Juergen Zimmermann
 * @since 2.0
 */
public interface ReactiveFindOperation {

@@ -91,10 +89,10 @@ public interface ReactiveFindOperation {
	 * Get all matching elements using a {@link com.mongodb.CursorType#TailableAwait tailable cursor}. The stream will
	 * not be completed unless the {@link org.reactivestreams.Subscription} is
	 * {@link org.reactivestreams.Subscription#cancel() canceled}.
	 * <br />
	 * <p />
	 * However, the stream may become dead, or invalid, if either the query returns no match or the cursor returns the
	 * document at the "end" of the collection and then the application deletes that document.
	 * <br />
	 * <p />
	 * A stream that is no longer in use must be {@link reactor.core.Disposable#dispose() disposed} otherwise the
	 * streams will linger and exhaust resources. <br/>
	 * <strong>NOTE:</strong> Requires a capped collection.

@@ -106,12 +104,6 @@ public interface ReactiveFindOperation {

	/**
	 * Get the number of matching elements.
	 * <br />
	 * This method uses an
	 * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
	 * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but
	 * guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the application's
	 * needs use {@link ReactiveMongoOperations#estimatedCount(String)} for empty queries instead.
	 *
	 * @return {@link Mono} emitting total number of matching elements. Never {@literal null}.
	 */

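As a usage sketch of the trade-off just described (template, Person and the collection name are assumptions): count() gives an exact, shard/session/transaction-safe result at aggregation cost, while estimatedCount is the cheap alternative for unfiltered counts.

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import reactor.core.publisher.Mono;

class CountUsageSketch {

	static class Person {}

	static void counts(ReactiveMongoTemplate template) {

		// exact count; runs an aggregation-backed countDocuments even for empty queries
		Mono<Long> exact = template.query(Person.class) //
				.matching(query(where("active").is(true))) //
				.count();

		// metadata-based estimate; suitable only when no filter is needed
		Mono<Long> estimate = template.estimatedCount("person");
	}
}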
@@ -152,18 +144,6 @@ public interface ReactiveFindOperation {
	 */
	TerminatingFind<T> matching(Query query);

	/**
	 * Set the filter {@link CriteriaDefinition criteria} to be used.
	 *
	 * @param criteria must not be {@literal null}.
	 * @return new instance of {@link TerminatingFind}.
	 * @throws IllegalArgumentException if criteria is {@literal null}.
	 * @since 3.0
	 */
	default TerminatingFind<T> matching(CriteriaDefinition criteria) {
		return matching(Query.query(criteria));
	}

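For completeness, the CriteriaDefinition overload above is simply a shortcut for wrapping the criteria in a Query, so the interface example from further up can be written either way (Human and Jedi are the documentation's sample types, template is an assumed ReactiveMongoTemplate):

import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import reactor.core.publisher.Flux;

class MatchingCriteriaSketch {

	static class Human {}
	static class Jedi {}

	static Flux<Jedi> findLuke(ReactiveMongoTemplate template) {

		return template.query(Human.class) //
				.inCollection("star-wars") //
				.as(Jedi.class) //
				.matching(where("firstname").is("luke")) // equivalent to matching(query(where(...)))
				.all();
	}
}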
	/**
	 * Set the filter query for the geoNear execution.
	 *

@@ -279,21 +259,9 @@ public interface ReactiveFindOperation {
	 *
	 * @param query must not be {@literal null}.
	 * @return new instance of {@link TerminatingDistinct}.
	 * @throws IllegalArgumentException if query is {@literal null}.
	 * @throws IllegalArgumentException if resultType is {@literal null}.
	 */
	TerminatingDistinct<T> matching(Query query);

	/**
	 * Set the filter {@link CriteriaDefinition criteria} to be used.
	 *
	 * @param criteria must not be {@literal null}.
	 * @return new instance of {@link TerminatingDistinct}.
	 * @throws IllegalArgumentException if criteria is {@literal null}.
	 * @since 3.0
	 */
	default TerminatingDistinct<T> matching(CriteriaDefinition criteria) {
		return matching(Query.query(criteria));
	}
}

/**

@@ -15,10 +15,15 @@
 */
package org.springframework.data.mongodb.core;

import lombok.AccessLevel;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import org.bson.Document;

import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;

@@ -34,15 +39,12 @@ import org.springframework.util.StringUtils;
 * @author Christoph Strobl
 * @since 2.0
 */
@RequiredArgsConstructor
class ReactiveFindOperationSupport implements ReactiveFindOperation {

	private static final Query ALL_QUERY = new Query();

	private final ReactiveMongoTemplate template;

	ReactiveFindOperationSupport(ReactiveMongoTemplate template) {
		this.template = template;
	}
	private final @NonNull ReactiveMongoTemplate template;

	/*
	 * (non-Javadoc)

@@ -62,24 +64,16 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
	 * @author Christoph Strobl
	 * @since 2.0
	 */
	@RequiredArgsConstructor
	@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
	static class ReactiveFindSupport<T>
			implements ReactiveFind<T>, FindWithCollection<T>, FindWithProjection<T>, FindWithQuery<T> {

		private final ReactiveMongoTemplate template;
		private final Class<?> domainType;
		private final Class<T> returnType;
		private final String collection;
		private final Query query;

		ReactiveFindSupport(ReactiveMongoTemplate template, Class<?> domainType, Class<T> returnType,
				String collection, Query query) {

			this.template = template;
			this.domainType = domainType;
			this.returnType = returnType;
			this.collection = collection;
			this.query = query;
		}
		@NonNull ReactiveMongoTemplate template;
		@NonNull Class<?> domainType;
		Class<T> returnType;
		String collection;
		Query query;

		/*
		 * (non-Javadoc)

@@ -15,6 +15,10 @@
 */
package org.springframework.data.mongodb.core;

import lombok.AccessLevel;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

@@ -30,13 +34,10 @@ import org.springframework.util.StringUtils;
 * @author Christoph Strobl
 * @since 2.0
 */
@RequiredArgsConstructor
class ReactiveInsertOperationSupport implements ReactiveInsertOperation {

	private final ReactiveMongoTemplate template;

	ReactiveInsertOperationSupport(ReactiveMongoTemplate template) {
		this.template = template;
	}
	private final @NonNull ReactiveMongoTemplate template;

	/*
	 * (non-Javadoc)

@@ -50,18 +51,13 @@ class ReactiveInsertOperationSupport implements ReactiveInsertOperation {
		return new ReactiveInsertSupport<>(template, domainType, null);
	}

	@RequiredArgsConstructor
	@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
	static class ReactiveInsertSupport<T> implements ReactiveInsert<T> {

		private final ReactiveMongoTemplate template;
		private final Class<T> domainType;
		private final String collection;

		ReactiveInsertSupport(ReactiveMongoTemplate template, Class<T> domainType, String collection) {

			this.template = template;
			this.domainType = domainType;
			this.collection = collection;
		}
		@NonNull ReactiveMongoTemplate template;
		@NonNull Class<T> domainType;
		String collection;

		/*
		 * (non-Javadoc)

@@ -19,7 +19,6 @@ import reactor.core.publisher.Flux;

import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;

/**

@@ -147,18 +146,6 @@ public interface ReactiveMapReduceOperation {
	 * @throws IllegalArgumentException if query is {@literal null}.
	 */
	TerminatingMapReduce<T> matching(Query query);

	/**
	 * Set the filter {@link CriteriaDefinition criteria} to be used.
	 *
	 * @param criteria must not be {@literal null}.
	 * @return new instance of {@link TerminatingMapReduce}.
	 * @throws IllegalArgumentException if query is {@literal null}.
	 * @since 3.0
	 */
	default TerminatingMapReduce<T> matching(CriteriaDefinition criteria) {
		return matching(Query.query(criteria));
	}
}

/**

@@ -15,6 +15,8 @@
 */
package org.springframework.data.mongodb.core;

import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import reactor.core.publisher.Flux;

import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;

@@ -29,15 +31,12 @@ import org.springframework.util.StringUtils;
 * @author Christoph Strobl
 * @since 2.1
 */
@RequiredArgsConstructor
class ReactiveMapReduceOperationSupport implements ReactiveMapReduceOperation {

	private static final Query ALL_QUERY = new Query();

	private final ReactiveMongoTemplate template;

	ReactiveMapReduceOperationSupport(ReactiveMongoTemplate template) {
		this.template = template;
	}
	private final @NonNull ReactiveMongoTemplate template;

	/*
	 * (non-Javadoc)

@@ -22,7 +22,7 @@ import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.lang.Nullable;
import org.springframework.util.StringUtils;

import com.mongodb.MongoClientSettings;
import com.mongodb.async.client.MongoClientSettings;
import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoClients;

Some files were not shown because too many files have changed in this diff.