Compare commits
238 Commits
SHA1s of the 238 commits in this comparison:

bf7b854d76, a06f128469, 5504d6b6a2, adbe2113fb, 92d569f816, 8c7dd788b4,
6b4432a771, ddb56ab974, 79c15341f3, a1200bb096, 4f4951d94c, 3cffabaa8a,
fa70a76980, 06c77426e7, 8be58a1f49, 4a141a251f, 7d642a3b0b, 1efa5ffc18,
f17b699d25, 4320baf636, ea0502bf0a, 0eadb05c0d, e160240f46, 6250c95af6,
0324ae6706, 4b6a058335, acdee76ff3, 7240823a19, ed7705173c, 607730b2fe,
752544aa64, 1e4cc2e0e4, 5df59ba852, 3d683f6f02, 2b47f44531, 79534fa426,
b8e76ecae4, 3e2b060611, 4699219728, a483d95cde, 84f35f5655, 2ec0f93325,
48f9422a66, a2349405af, e12ab354f7, 04b20fa9c0, 81c46f04d6, ce905c80fe,
831e4f9ef1, 5ce293a871, bcd61f0dae, 478594c3ca, bb101d5e18, a5bc7a2a08,
6720967e19, 99a4661e81, 338bc30b96, 7fa3f0068b, abc74fdcc6, 3a895588c8,
f79d98ce23, 2bcc0d8185, c8846d3d1c, a4835c8fcf, 7875c8399f, 9046857721,
e8bb63c9f7, b431a56a95, dc820017e0, 34ce87b80c, 9098d509a5, 861c8279a3,
e545787e7e, 38ccdc5dfc, 7a34cc73d8, ba6fa834e5, 7100cd17be, 7c65472e2d,
f98f586a23, 19b5b6b6f0, b9ffa9b89d, 3ba589072f, e237c5dfc4, ecb560cdbc,
fc4a21775a, ae62e70c52, f83622709d, 83d218081c, 70fe406602, 18046e9040,
69310552e3, b8f093269d, 172db96fea, c8381c734b, bf82964474, 2d0495874f,
82c91cbb71, 4d309bd7f0, 6f011b0fa1, 1a3b9e3c42, 5a37468103, d4b0963550,
468c497525, 4562f39d7a, 49957e8c6e, b462b35284, 445388bb5f, 61e9eac49b,
c219f6e7f2, 1ab130ffca, a4d6a0cf8a, c28f725f48, a71f50f15c, 0ad715f806,
ba559c223a, 5f3ad68114, 28b18d25cb, 22c0e5029c, 4582d3152c, d219e8ed7c,
ab7740faf5, 0fba00311d, 33863999e6, ae18958955, 489d637a00, 1b5ce651be,
e035210917, 57fc260c43, f9ec63425e, aedb50d728, dbf4990f60, c5c43158c2,
56ffe7913d, eae263eebc, 0dd2fa3dce, e648ea5903, f389812b7c, 2127ddcbb8,
7f9ab3bb44, aea40ca490, fb8d03db31, 890f08f19a, be58472777, b082d4ad98,
e80b031f54, 50b017c08b, 78429eb33d, 3ed0bd7a18, cbc923c727, f6ca0049b6,
82c9b0c662, 3ca2349ce3, a76f157457, 560a6a5bc2, 51d5c52193, 56b6748068,
1e19f405cc, 54d2c122eb, b47c5704e7, 6b0b1cd97d, 35bbc604aa, 9ade830a10,
8fbff50f4f, 14b49638a0, dc31f4f32f, 708f9ac7b3, 17d6100426, 27a4e25880,
d378bcb442, f6505c7758, d25f88c70e, cec6edfa26, 3261936e8a, d2d471d135,
bcd2de000c, c873e49d71, 4ebcac19bc, 78212948bc, 38575baec1, f1a3c37a79,
c668a47243, 6a20ddf5a2, cec6526543, 46ea58f3b9, ebaea8d22f, ed6aaeed25,
89b1b6fbb2, 23769301b5, 3399160acf, 32a8ee9b31, 17cea70abc, 07731c39ba,
c5b580b82b, 9a1385186e, 704524d7f4, cc9a3ac8da, acb68f3ca4, 3088f0469e,
a1ae04881d, 6f55c66060, f86447bd04, 1bb4324b2e, 856506f121, 2a81dc75a8,
58cd4c08ca, 344e019143, 918b7e96bb, fce7a5c1cb, dbd2de8e0f, 0dbe331ab0,
846ebcd91d, 9e0b5caeac, cf70f5e5eb, 331dc6df6f, a51dce2c90, c0cf1aa95b,
7104ffa543, 28d2fb6680, 140e26946f, f4e730ce87, e3a83ebc42, f65c1e324e,
1dd0061f03, 5ea860700c, 3dd653a702, f87847407b, 433a125c9e, 5827cb0971,
0109bf6858, 49d1555576, fdbb305b8e, 49dd03311a, a86a3210e1, 4b655abfb6,
0963e6cf77, 3e1b2c4bdb, 03e0e0c431, 51900021a1
.mvn/wrapper/MavenWrapperDownloader.java (vendored, 110 lines deleted)
@@ -1,110 +0,0 @@
Diff not shown (the deleted file is the stock Maven Wrapper 0.4.2 downloader class).
.mvn/wrapper/maven-wrapper.jar (vendored, binary)
Binary file not shown.
.mvn/wrapper/maven-wrapper.properties (vendored, 1 line deleted)
@@ -1 +0,0 @@
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip
@@ -16,12 +16,9 @@ before_install:

env:
  matrix:
    - MONGO_VERSION=4.1.10
    - MONGO_VERSION=4.0.4
    - MONGO_VERSION=3.6.12
    - MONGO_VERSION=3.4.20
  global:
    - PROFILE=ci
  global:
    - MONGO_VERSION=3.7.9

addons:
  apt:
@@ -24,4 +24,4 @@ Instances of abusive, harassing, or otherwise unacceptable behavior may be repor
All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances.
Maintainers are obligated to maintain confidentiality with regard to the reporter of an incident.

This Code of Conduct is adapted from the https://contributor-covenant.org[Contributor Covenant], version 1.3.0, available at https://contributor-covenant.org/version/1/3/0/[contributor-covenant.org/version/1/3/0/].
This Code of Conduct is adapted from the http://contributor-covenant.org[Contributor Covenant], version 1.3.0, available at http://contributor-covenant.org/version/1/3/0/[contributor-covenant.org/version/1/3/0/].
Jenkinsfile (vendored, 144 lines deleted)
@@ -1,144 +0,0 @@
pipeline {
	agent none

	triggers {
		pollSCM 'H/10 * * * *'
		upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS)
	}

	options {
		disableConcurrentBuilds()
	}

	stages {
		stage("Docker images") {
			parallel {
				stage('Publish JDK 8 + MongoDB 4.0') {
					when {
						changeset "ci/openjdk8-mongodb-4.0/**"
					}
					agent any

					steps {
						script {
							def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.0", "ci/openjdk8-mongodb-4.0/")
							docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
								image.push()
							}
						}
					}
				}
				stage('Publish JDK 8 + MongoDB 4.1') {
					when {
						changeset "ci/openjdk8-mongodb-4.1/**"
					}
					agent any

					steps {
						script {
							def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.1", "ci/openjdk8-mongodb-4.1/")
							docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
								image.push()
							}
						}
					}
				}
			}
		}

		stage("test: baseline") {
			agent {
				docker {
					image 'springci/spring-data-openjdk8-with-mongodb-4.0:latest'
					args '-v $HOME/.m2:/root/.m2'
				}
			}
			steps {
				sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
				sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
				sh 'sleep 10'
				sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
				sh 'sleep 15'
				sh './mvnw clean dependency:list test -Dsort -B'
			}
		}

		stage("Test other configurations") {
			parallel {
				stage("test: mongodb 4.1") {
					agent {
						docker {
							label 'data'
							image 'springci/spring-data-openjdk8-with-mongodb-4.1:latest'
							args '-v $HOME/.m2:/root/.m2'
						}
					}
					steps {
						sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
						sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
						sh 'sleep 10'
						sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
						sh 'sleep 15'
						sh './mvnw clean dependency:list test -Dsort -B'
					}
				}
			}
		}

		stage('Release to artifactory') {
			when {
				branch 'issue/*'
			}
			agent {
				docker {
					image 'adoptopenjdk/openjdk8:latest'
					args '-v $HOME/.m2:/root/.m2'
				}
			}

			environment {
				ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
			}

			steps {
				sh "./mvnw -Pci,snapshot -Dmaven.test.skip=true clean deploy -B"
			}
		}

		stage('Release to artifactory with docs') {
			when {
				branch 'master'
			}
			agent {
				docker {
					image 'adoptopenjdk/openjdk8:latest'
					args '-v $HOME/.m2:/root/.m2'
				}
			}

			environment {
				ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
			}

			steps {
				sh "./mvnw -Pci,snapshot -Dmaven.test.skip=true clean deploy -B"
			}
		}
	}

	post {
		changed {
			script {
				slackSend(
						color: (currentBuild.currentResult == 'SUCCESS') ? 'good' : 'danger',
						channel: '#spring-data-dev',
						message: "${currentBuild.fullDisplayName} - `${currentBuild.currentResult}`\n${env.BUILD_URL}")
				emailext(
						subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}",
						mimeType: 'text/html',
						recipientProviders: [[$class: 'CulpritsRecipientProvider'], [$class: 'RequesterRecipientProvider']],
						body: "<a href=\"${env.BUILD_URL}\">${currentBuild.fullDisplayName} is reported as ${currentBuild.currentResult}</a>")
			}
		}
	}
}
README.adoc (234 lines deleted)
@@ -1,234 +0,0 @@
image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start]
image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start]

image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Moore%20(master)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F2.1.x&subject=Lovelace%20(2.1.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F1.10.x&subject=Ingalls%20(1.10.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]

= Spring Data MongoDB

The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.

The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities. The Spring Data MongoDB project provides integration with the MongoDB document database. Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB DBCollection and easily writing a repository style data access layer.

== Getting Help

For a comprehensive treatment of all the Spring Data MongoDB features, please refer to:

* the https://docs.spring.io/spring-data/mongodb/docs/current/reference/html/[User Guide]
* the https://docs.spring.io/spring-data/mongodb/docs/current/api/[JavaDocs] have extensive comments in them as well.
* the home page of https://projects.spring.io/spring-data-mongodb[Spring Data MongoDB] contains links to articles and other resources.
* for more detailed questions, use https://stackoverflow.com/questions/tagged/spring-data-mongodb[Spring Data Mongodb on Stackoverflow].

If you are new to Spring as well as to Spring Data, look for information about https://projects.spring.io/[Spring projects].

== Quick Start

=== Maven configuration

Add the Maven dependency:

[source,xml]
----
<dependency>
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb</artifactId>
  <version>${version}.RELEASE</version>
</dependency>
----

If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version.

[source,xml]
----
<dependency>
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb</artifactId>
  <version>${version}.BUILD-SNAPSHOT</version>
</dependency>

<repository>
  <id>spring-libs-snapshot</id>
  <name>Spring Snapshot Repository</name>
  <url>https://repo.spring.io/libs-snapshot</url>
</repository>
----

=== MongoTemplate

MongoTemplate is the central support class for Mongo database operations. It provides:

* Basic POJO mapping support to and from BSON
* Convenience methods to interact with the store (insert object, update objects) and MongoDB specific ones (geo-spatial operations, upserts, map-reduce etc.)
* Connection affinity callback
* Exception translation into Spring's https://docs.spring.io/spring/docs/current/spring-framework-reference/html/dao.html#dao-exceptions[technology agnostic DAO exception hierarchy].

=== Spring Data repositories

To simplify the creation of data repositories Spring Data MongoDB provides a generic repository programming model. It will automatically create a repository proxy for you that adds implementations of finder methods you specify on an interface.

For example, given a `Person` class with first and last name properties, a `PersonRepository` interface that can query for `Person` by last name and when the first name matches a like expression is shown below:

[source,java]
----
public interface PersonRepository extends CrudRepository<Person, Long> {

  List<Person> findByLastname(String lastname);

  List<Person> findByFirstnameLike(String firstname);
}
----

The queries issued on execution will be derived from the method name. Extending `CrudRepository` causes CRUD methods to be pulled into the interface so that you can easily save and find single entities and collections of them.

You can have Spring automatically create a proxy for the interface by using the following JavaConfig:

[source,java]
----
@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoConfiguration {

  @Override
  public MongoClient mongoClient() throws Exception {
    return new MongoClient();
  }

  @Override
  protected String getDatabaseName() {
    return "springdata";
  }
}
----

This sets up a connection to a local MongoDB instance and enables the detection of Spring Data repositories (through `@EnableMongoRepositories`). The same configuration would look like this in XML:

[source,xml]
----
<bean id="template" class="org.springframework.data.mongodb.core.MongoTemplate">
  <constructor-arg>
    <bean class="com.mongodb.MongoClient">
      <constructor-arg value="localhost" />
      <constructor-arg value="27017" />
    </bean>
  </constructor-arg>
  <constructor-arg value="database" />
</bean>

<mongo:repositories base-package="com.acme.repository" />
----

This will find the repository interface and register a proxy object in the container. You can use it as shown below:

[source,java]
----
@Service
public class MyService {

  private final PersonRepository repository;

  @Autowired
  public MyService(PersonRepository repository) {
    this.repository = repository;
  }

  public void doWork() {

    repository.deleteAll();

    Person person = new Person();
    person.setFirstname("Oliver");
    person.setLastname("Gierke");
    person = repository.save(person);

    List<Person> lastNameResults = repository.findByLastname("Gierke");
    List<Person> firstNameResults = repository.findByFirstnameLike("Oli*");
  }
}
----

=== MongoDB 4.0 Transactions

As of version 4 MongoDB supports https://www.mongodb.com/transactions[Transactions]. Transactions are built on top of `ClientSessions` and therefore require an active session.

`MongoTransactionManager` is the gateway to the well known Spring transaction support. It allows applications to use https://docs.spring.io/spring/docs/current/spring-framework-reference/html/transaction.html[managed transaction features of Spring]. The `MongoTransactionManager` binds a `ClientSession` to the thread. `MongoTemplate` automatically detects those and operates on them accordingly.

[source,java]
----
@Configuration
static class Config extends AbstractMongoConfiguration {

  @Bean
  MongoTransactionManager transactionManager(MongoDbFactory dbFactory) {
    return new MongoTransactionManager(dbFactory);
  }

  // ...
}

@Component
public class StateService {

  @Transactional
  void someBusinessFunction(Step step) {

    template.insert(step);

    process(step);

    template.update(Step.class).apply(Update.set("state", // ...
  };
});
----
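NOTE: The `StateService` snippet above is truncated in the original README (it ends with a `// ...` elision and unbalanced braces). Purely as an illustration, and not as part of the original file, a self-contained version of the same pattern could look like the sketch below. The `Step` type and its `id`/`state` properties are assumptions carried over from the snippet, and imports are omitted in the same style as the surrounding examples.

[source,java]
----
@Configuration
class TransactionConfig extends AbstractMongoConfiguration {

  @Bean
  MongoTransactionManager transactionManager(MongoDbFactory dbFactory) {
    // Enables @Transactional support; MongoTemplate detects the bound ClientSession.
    return new MongoTransactionManager(dbFactory);
  }

  @Override
  public MongoClient mongoClient() {
    return new MongoClient();
  }

  @Override
  protected String getDatabaseName() {
    return "springdata";
  }
}

@Component
public class StateService {

  private final MongoTemplate template;

  public StateService(MongoTemplate template) {
    this.template = template;
  }

  @Transactional
  public void someBusinessFunction(Step step) {

    // Both operations run in the same MongoDB transaction; if the update fails,
    // the insert is rolled back as well.
    template.insert(step);

    template.updateFirst(
        Query.query(Criteria.where("id").is(step.getId())), // assumes Step exposes an id
        Update.update("state", "processed"),
        Step.class);
  }
}
----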
== Contributing to Spring Data

Here are some ways for you to get involved in the community:

* Get involved with the Spring community on Stackoverflow and help out on the https://stackoverflow.com/questions/tagged/spring-data-mongodb[spring-data-mongodb] tag by responding to questions and joining the debate.
* Create https://jira.spring.io/browse/DATAMONGO[JIRA] tickets for bugs and new features and comment and vote on the ones that you are interested in.
* Github is for social coding: if you want to write code, we encourage contributions through pull requests from https://help.github.com/forking/[forks of this repository]. If you want to contribute code this way, please reference a JIRA ticket as well covering the specific issue you are addressing.
* Watch for upcoming articles on Spring by https://spring.io/blog[subscribing] to spring.io.

Before we accept a non-trivial patch or pull request we will need you to https://cla.pivotal.io/sign/spring[sign the Contributor License Agreement]. Signing the contributor’s agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. If you forget to do so, you'll be reminded when you submit a pull request. Active contributors might be asked to join the core team, and given the ability to merge pull requests.

== Running CI tasks locally

Since this pipeline is purely Docker-based, it's easy to:

* Debug what went wrong on your local machine.
* Test out a tweak to your test routine before sending it out.
* Experiment against a new image before submitting your pull request.

All of these use cases are great reasons to essentially run what the CI server does on your local machine.

IMPORTANT: To do this you must have Docker installed on your machine.

1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk8-with-mongodb-4.0:latest /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+
2. `cd spring-data-mongodb-github`
+
Next, run the tests from inside the container:
+
3. `./mvnw clean dependency:list test -Dsort -Dbundlor.enabled=false -B` (or with whatever profile you need to test out)

Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs.

If you need to package things up, do this:

1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk8-with-mongodb-4.0:latest /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+
2. `cd spring-data-mongodb-github`
+
Next, package things from inside the container:
+
3. `./mvnw clean dependency:list package -Dsort -Dbundlor.enabled=false -B`

NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
README.md (new file, 150 lines added)
@@ -0,0 +1,150 @@
[![Spring Data MongoDB](https://spring.io/badges/spring-data-mongodb/ga.svg)](http://projects.spring.io/spring-data-mongodb#quick-start)
[![Spring Data MongoDB](https://spring.io/badges/spring-data-mongodb/snapshot.svg)](http://projects.spring.io/spring-data-mongodb#quick-start)

# Spring Data MongoDB

The primary goal of the [Spring Data](http://projects.spring.io/spring-data) project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.

The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities. The Spring Data MongoDB project provides integration with the MongoDB document database. Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB DBCollection and easily writing a repository style data access layer.

## Getting Help

For a comprehensive treatment of all the Spring Data MongoDB features, please refer to:

* the [User Guide](http://docs.spring.io/spring-data/mongodb/docs/current/reference/html/)
* the [JavaDocs](http://docs.spring.io/spring-data/mongodb/docs/current/api/) have extensive comments in them as well.
* the home page of [Spring Data MongoDB](http://projects.spring.io/spring-data-mongodb) contains links to articles and other resources.
* for more detailed questions, use [Spring Data Mongodb on Stackoverflow](http://stackoverflow.com/questions/tagged/spring-data-mongodb).

If you are new to Spring as well as to Spring Data, look for information about [Spring projects](http://projects.spring.io/).

## Quick Start

### Maven configuration

Add the Maven dependency:

```xml
<dependency>
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb</artifactId>
  <version>${version}.RELEASE</version>
</dependency>
```

If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version.

```xml
<dependency>
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb</artifactId>
  <version>${version}.BUILD-SNAPSHOT</version>
</dependency>

<repository>
  <id>spring-libs-snapshot</id>
  <name>Spring Snapshot Repository</name>
  <url>http://repo.spring.io/libs-snapshot</url>
</repository>
```

### MongoTemplate

MongoTemplate is the central support class for Mongo database operations. It provides:

* Basic POJO mapping support to and from BSON
* Convenience methods to interact with the store (insert object, update objects) and MongoDB specific ones (geo-spatial operations, upserts, map-reduce etc.)
* Connection affinity callback
* Exception translation into Spring's [technology agnostic DAO exception hierarchy](http://docs.spring.io/spring/docs/current/spring-framework-reference/html/dao.html#dao-exceptions).
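The list above describes what `MongoTemplate` offers, but the README itself only demonstrates repository usage. Purely as an illustration (this block is not part of the original README.md), a minimal `MongoTemplate` round trip could look like the following sketch; it reuses the `Person` class from the repository examples below and assumes the `"springdata"` database name from the JavaConfig example.

```java
import java.util.List;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.MongoClient;

public class MongoTemplateQuickstart {

  public static void main(String[] args) {

    // Connects to a MongoDB instance on localhost:27017 and uses the "springdata" database.
    MongoTemplate template = new MongoTemplate(new MongoClient(), "springdata");

    Person person = new Person();
    person.setFirstname("Oliver");
    person.setLastname("Gierke");

    // Maps the POJO to a BSON document and stores it in the "person" collection.
    template.save(person);

    // Reads it back with a Criteria-based query.
    List<Person> result = template.find(
        Query.query(Criteria.where("lastname").is("Gierke")), Person.class);

    System.out.println(result);
  }
}
```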
### Spring Data repositories

To simplify the creation of data repositories Spring Data MongoDB provides a generic repository programming model. It will automatically create a repository proxy for you that adds implementations of finder methods you specify on an interface.

For example, given a `Person` class with first and last name properties, a `PersonRepository` interface that can query for `Person` by last name and when the first name matches a like expression is shown below:

```java
public interface PersonRepository extends CrudRepository<Person, Long> {

  List<Person> findByLastname(String lastname);

  List<Person> findByFirstnameLike(String firstname);
}
```

The queries issued on execution will be derived from the method name. Extending `CrudRepository` causes CRUD methods to be pulled into the interface so that you can easily save and find single entities and collections of them.

You can have Spring automatically create a proxy for the interface by using the following JavaConfig:

```java
@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoConfiguration {

  @Override
  public Mongo mongo() throws Exception {
    return new MongoClient();
  }

  @Override
  protected String getDatabaseName() {
    return "springdata";
  }
}
```

This sets up a connection to a local MongoDB instance and enables the detection of Spring Data repositories (through `@EnableMongoRepositories`). The same configuration would look like this in XML:

```xml
<bean id="template" class="org.springframework.data.mongodb.core.MongoTemplate">
  <constructor-arg>
    <bean class="com.mongodb.MongoClient">
      <constructor-arg value="localhost" />
      <constructor-arg value="27017" />
    </bean>
  </constructor-arg>
  <constructor-arg value="database" />
</bean>

<mongo:repositories base-package="com.acme.repository" />
```

This will find the repository interface and register a proxy object in the container. You can use it as shown below:

```java
@Service
public class MyService {

  private final PersonRepository repository;

  @Autowired
  public MyService(PersonRepository repository) {
    this.repository = repository;
  }

  public void doWork() {

    repository.deleteAll();

    Person person = new Person();
    person.setFirstname("Oliver");
    person.setLastname("Gierke");
    person = repository.save(person);

    List<Person> lastNameResults = repository.findByLastname("Gierke");
    List<Person> firstNameResults = repository.findByFirstnameLike("Oli*");
  }
}
```

## Contributing to Spring Data

Here are some ways for you to get involved in the community:

* Get involved with the Spring community on Stackoverflow and help out on the [spring-data-mongodb](http://stackoverflow.com/questions/tagged/spring-data-mongodb) tag by responding to questions and joining the debate.
* Create [JIRA](https://jira.spring.io/browse/DATAMONGO) tickets for bugs and new features and comment and vote on the ones that you are interested in.
* Github is for social coding: if you want to write code, we encourage contributions through pull requests from [forks of this repository](http://help.github.com/forking/). If you want to contribute code this way, please reference a JIRA ticket as well covering the specific issue you are addressing.
* Watch for upcoming articles on Spring by [subscribing](http://spring.io/blog) to spring.io.

Before we accept a non-trivial patch or pull request we will need you to [sign the Contributor License Agreement](https://cla.pivotal.io/sign/spring). Signing the contributor’s agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. If you forget to do so, you'll be reminded when you submit a pull request. Active contributors might be asked to join the core team, and given the ability to merge pull requests.
@@ -1,9 +0,0 @@
# Security Policy

## Supported Versions

Please see the https://spring.io/projects/spring-data-mongodb[Spring Data MongoDB] project page for supported versions.

## Reporting a Vulnerability

Please don't raise security vulnerabilities here. Head over to https://pivotal.io/security to learn how to disclose them responsibly.
@@ -1,39 +0,0 @@
== Running CI tasks locally

Since Concourse is built on top of Docker, it's easy to:

* Debug what went wrong on your local machine.
* Test out a tweak to your `test.sh` script before sending it out.
* Experiment against a new image before submitting your pull request.

All of these use cases are great reasons to essentially run what Concourse does on your local machine.

IMPORTANT: To do this you must have Docker installed on your machine.

1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-8-jdk-with-mongodb /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+
Next, run the `test.sh` script from inside the container:
+
2. `PROFILE=none spring-data-mongodb-github/ci/test.sh`

Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs.

If you need to test the `build.sh` script, do this:

1. `mkdir /tmp/spring-data-mongodb-artifactory`
2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-8-jdk-with-mongodb /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github` and the temporary
artifactory output directory at `spring-data-mongodb-artifactory`.
+
Next, run the `build.sh` script from inside the container:
+
3. `spring-data-mongodb-github/ci/build.sh`

IMPORTANT: `build.sh` doesn't actually push to Artifactory so don't worry about accidentally deploying anything.
It just deploys to a local folder. That way, the `artifactory-resource` later in the pipeline can pick up these artifacts
and deliver them to artifactory.

NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
@@ -1,14 +0,0 @@
FROM adoptopenjdk/openjdk8:latest

RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2

RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4

RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list

RUN apt-get update

RUN apt-get install -y mongodb-org=4.0.9 mongodb-org-server=4.0.9 mongodb-org-shell=4.0.9 mongodb-org-mongos=4.0.9 mongodb-org-tools=4.0.9

RUN apt-get clean \
  && rm -rf /var/lib/apt/lists/*
@@ -1,14 +0,0 @@
FROM adoptopenjdk/openjdk8:latest

RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2

RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 4B7C549A058F8B6B

RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.1 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.1.list

RUN apt-get update

RUN apt-get install -y mongodb-org-unstable=4.1.13 mongodb-org-unstable-server=4.1.13 mongodb-org-unstable-shell=4.1.13 mongodb-org-unstable-mongos=4.1.13 mongodb-org-unstable-tools=4.1.13

RUN apt-get clean \
  && rm -rf /var/lib/apt/lists/*
mvnw (vendored, 286 lines deleted)
@@ -1,286 +0,0 @@
Diff not shown (the deleted file is the stock Maven Wrapper 0.4.2 launcher script).
mvnw.cmd (vendored, 161 lines deleted)
@@ -1,161 +0,0 @@
Diff not shown (the deleted file is the stock Maven Wrapper 0.4.2 Windows batch launcher).
pom.xml (80 changed lines)
@@ -5,7 +5,7 @@

	<groupId>org.springframework.data</groupId>
	<artifactId>spring-data-mongodb-parent</artifactId>
	<version>2.2.0.RC1</version>
	<version>2.0.15.BUILD-SNAPSHOT</version>
	<packaging>pom</packaging>

	<name>Spring Data MongoDB</name>

@@ -15,20 +15,21 @@

	<parent>
		<groupId>org.springframework.data.build</groupId>
		<artifactId>spring-data-parent</artifactId>
		<version>2.2.0.RC1</version>
		<version>2.0.15.BUILD-SNAPSHOT</version>
	</parent>

	<modules>
		<module>spring-data-mongodb</module>
		<module>spring-data-mongodb-cross-store</module>
		<module>spring-data-mongodb-distribution</module>
	</modules>

	<properties>
		<project.type>multi</project.type>
		<dist.id>spring-data-mongodb</dist.id>
		<springdata.commons>2.2.0.RC1</springdata.commons>
		<mongo>3.11.0-beta3</mongo>
		<mongo.reactivestreams>1.11.0</mongo.reactivestreams>
		<springdata.commons>2.0.15.BUILD-SNAPSHOT</springdata.commons>
		<mongo>3.5.0</mongo>
		<mongo.reactivestreams>1.6.0</mongo.reactivestreams>
		<jmh.version>1.19</jmh.version>
	</properties>

@@ -114,46 +115,6 @@

	<profiles>

		<profile>
			<id>snapshot</id>
			<build>
				<plugins>
					<plugin>
						<groupId>org.jfrog.buildinfo</groupId>
						<artifactId>artifactory-maven-plugin</artifactId>
						<version>2.6.1</version>
						<inherited>false</inherited>
						<executions>
							<execution>
								<id>build-info</id>
								<goals>
									<goal>publish</goal>
								</goals>
								<configuration>
									<buildInfo>
										<buildUrl>{{BUILD_URL}}</buildUrl>
									</buildInfo>
									<deployProperties>
										<zip.name>spring-data-mongodb</zip.name>
										<zip.displayname>spring-data-mongodb</zip.displayname>
										<zip.deployed>false</zip.deployed>
										<archives>*:*:*:*@zip</archives>
									</deployProperties>
									<publisher>
										<contextUrl>https://repo.spring.io</contextUrl>
										<username>{{ARTIFACTORY_USR}}</username>
										<password>{{ARTIFACTORY_PSW}}</password>
										<repoKey>libs-snapshot-local</repoKey>
										<snapshotRepoKey>libs-snapshot-local</snapshotRepoKey>
									</publisher>
								</configuration>
							</execution>
						</executions>
					</plugin>
				</plugins>
			</build>
		</profile>

		<profile>
			<id>release</id>
			<build>

@@ -171,10 +132,30 @@
			<id>benchmarks</id>
			<modules>
				<module>spring-data-mongodb</module>
				<module>spring-data-mongodb-cross-store</module>
				<module>spring-data-mongodb-distribution</module>
				<module>spring-data-mongodb-benchmarks</module>
			</modules>
		</profile>

		<profile>
			<id>distribute</id>
			<build>
				<plugins>
					<plugin>
						<groupId>org.asciidoctor</groupId>
						<artifactId>asciidoctor-maven-plugin</artifactId>
						<configuration>
							<attributes>
								<mongo-reactivestreams>${mongo.reactivestreams}</mongo-reactivestreams>
								<reactor>${reactor}</reactor>
							</attributes>
						</configuration>
					</plugin>
				</plugins>
			</build>
		</profile>

	</profiles>

	<dependencies>

@@ -188,8 +169,8 @@

	<repositories>
		<repository>
			<id>spring-libs-milestone</id>
			<url>https://repo.spring.io/libs-milestone</url>
			<id>spring-libs-snapshot</id>
			<url>https://repo.spring.io/libs-snapshot</url>
		</repository>
	</repositories>

@@ -198,11 +179,6 @@
			<id>spring-plugins-release</id>
			<url>https://repo.spring.io/plugins-release</url>
		</pluginRepository>
		<pluginRepository>
			<id>spring-libs-milestone</id>
			<url>https://repo.spring.io/libs-milestone</url>
		</pluginRepository>

	</pluginRepositories>

</project>
@@ -1,6 +1,6 @@

# Benchmarks

Benchmarks are based on [JMH](https://openjdk.java.net/projects/code-tools/jmh/).
Benchmarks are based on [JMH](http://openjdk.java.net/projects/code-tools/jmh/).

# Running Benchmarks
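The benchmarks referenced here are plain JMH classes, built through the `spring-data-mongodb-benchmarks` module that the `benchmarks` profile in the parent POM above adds to the reactor (note the `jmh.version` property it introduces). As a rough orientation only, a minimal JMH benchmark against `MongoTemplate` could look like the sketch below; the class, the database name, and the measured operation are illustrative assumptions, not code from this repository:

```java
import org.bson.Document;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.MongoClient;

/**
 * Hypothetical example only; not a benchmark that ships with the module.
 */
@State(Scope.Benchmark)
public class DocumentCountBenchmark {

	private MongoClient client;
	private MongoTemplate template;

	@Setup
	public void setUp() {
		// Assumes a MongoDB instance on localhost:27017 and a scratch database.
		client = new MongoClient();
		template = new MongoTemplate(client, "jmhScratch");
		template.execute("people", collection -> {
			collection.insertOne(new Document("name", "Jon"));
			return null;
		});
	}

	@TearDown
	public void tearDown() {
		template.dropCollection("people");
		client.close();
	}

	@Benchmark
	public long countPeople() {
		// The measured operation: a simple count over the collection.
		return template.count(new Query(), "people");
	}
}
```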
@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.2.0.RC1</version>
<version>2.0.15.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -87,7 +87,6 @@
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<useSystemClassLoader>false</useSystemClassLoader>
<testSourceDirectory>${project.build.sourceDirectory}</testSourceDirectory>
<testClassesDirectory>${project.build.outputDirectory}</testClassesDirectory>
<excludes>
7 spring-data-mongodb-cross-store/aop.xml
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<aspectj>
<aspects>
<aspect name="org.springframework.beans.factory.aspectj.AnnotationBeanConfigurerAspect" />
<aspect name="org.springframework.data.mongodb.crossstore.MongoDocumentBacking" />
</aspects>
</aspectj>
148 spring-data-mongodb-cross-store/pom.xml
@@ -0,0 +1,148 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/maven-v4_0_0.xsd">

<modelVersion>4.0.0</modelVersion>

<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.0.15.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

<artifactId>spring-data-mongodb-cross-store</artifactId>
<name>Spring Data MongoDB - Cross-Store Support</name>

<properties>
<jpa>2.1.1</jpa>
<hibernate>5.2.1.Final</hibernate>
<java-module-name>spring.data.mongodb.cross.store</java-module-name>
<project.root>${basedir}/..</project.root>
</properties>

<dependencies>

<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-aspects</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
</dependency>

<!-- Spring Data -->
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>2.0.15.BUILD-SNAPSHOT</version>
</dependency>

<!-- reactive -->
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-core</artifactId>
<optional>true</optional>
</dependency>

<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>
<version>${aspectj}</version>
</dependency>

<!-- JPA -->
<dependency>
<groupId>org.eclipse.persistence</groupId>
<artifactId>javax.persistence</artifactId>
<version>${jpa}</version>
<optional>true</optional>
</dependency>

<!-- For Tests -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
<version>${hibernate}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>1.8.0.10</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>
<version>${validation}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator</artifactId>
<version>5.2.4.Final</version>
<scope>test</scope>
</dependency>

</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>aspectj-maven-plugin</artifactId>
<version>1.6</version>
<dependencies>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>
<version>${aspectj}</version>
</dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjtools</artifactId>
<version>${aspectj}</version>
</dependency>
</dependencies>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>test-compile</goal>
</goals>
</execution>
</executions>
<configuration>
<outxml>true</outxml>
<aspectLibraries>
<aspectLibrary>
<groupId>org.springframework</groupId>
<artifactId>spring-aspects</artifactId>
</aspectLibrary>
</aspectLibraries>
<complianceLevel>${source.level}</complianceLevel>
<source>${source.level}</source>
<target>${source.level}</target>
<xmlConfigured>aop.xml</xmlConfigured>
</configuration>
</plugin>
</plugins>
</build>

</project>
@@ -1,5 +1,5 @@
/*
 * Copyright 2018-2019 the original author or authors.
 * Copyright 2011-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -13,15 +13,16 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.monitor;
package org.springframework.data.mongodb.crossstore;

import java.util.function.Supplier;
import org.springframework.data.crossstore.ChangeSetBacked;

/**
 * @author Christoph Strobl
 * @since 2018/01
 * @author Thomas Risberg
 * @author Oliver Gierke
 * @deprecated will be removed without replacement.
 */
interface Resumeable<T> {
@Deprecated
public interface DocumentBacked extends ChangeSetBacked {

	void resumeAt(Supplier<T> token);
}
@@ -0,0 +1,214 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import javax.persistence.EntityManagerFactory;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.DataAccessResourceFailureException;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.data.crossstore.ChangeSet;
|
||||
import org.springframework.data.crossstore.ChangeSetBacked;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister;
|
||||
import org.springframework.data.mongodb.core.CollectionCallback;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.model.Filters;
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
|
||||
/**
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @author Alex Vengrovsk
|
||||
* @author Mark Paluch
|
||||
* @deprecated will be removed without replacement.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
|
||||
private static final String ENTITY_CLASS = "_entity_class";
|
||||
private static final String ENTITY_ID = "_entity_id";
|
||||
private static final String ENTITY_FIELD_NAME = "_entity_field_name";
|
||||
private static final String ENTITY_FIELD_CLASS = "_entity_field_class";
|
||||
|
||||
private final Logger log = LoggerFactory.getLogger(getClass());
|
||||
|
||||
private MongoTemplate mongoTemplate;
|
||||
private EntityManagerFactory entityManagerFactory;
|
||||
|
||||
public void setMongoTemplate(MongoTemplate mongoTemplate) {
|
||||
this.mongoTemplate = mongoTemplate;
|
||||
}
|
||||
|
||||
public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) {
|
||||
this.entityManagerFactory = entityManagerFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentState(java.lang.Class, java.lang.Object, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public void getPersistentState(Class<? extends ChangeSetBacked> entityClass, Object id, final ChangeSet changeSet)
|
||||
throws DataAccessException, NotFoundException {
|
||||
|
||||
if (id == null) {
|
||||
log.debug("Unable to load MongoDB data for null id");
|
||||
return;
|
||||
}
|
||||
|
||||
String collName = getCollectionNameForEntity(entityClass);
|
||||
|
||||
final Document dbk = new Document();
|
||||
dbk.put(ENTITY_ID, id);
|
||||
dbk.put(ENTITY_CLASS, entityClass.getName());
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Loading MongoDB data for {}", dbk);
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
|
||||
for (Document dbo : collection.find(dbk)) {
|
||||
String key = (String) dbo.get(ENTITY_FIELD_NAME);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Processing key: {}", key);
|
||||
}
|
||||
if (!changeSet.getValues().containsKey(key)) {
|
||||
String className = (String) dbo.get(ENTITY_FIELD_CLASS);
|
||||
if (className == null) {
|
||||
throw new DataIntegrityViolationException(
|
||||
"Unble to convert property " + key + ": Invalid metadata, " + ENTITY_FIELD_CLASS + " not available");
|
||||
}
|
||||
Class<?> clazz = ClassUtils.resolveClassName(className, ClassUtils.getDefaultClassLoader());
|
||||
Object value = mongoTemplate.getConverter().read(clazz, dbo);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Adding to ChangeSet: {}", key);
|
||||
}
|
||||
changeSet.set(key, value);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("getPersistentId called on {}", entity);
|
||||
}
|
||||
if (entityManagerFactory == null) {
|
||||
throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null");
|
||||
}
|
||||
|
||||
return entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#persistState(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public Object persistState(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
|
||||
if (cs == null) {
|
||||
log.debug("Flush: changeset was null, nothing to flush.");
|
||||
return 0L;
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: changeset: {}", cs.getValues());
|
||||
}
|
||||
|
||||
String collName = getCollectionNameForEntity(entity.getClass());
|
||||
if (mongoTemplate.getCollection(collName) == null) {
|
||||
mongoTemplate.createCollection(collName);
|
||||
}
|
||||
|
||||
for (String key : cs.getValues().keySet()) {
|
||||
if (key != null && !key.startsWith("_") && !key.equals(ChangeSetPersister.ID_KEY)) {
|
||||
Object value = cs.getValues().get(key);
|
||||
final Document dbQuery = new Document();
|
||||
dbQuery.put(ENTITY_ID, getPersistentId(entity, cs));
|
||||
dbQuery.put(ENTITY_CLASS, entity.getClass().getName());
|
||||
dbQuery.put(ENTITY_FIELD_NAME, key);
|
||||
final Document dbId = mongoTemplate.execute(collName, new CollectionCallback<Document>() {
|
||||
public Document doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
Document id = collection.find(dbQuery).first();
|
||||
return id;
|
||||
}
|
||||
});
|
||||
|
||||
if (value == null) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: removing: {}", dbQuery);
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
DeleteResult dr = collection.deleteMany(dbQuery);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
final Document dbDoc = new Document();
|
||||
dbDoc.putAll(dbQuery);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: saving: {}", dbQuery);
|
||||
}
|
||||
mongoTemplate.getConverter().write(value, dbDoc);
|
||||
dbDoc.put(ENTITY_FIELD_CLASS, value.getClass().getName());
|
||||
if (dbId != null) {
|
||||
dbDoc.put("_id", dbId.get("_id"));
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
|
||||
if (dbId != null) {
|
||||
collection.replaceOne(Filters.eq("_id", dbId.get("_id")), dbDoc);
|
||||
} else {
|
||||
|
||||
if (dbDoc.containsKey("_id") && dbDoc.get("_id") == null) {
|
||||
dbDoc.remove("_id");
|
||||
}
|
||||
collection.insertOne(dbDoc);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0L;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the collection the given entity type shall be persisted to.
|
||||
*
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private String getCollectionNameForEntity(Class<? extends ChangeSetBacked> entityClass) {
|
||||
return mongoTemplate.getCollectionName(entityClass);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,272 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.Transient;
|
||||
import javax.persistence.Entity;
|
||||
|
||||
import org.aspectj.lang.JoinPoint;
|
||||
import org.aspectj.lang.reflect.FieldSignature;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.crossstore.RelatedDocument;
|
||||
import org.springframework.data.mongodb.crossstore.DocumentBacked;
|
||||
import org.springframework.data.crossstore.ChangeSetBackedTransactionSynchronization;
|
||||
import org.springframework.data.crossstore.ChangeSet;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister.NotFoundException;
|
||||
import org.springframework.data.crossstore.HashMapChangeSet;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||
|
||||
/**
|
||||
* Aspect to turn an object annotated with @Document into a persistent document using Mongo.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @deprecated will be removed without replacement.
|
||||
*/
|
||||
@Deprecated
|
||||
public aspect MongoDocumentBacking {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoDocumentBacking.class);
|
||||
|
||||
// Aspect shared config
|
||||
private ChangeSetPersister<Object> changeSetPersister;
|
||||
|
||||
public void setChangeSetPersister(ChangeSetPersister<Object> changeSetPersister) {
|
||||
this.changeSetPersister = changeSetPersister;
|
||||
}
|
||||
|
||||
// ITD to introduce N state to Annotated objects
|
||||
declare parents : (@Entity *) implements DocumentBacked;
|
||||
|
||||
// The annotated fields that will be persisted in MongoDB rather than with JPA
|
||||
declare @field: @RelatedDocument * (@Entity+ *).*:@Transient;
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Advise user-defined constructors of ChangeSetBacked objects to create a new
|
||||
// backing ChangeSet
|
||||
// -------------------------------------------------------------------------
|
||||
pointcut arbitraryUserConstructorOfChangeSetBackedObject(DocumentBacked entity) :
|
||||
execution((DocumentBacked+).new(..)) &&
|
||||
!execution((DocumentBacked+).new(ChangeSet)) &&
|
||||
this(entity);
|
||||
|
||||
pointcut finderConstructorOfChangeSetBackedObject(DocumentBacked entity, ChangeSet cs) :
|
||||
execution((DocumentBacked+).new(ChangeSet)) &&
|
||||
this(entity) &&
|
||||
args(cs);
|
||||
|
||||
protected pointcut entityFieldGet(DocumentBacked entity) :
|
||||
get(@RelatedDocument * DocumentBacked+.*) &&
|
||||
this(entity) &&
|
||||
!get(* DocumentBacked.*);
|
||||
|
||||
protected pointcut entityFieldSet(DocumentBacked entity, Object newVal) :
|
||||
set(@RelatedDocument * DocumentBacked+.*) &&
|
||||
this(entity) &&
|
||||
args(newVal) &&
|
||||
!set(* DocumentBacked.*);
|
||||
|
||||
// intercept EntityManager.merge calls
|
||||
public pointcut entityManagerMerge(EntityManager em, Object entity) :
|
||||
call(* EntityManager.merge(Object)) &&
|
||||
target(em) &&
|
||||
args(entity);
|
||||
|
||||
// intercept EntityManager.remove calls
|
||||
// public pointcut entityManagerRemove(EntityManager em, Object entity) :
|
||||
// call(* EntityManager.remove(Object)) &&
|
||||
// target(em) &&
|
||||
// args(entity);
|
||||
|
||||
// move changeSet from detached entity to the newly merged persistent object
|
||||
Object around(EntityManager em, Object entity) : entityManagerMerge(em, entity) {
|
||||
Object mergedEntity = proceed(em, entity);
|
||||
if (entity instanceof DocumentBacked && mergedEntity instanceof DocumentBacked) {
|
||||
((DocumentBacked) mergedEntity).changeSet = ((DocumentBacked) entity).getChangeSet();
|
||||
}
|
||||
return mergedEntity;
|
||||
}
|
||||
|
||||
// clear changeSet from removed entity
|
||||
// Object around(EntityManager em, Object entity) : entityManagerRemove(em, entity) {
|
||||
// if (entity instanceof DocumentBacked) {
|
||||
// removeChangeSetValues((DocumentBacked)entity);
|
||||
// }
|
||||
// return proceed(em, entity);
|
||||
// }
|
||||
|
||||
private static void removeChangeSetValues(DocumentBacked entity) {
|
||||
LOGGER.debug("Removing all change-set values for " + entity);
|
||||
ChangeSet nulledCs = new HashMapChangeSet();
|
||||
DocumentBacked documentEntity = (DocumentBacked) entity;
|
||||
@SuppressWarnings("unchecked")
|
||||
ChangeSetPersister<Object> changeSetPersister = (ChangeSetPersister<Object>) documentEntity.itdChangeSetPersister;
|
||||
try {
|
||||
changeSetPersister.getPersistentState(documentEntity.getClass(), documentEntity.get_persistent_id(),
|
||||
documentEntity.getChangeSet());
|
||||
} catch (DataAccessException e) {
|
||||
} catch (NotFoundException e) {
|
||||
}
|
||||
for (String key : entity.getChangeSet().getValues().keySet()) {
|
||||
nulledCs.set(key, null);
|
||||
}
|
||||
entity.setChangeSet(nulledCs);
|
||||
}
|
||||
|
||||
before(DocumentBacked entity) : arbitraryUserConstructorOfChangeSetBackedObject(entity) {
|
||||
LOGGER.debug("User-defined constructor called on DocumentBacked object of class " + entity.getClass());
|
||||
// Populate all ITD fields
|
||||
entity.setChangeSet(new HashMapChangeSet());
|
||||
entity.itdChangeSetPersister = changeSetPersister;
|
||||
entity.itdTransactionSynchronization = new ChangeSetBackedTransactionSynchronization(changeSetPersister, entity);
|
||||
// registerTransactionSynchronization(entity);
|
||||
}
|
||||
|
||||
private static void registerTransactionSynchronization(DocumentBacked entity) {
|
||||
if (TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
if (!TransactionSynchronizationManager.getSynchronizations().contains(entity.itdTransactionSynchronization)) {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Adding transaction synchronization for " + entity);
|
||||
}
|
||||
TransactionSynchronizationManager.registerSynchronization(entity.itdTransactionSynchronization);
|
||||
} else {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Transaction synchronization already active for " + entity);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Transaction synchronization is not active for " + entity);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// ChangeSet-related mixins
|
||||
// -------------------------------------------------------------------------
|
||||
// Introduced field
|
||||
@Transient
|
||||
private ChangeSet DocumentBacked.changeSet;
|
||||
|
||||
@Transient
|
||||
private ChangeSetPersister<?> DocumentBacked.itdChangeSetPersister;
|
||||
|
||||
@Transient
|
||||
private ChangeSetBackedTransactionSynchronization DocumentBacked.itdTransactionSynchronization;
|
||||
|
||||
public void DocumentBacked.setChangeSet(ChangeSet cs) {
|
||||
this.changeSet = cs;
|
||||
}
|
||||
|
||||
public ChangeSet DocumentBacked.getChangeSet() {
|
||||
return changeSet;
|
||||
}
|
||||
|
||||
// Flush the entity state to the persistent store
|
||||
public void DocumentBacked.flush() {
|
||||
Object id = itdChangeSetPersister.getPersistentId(this, this.changeSet);
|
||||
itdChangeSetPersister.persistState(this, this.changeSet);
|
||||
}
|
||||
|
||||
public Object DocumentBacked.get_persistent_id() {
|
||||
return itdChangeSetPersister.getPersistentId(this, this.changeSet);
|
||||
}
|
||||
|
||||
// lifecycle methods
|
||||
@javax.persistence.PostPersist
|
||||
public void DocumentBacked.itdPostPersist() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PrePersist: " + this.getClass().getName());
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PreUpdate
|
||||
public void DocumentBacked.itdPreUpdate() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PreUpdate: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PostUpdate
|
||||
public void DocumentBacked.itdPostUpdate() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PostUpdate: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PostRemove
|
||||
public void DocumentBacked.itdPostRemove() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PostRemove: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
removeChangeSetValues(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PostLoad
|
||||
public void DocumentBacked.itdPostLoad() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PostLoad: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* delegates field reads to the state accessors instance
|
||||
*/
|
||||
Object around(DocumentBacked entity): entityFieldGet(entity) {
|
||||
Field f = field(thisJoinPoint);
|
||||
String propName = f.getName();
|
||||
LOGGER.trace("GET " + f + " -> ChangeSet value property [" + propName + "] using: " + entity.getChangeSet());
|
||||
if (entity.getChangeSet().getValues().get(propName) == null) {
|
||||
try {
|
||||
this.changeSetPersister
|
||||
.getPersistentState(entity.getClass(), entity.get_persistent_id(), entity.getChangeSet());
|
||||
} catch (NotFoundException e) {
|
||||
}
|
||||
}
|
||||
Object fValue = entity.getChangeSet().getValues().get(propName);
|
||||
if (fValue != null) {
|
||||
return fValue;
|
||||
}
|
||||
return proceed(entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* delegates field writes to the state accessors instance
|
||||
*/
|
||||
Object around(DocumentBacked entity, Object newVal) : entityFieldSet(entity, newVal) {
|
||||
Field f = field(thisJoinPoint);
|
||||
String propName = f.getName();
|
||||
LOGGER.trace("SET " + f + " -> ChangeSet number value property [" + propName + "] with value=[" + newVal + "]");
|
||||
entity.getChangeSet().set(propName, newVal);
|
||||
return proceed(entity, newVal);
|
||||
}
|
||||
|
||||
Field field(JoinPoint joinPoint) {
|
||||
FieldSignature fieldSignature = (FieldSignature) joinPoint.getSignature();
|
||||
return fieldSignature.getField();
|
||||
}
|
||||
}
|
||||
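The `MongoDocumentBacking` aspect above turns every JPA `@Entity` declaring `@RelatedDocument` fields into a `DocumentBacked` object: the annotated fields are declared `@Transient` for JPA and are instead read and written through the configured `ChangeSetPersister` (the `Person`/`Resume` classes further down are the in-tree example). As a hedged sketch of what calling code looks like (the `Customer`, `Profile`, and service types below are hypothetical, not part of this change), a plain JPA persist is enough; the woven aspect handles the MongoDB side:

```java
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.Id;

import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.crossstore.RelatedDocument;

@Entity
class Customer { // hypothetical JPA entity, persisted relationally

	@Id Long id;
	String name;

	@RelatedDocument
	Profile profile; // stored in MongoDB by the aspect, not in the relational table
}

@Document
class Profile { // hypothetical MongoDB document
	String bio;
}

class CustomerService {

	void register(EntityManager em, Customer customer) {
		// A plain JPA persist; the woven aspect intercepts the @RelatedDocument field
		// and flushes it through the configured MongoChangeSetPersister.
		em.persist(customer);
	}
}
```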
@@ -1,5 +1,5 @@
/*
 * Copyright 2019 the original author or authors.
 * Copyright 2011-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -13,15 +13,19 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.repository;
package org.springframework.data.mongodb.crossstore;

import lombok.Value;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * @author Christoph Strobl
 * @author Thomas Risberg
 * @deprecated will be removed without replacement.
 */
@Value
class SumAge {

	private Long total;
@Deprecated
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.FIELD })
public @interface RelatedDocument {
}
@@ -0,0 +1,5 @@
/**
 * Infrastructure for Spring Data's MongoDB cross store support.
 */
package org.springframework.data.mongodb.crossstore;
@@ -0,0 +1,195 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.PersistenceContext;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.junit.After;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.crossstore.test.Address;
|
||||
import org.springframework.data.mongodb.crossstore.test.Person;
|
||||
import org.springframework.data.mongodb.crossstore.test.Resume;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import org.springframework.transaction.support.TransactionCallback;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
/**
|
||||
* Integration tests for MongoDB cross-store persistence (mainly {@link MongoChangeSetPersister}).
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:/META-INF/spring/applicationContext.xml")
|
||||
public class CrossStoreMongoTests {
|
||||
|
||||
@Autowired MongoTemplate mongoTemplate;
|
||||
|
||||
@PersistenceContext EntityManager entityManager;
|
||||
|
||||
@Autowired PlatformTransactionManager transactionManager;
|
||||
TransactionTemplate txTemplate;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
txTemplate = new TransactionTemplate(transactionManager);
|
||||
|
||||
clearData(Person.class);
|
||||
|
||||
Address address = new Address(12, "Main St.", "Boston", "MA", "02101");
|
||||
|
||||
Resume resume = new Resume();
|
||||
resume.addEducation("Skanstulls High School, 1975");
|
||||
resume.addEducation("Univ. of Stockholm, 1980");
|
||||
resume.addJob("DiMark, DBA, 1990-2000");
|
||||
resume.addJob("VMware, Developer, 2007-");
|
||||
|
||||
final Person person = new Person("Thomas", 20);
|
||||
person.setAddress(address);
|
||||
person.setResume(resume);
|
||||
person.setId(1L);
|
||||
|
||||
txTemplate.execute(new TransactionCallback<Void>() {
|
||||
public Void doInTransaction(TransactionStatus status) {
|
||||
entityManager.persist(person);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
txTemplate.execute(new TransactionCallback<Void>() {
|
||||
public Void doInTransaction(TransactionStatus status) {
|
||||
entityManager.remove(entityManager.find(Person.class, 1L));
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void clearData(Class<?> domainType) {
|
||||
|
||||
String collectionName = mongoTemplate.getCollectionName(domainType);
|
||||
mongoTemplate.dropCollection(collectionName);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Transactional
|
||||
public void testReadJpaToMongoEntityRelationship() {
|
||||
|
||||
Person found = entityManager.find(Person.class, 1L);
|
||||
Assert.assertNotNull(found);
|
||||
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found);
|
||||
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found.getResume());
|
||||
Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-", found.getResume().getJobs());
|
||||
}
|
||||
|
||||
@Test
|
||||
@Transactional
|
||||
public void testUpdatedJpaToMongoEntityRelationship() {
|
||||
|
||||
Person found = entityManager.find(Person.class, 1L);
|
||||
found.setAge(44);
|
||||
found.getResume().addJob("SpringDeveloper.com, Consultant, 2005-2006");
|
||||
|
||||
entityManager.merge(found);
|
||||
|
||||
Assert.assertNotNull(found);
|
||||
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found);
|
||||
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found.getResume());
|
||||
Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-" + "; "
|
||||
+ "SpringDeveloper.com, Consultant, 2005-2006", found.getResume().getJobs());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMergeJpaEntityWithMongoDocument() {
|
||||
|
||||
final Person detached = entityManager.find(Person.class, 1L);
|
||||
entityManager.detach(detached);
|
||||
detached.getResume().addJob("TargetRx, Developer, 2000-2005");
|
||||
|
||||
Person merged = txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
Person result = entityManager.merge(detached);
|
||||
entityManager.flush();
|
||||
return result;
|
||||
}
|
||||
});
|
||||
|
||||
Assert.assertTrue(detached.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
Assert.assertTrue(merged.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
final Person updated = entityManager.find(Person.class, 1L);
|
||||
Assert.assertTrue(updated.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRemoveJpaEntityWithMongoDocument() {
|
||||
|
||||
txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
Person p2 = new Person("Thomas", 20);
|
||||
Resume r2 = new Resume();
|
||||
r2.addEducation("Skanstulls High School, 1975");
|
||||
r2.addJob("DiMark, DBA, 1990-2000");
|
||||
p2.setResume(r2);
|
||||
p2.setId(2L);
|
||||
entityManager.persist(p2);
|
||||
Person p3 = new Person("Thomas", 20);
|
||||
Resume r3 = new Resume();
|
||||
r3.addEducation("Univ. of Stockholm, 1980");
|
||||
r3.addJob("VMware, Developer, 2007-");
|
||||
p3.setResume(r3);
|
||||
p3.setId(3L);
|
||||
entityManager.persist(p3);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
final Person found2 = entityManager.find(Person.class, 2L);
|
||||
entityManager.remove(found2);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
boolean weFound3 = false;
|
||||
|
||||
for (Document dbo : this.mongoTemplate.getCollection(mongoTemplate.getCollectionName(Person.class)).find()) {
|
||||
Assert.assertTrue(!dbo.get("_entity_id").equals(2L));
|
||||
if (dbo.get("_entity_id").equals(3L)) {
|
||||
weFound3 = true;
|
||||
}
|
||||
}
|
||||
Assert.assertTrue(weFound3);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
public class Address {
|
||||
|
||||
private Integer streetNumber;
|
||||
private String streetName;
|
||||
private String city;
|
||||
private String state;
|
||||
private String zip;
|
||||
|
||||
public Address(Integer streetNumber, String streetName, String city, String state, String zip) {
|
||||
super();
|
||||
this.streetNumber = streetNumber;
|
||||
this.streetName = streetName;
|
||||
this.city = city;
|
||||
this.state = state;
|
||||
this.zip = zip;
|
||||
}
|
||||
|
||||
public Integer getStreetNumber() {
|
||||
return streetNumber;
|
||||
}
|
||||
|
||||
public void setStreetNumber(Integer streetNumber) {
|
||||
this.streetNumber = streetNumber;
|
||||
}
|
||||
|
||||
public String getStreetName() {
|
||||
return streetName;
|
||||
}
|
||||
|
||||
public void setStreetName(String streetName) {
|
||||
this.streetName = streetName;
|
||||
}
|
||||
|
||||
public String getCity() {
|
||||
return city;
|
||||
}
|
||||
|
||||
public void setCity(String city) {
|
||||
this.city = city;
|
||||
}
|
||||
|
||||
public String getState() {
|
||||
return state;
|
||||
}
|
||||
|
||||
public void setState(String state) {
|
||||
this.state = state;
|
||||
}
|
||||
|
||||
public String getZip() {
|
||||
return zip;
|
||||
}
|
||||
|
||||
public void setZip(String zip) {
|
||||
this.zip = zip;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,102 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.Id;
|
||||
|
||||
import org.springframework.data.mongodb.crossstore.RelatedDocument;
|
||||
|
||||
@Entity
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
Long id;
|
||||
|
||||
private String name;
|
||||
|
||||
private int age;
|
||||
|
||||
private java.util.Date birthDate;
|
||||
|
||||
@RelatedDocument
|
||||
private Address address;
|
||||
|
||||
@RelatedDocument
|
||||
private Resume resume;
|
||||
|
||||
public Person() {
|
||||
}
|
||||
|
||||
public Person(String name, int age) {
|
||||
this.name = name;
|
||||
this.age = age;
|
||||
this.birthDate = new java.util.Date();
|
||||
}
|
||||
|
||||
public void birthday() {
|
||||
++age;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Long id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public int getAge() {
|
||||
return age;
|
||||
}
|
||||
|
||||
public void setAge(int age) {
|
||||
this.age = age;
|
||||
}
|
||||
|
||||
public java.util.Date getBirthDate() {
|
||||
return birthDate;
|
||||
}
|
||||
|
||||
public void setBirthDate(java.util.Date birthDate) {
|
||||
this.birthDate = birthDate;
|
||||
}
|
||||
|
||||
public Resume getResume() {
|
||||
return resume;
|
||||
}
|
||||
|
||||
public void setResume(Resume resume) {
|
||||
this.resume = resume;
|
||||
}
|
||||
|
||||
public Address getAddress() {
|
||||
return address;
|
||||
}
|
||||
|
||||
public void setAddress(Address address) {
|
||||
this.address = address;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,63 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
|
||||
@Document
|
||||
public class Resume {
|
||||
|
||||
private static final Log LOGGER = LogFactory.getLog(Resume.class);
|
||||
|
||||
@Id
|
||||
private ObjectId id;
|
||||
|
||||
private String education = "";
|
||||
|
||||
private String jobs = "";
|
||||
|
||||
public String getId() {
|
||||
return id.toString();
|
||||
}
|
||||
|
||||
public String getEducation() {
|
||||
return education;
|
||||
}
|
||||
|
||||
public void addEducation(String education) {
|
||||
LOGGER.debug("Adding education " + education);
|
||||
this.education = this.education + (this.education.length() > 0 ? "; " : "") + education;
|
||||
}
|
||||
|
||||
public String getJobs() {
|
||||
return jobs;
|
||||
}
|
||||
|
||||
public void addJob(String job) {
|
||||
LOGGER.debug("Adding job " + job);
|
||||
this.jobs = this.jobs + (this.jobs.length() > 0 ? "; " : "") + job;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Resume [education=" + education + ", jobs=" + jobs + "]";
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<persistence xmlns="http://java.sun.com/xml/ns/persistence" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
version="2.0"
xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_2_0.xsd">
<persistence-unit name="test" transaction-type="RESOURCE_LOCAL">
<provider>org.hibernate.ejb.HibernatePersistence</provider>
<class>org.springframework.data.mongodb.crossstore.test.Person</class>
<properties>
<property name="hibernate.dialect" value="org.hibernate.dialect.HSQLDialect"/>
<!--value='create' to build a new database on each run; value='update' to modify an existing database; value='create-drop' means the same as 'create' but also drops tables when Hibernate closes; value='validate' makes no changes to the database-->
<property name="hibernate.hbm2ddl.auto" value="update"/>
<property name="hibernate.ejb.naming_strategy" value="org.hibernate.cfg.ImprovedNamingStrategy"/>
</properties>
</persistence-unit>
</persistence>
@@ -0,0 +1,72 @@
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:tx="http://www.springframework.org/schema/tx"
xmlns:jdbc="http://www.springframework.org/schema/jdbc"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:mongo="http://www.springframework.org/schema/data/mongo"
xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd
http://www.springframework.org/schema/jdbc http://www.springframework.org/schema/jdbc/spring-jdbc-3.0.xsd
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-3.0.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd">

<context:spring-configured/>

<context:component-scan base-package="org.springframework.persistence.mongodb.test">
<context:exclude-filter expression="org.springframework.stereotype.Controller" type="annotation"/>
</context:component-scan>

<mongo:mapping-converter/>

<!-- Mongo config -->
<bean id="mongoClient" class="org.springframework.data.mongodb.core.MongoClientFactoryBean">
<property name="host" value="localhost"/>
<property name="port" value="27017"/>
</bean>

<bean id="mongoDbFactory" class="org.springframework.data.mongodb.core.SimpleMongoDbFactory">
<constructor-arg name="mongoClient" ref="mongoClient"/>
<constructor-arg name="databaseName" value="database"/>
</bean>

<bean id="mongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
<constructor-arg name="mongoDbFactory" ref="mongoDbFactory"/>
<constructor-arg name="mongoConverter" ref="mappingConverter"/>
</bean>

<bean class="org.springframework.data.mongodb.core.MongoExceptionTranslator"/>

<!-- Mongo aspect config -->
<bean class="org.springframework.data.mongodb.crossstore.MongoDocumentBacking"
factory-method="aspectOf">
<property name="changeSetPersister" ref="mongoChangeSetPersister"/>
</bean>
<bean id="mongoChangeSetPersister"
class="org.springframework.data.mongodb.crossstore.MongoChangeSetPersister">
<property name="mongoTemplate" ref="mongoTemplate"/>
<property name="entityManagerFactory" ref="entityManagerFactory"/>
</bean>

<jdbc:embedded-database id="dataSource" type="HSQL">
</jdbc:embedded-database>

<bean id="transactionManager" class="org.springframework.orm.jpa.JpaTransactionManager">
<property name="entityManagerFactory" ref="entityManagerFactory"/>
</bean>

<tx:annotation-driven mode="aspectj" transaction-manager="transactionManager"/>

<bean class="org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean" id="entityManagerFactory">
<property name="persistenceUnitName" value="test"/>
<property name="dataSource" ref="dataSource"/>
<property name="jpaVendorAdapter">
<bean class="org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter">
<property name="showSql" value="true"/>
<property name="generateDdl" value="true"/>
<property name="databasePlatform" value="org.hibernate.dialect.HSQLDialect"/>
</bean>
</property>
</bean>

</beans>
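The application context above wires the cross-store pieces together in XML: a `MongoTemplate`, the `MongoChangeSetPersister`, and the compile-time-woven `MongoDocumentBacking` aspect obtained via `factory-method="aspectOf"`. Purely for orientation, a rough Java-configuration equivalent of the Mongo-specific beans might look like the sketch below; this is an assumption for illustration (the module itself only ships the XML), and the JPA `EntityManagerFactory`, data source, and `<context:spring-configured/>` setup are omitted:

```java
import javax.persistence.EntityManagerFactory;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.crossstore.MongoChangeSetPersister;
import org.springframework.data.mongodb.crossstore.MongoDocumentBacking;

import com.mongodb.MongoClient;

@Configuration
class CrossStoreMongoConfig {

	@Bean
	MongoTemplate mongoTemplate() {
		// Mirrors the XML: localhost:27017 and a database named "database".
		return new MongoTemplate(new SimpleMongoDbFactory(new MongoClient("localhost", 27017), "database"));
	}

	@Bean
	MongoChangeSetPersister mongoChangeSetPersister(MongoTemplate mongoTemplate, EntityManagerFactory emf) {
		MongoChangeSetPersister persister = new MongoChangeSetPersister();
		persister.setMongoTemplate(mongoTemplate);
		persister.setEntityManagerFactory(emf);
		return persister;
	}

	@Bean
	MongoDocumentBacking mongoDocumentBacking(MongoChangeSetPersister persister) {
		// Compile-time-woven aspects expose a static aspectOf() accessor,
		// which is what the XML's factory-method="aspectOf" calls.
		MongoDocumentBacking aspect = MongoDocumentBacking.aspectOf();
		aspect.setChangeSetPersister(persister);
		return aspect;
	}
}
```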
@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>

<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d %5p %40.40c:%4L - %m%n</pattern>
</encoder>
</appender>

<!--
<logger name="org.springframework" level="debug" />
-->

<root level="error">
<appender-ref ref="console" />
</root>

</configuration>
@@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">

<modelVersion>4.0.0</modelVersion>

@@ -14,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.2.0.RC1</version>
<version>2.0.15.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -36,15 +35,8 @@
<plugin>
<groupId>org.asciidoctor</groupId>
<artifactId>asciidoctor-maven-plugin</artifactId>
<configuration>
<attributes>
<mongo-reactivestreams>${mongo.reactivestreams}</mongo-reactivestreams>
<reactor>${reactor}</reactor>
</attributes>
</configuration>
</plugin>
</plugins>

</build>

</project>
@@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.2.0.RC1</version>
|
||||
<version>2.0.15.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -20,7 +20,6 @@
|
||||
<equalsverifier>1.7.8</equalsverifier>
|
||||
<java-module-name>spring.data.mongodb</java-module-name>
|
||||
<project.root>${basedir}/..</project.root>
|
||||
<multithreadedtc>1.01</multithreadedtc>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
@@ -83,14 +82,14 @@
|
||||
|
||||
<!-- reactive -->
<dependency>
	<groupId>org.mongodb</groupId>
	<artifactId>mongodb-driver-reactivestreams</artifactId>
	<version>${mongo.reactivestreams}</version>
	<optional>true</optional>
</dependency>

<dependency>
	<groupId>org.mongodb</groupId>
	<artifactId>mongodb-driver-async</artifactId>
	<version>${mongo}</version>
	<optional>true</optional>
@@ -107,7 +106,7 @@
</dependency>

<dependency>
	<groupId>io.projectreactor</groupId>
	<artifactId>reactor-core</artifactId>
	<optional>true</optional>
</dependency>
@@ -119,14 +118,14 @@
</dependency>

<dependency>
	<groupId>io.reactivex</groupId>
	<artifactId>rxjava</artifactId>
	<version>${rxjava}</version>
	<optional>true</optional>
</dependency>

<dependency>
	<groupId>io.reactivex</groupId>
	<artifactId>rxjava-reactive-streams</artifactId>
	<version>${rxjava-reactive-streams}</version>
	<optional>true</optional>
@@ -246,53 +245,45 @@
	<scope>test</scope>
</dependency>

<dependency>
	<groupId>edu.umd.cs.mtc</groupId>
	<artifactId>multithreadedtc</artifactId>
	<version>${multithreadedtc}</version>
	<scope>test</scope>
</dependency>

<dependency>
	<groupId>javax.transaction</groupId>
	<artifactId>jta</artifactId>
	<version>1.1</version>
	<scope>test</scope>
</dependency>

<!-- Kotlin extension -->
<dependency>
	<groupId>org.jetbrains.kotlin</groupId>
	<artifactId>kotlin-stdlib</artifactId>
	<version>${kotlin}</version>
	<optional>true</optional>
</dependency>

<dependency>
	<groupId>org.jetbrains.kotlin</groupId>
	<artifactId>kotlin-reflect</artifactId>
	<version>${kotlin}</version>
	<optional>true</optional>
</dependency>

<dependency>
	<groupId>org.jetbrains.kotlinx</groupId>
	<artifactId>kotlinx-coroutines-core</artifactId>
	<version>${kotlin-coroutines}</version>
	<optional>true</optional>
</dependency>

<dependency>
	<groupId>org.jetbrains.kotlinx</groupId>
	<artifactId>kotlinx-coroutines-reactor</artifactId>
	<version>${kotlin-coroutines}</version>
	<optional>true</optional>
</dependency>

<dependency>
	<groupId>io.mockk</groupId>
	<artifactId>mockk</artifactId>
	<version>${mockk}</version>
	<groupId>org.jetbrains.kotlin</groupId>
	<artifactId>kotlin-test</artifactId>
	<version>${kotlin}</version>
	<scope>test</scope>
</dependency>
<dependency>
	<groupId>com.nhaarman</groupId>
	<artifactId>mockito-kotlin</artifactId>
	<version>1.5.0</version>
	<scope>test</scope>
	<exclusions>
		<exclusion>
			<groupId>org.jetbrains.kotlin</groupId>
			<artifactId>kotlin-stdlib</artifactId>
		</exclusion>
		<exclusion>
			<groupId>org.jetbrains.kotlin</groupId>
			<artifactId>kotlin-reflect</artifactId>
		</exclusion>
		<exclusion>
			<groupId>org.mockito</groupId>
			<artifactId>mockito-core</artifactId>
		</exclusion>
	</exclusions>
</dependency>

</dependencies>

@@ -329,7 +320,6 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
	<useSystemClassLoader>false</useSystemClassLoader>
	<useFile>false</useFile>
	<includes>
		<include>**/*Tests.java</include>
@@ -0,0 +1,69 @@
/*
 * Copyright 2010-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb;

import org.springframework.dao.DataAccessResourceFailureException;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.lang.Nullable;

/**
 * Exception being thrown in case we cannot connect to a MongoDB instance.
 *
 * @author Oliver Gierke
 * @author Mark Paluch
 */
public class CannotGetMongoDbConnectionException extends DataAccessResourceFailureException {

	private final UserCredentials credentials;
	private final @Nullable String database;

	private static final long serialVersionUID = 1172099106475265589L;

	public CannotGetMongoDbConnectionException(String msg, Throwable cause) {
		super(msg, cause);
		this.database = null;
		this.credentials = UserCredentials.NO_CREDENTIALS;
	}

	public CannotGetMongoDbConnectionException(String msg) {
		this(msg, null, UserCredentials.NO_CREDENTIALS);
	}

	public CannotGetMongoDbConnectionException(String msg, @Nullable String database, UserCredentials credentials) {
		super(msg);
		this.database = database;
		this.credentials = credentials;
	}

	/**
	 * Returns the {@link UserCredentials} that were used when trying to connect to the MongoDB instance.
	 *
	 * @return
	 */
	public UserCredentials getCredentials() {
		return this.credentials;
	}

	/**
	 * Returns the name of the database trying to be accessed.
	 *
	 * @return
	 */
	@Nullable
	public String getDatabase() {
		return database;
	}
}
@@ -1,48 +0,0 @@
/*
 * Copyright 2018-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb;

import org.springframework.dao.NonTransientDataAccessException;
import org.springframework.lang.Nullable;

/**
 * {@link NonTransientDataAccessException} specific to MongoDB {@link com.mongodb.session.ClientSession} related data
 * access failures such as reading data using an already closed session.
 *
 * @author Christoph Strobl
 * @since 2.1
 */
public class ClientSessionException extends NonTransientDataAccessException {

	/**
	 * Constructor for {@link ClientSessionException}.
	 *
	 * @param msg the detail message. Must not be {@literal null}.
	 */
	public ClientSessionException(String msg) {
		super(msg);
	}

	/**
	 * Constructor for {@link ClientSessionException}.
	 *
	 * @param msg the detail message. Can be {@literal null}.
	 * @param cause the root cause. Can be {@literal null}.
	 */
	public ClientSessionException(@Nullable String msg, @Nullable Throwable cause) {
		super(msg, cause);
	}
}
@@ -1,74 +0,0 @@
/*
 * Copyright 2017-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb;

import java.util.Optional;

import org.bson.codecs.Codec;
import org.bson.codecs.configuration.CodecConfigurationException;
import org.bson.codecs.configuration.CodecRegistry;
import org.springframework.util.Assert;

/**
 * Provider interface to obtain {@link CodecRegistry} from the underlying MongoDB Java driver.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 2.1
 */
@FunctionalInterface
public interface CodecRegistryProvider {

	/**
	 * Get the underlying {@link CodecRegistry} used by the MongoDB Java driver.
	 *
	 * @return never {@literal null}.
	 * @throws IllegalStateException if {@link CodecRegistry} cannot be obtained.
	 */
	CodecRegistry getCodecRegistry();

	/**
	 * Checks if a {@link Codec} is registered for a given type.
	 *
	 * @param type must not be {@literal null}.
	 * @return true if {@link #getCodecRegistry()} holds a {@link Codec} for given type.
	 * @throws IllegalStateException if {@link CodecRegistry} cannot be obtained.
	 */
	default boolean hasCodecFor(Class<?> type) {
		return getCodecFor(type).isPresent();
	}

	/**
	 * Get the {@link Codec} registered for the given {@literal type} or an {@link Optional#empty() empty Optional}
	 * instead.
	 *
	 * @param type must not be {@literal null}.
	 * @param <T>
	 * @return never {@literal null}.
	 * @throws IllegalArgumentException if {@literal type} is {@literal null}.
	 */
	default <T> Optional<Codec<T>> getCodecFor(Class<T> type) {

		Assert.notNull(type, "Type must not be null!");

		try {
			return Optional.of(getCodecRegistry().get(type));
		} catch (CodecConfigurationException e) {
			// ignore
		}
		return Optional.empty();
	}
}
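For orientation, here is a minimal sketch of how the CodecRegistryProvider contract above is typically consumed. It is illustrative only: the provider parameter and the Document lookup are assumptions for the example, not part of this change set.

import org.bson.Document;
import org.bson.codecs.Codec;
import org.springframework.data.mongodb.CodecRegistryProvider;

class CodecLookupExample {

	static void printDocumentCodec(CodecRegistryProvider provider) {

		// hasCodecFor(...) probes the driver's CodecRegistry without throwing
		// when no codec is registered for the requested type.
		if (provider.hasCodecFor(Document.class)) {

			// getCodecFor(...) returns an Optional, so an absent codec is handled explicitly.
			Codec<Document> codec = provider.getCodecFor(Document.class)
					.orElseThrow(IllegalStateException::new);

			System.out.println("Codec in use: " + codec.getClass().getSimpleName());
		}
	}
}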
@@ -1,240 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.support.ResourceHolderSynchronization;
|
||||
import org.springframework.transaction.support.TransactionSynchronization;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Helper class for managing {@link MongoDatabase} instances via {@link MongoDbFactory}. Used for obtaining
|
||||
* {@link ClientSession session bound} resources, such as {@link MongoDatabase} and
|
||||
* {@link com.mongodb.client.MongoCollection} suitable for transactional usage.
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @currentRead Shadow's Edge - Brent Weeks
|
||||
* @since 2.1
|
||||
*/
|
||||
public class MongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} from the given {@link MongoDbFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(MongoDbFactory factory) {
|
||||
return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} from the given {@link MongoDbFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(MongoDbFactory factory, SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(null, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name from the given {@link MongoDbFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDbFactory factory) {
|
||||
return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name from the given {@link MongoDbFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDbFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(dbName, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDbFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
Assert.notNull(factory, "Factory must not be null!");
|
||||
|
||||
if (!TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
return StringUtils.hasText(dbName) ? factory.getDb(dbName) : factory.getDb();
|
||||
}
|
||||
|
||||
ClientSession session = doGetSession(factory, sessionSynchronization);
|
||||
|
||||
if (session == null) {
|
||||
return StringUtils.hasText(dbName) ? factory.getDb(dbName) : factory.getDb();
|
||||
}
|
||||
|
||||
MongoDbFactory factoryToUse = factory.withSession(session);
|
||||
return StringUtils.hasText(dbName) ? factoryToUse.getDb(dbName) : factoryToUse.getDb();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the {@link MongoDbFactory} is actually bound to a {@link ClientSession} that has an active transaction, or
|
||||
* if a {@link TransactionSynchronization} has been registered for the {@link MongoDbFactory resource} and if the
|
||||
* associated {@link ClientSession} has an {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @param dbFactory the resource to check transactions for. Must not be {@literal null}.
|
||||
* @return {@literal true} if the factory has an ongoing transaction.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
public static boolean isTransactionActive(MongoDbFactory dbFactory) {
|
||||
|
||||
if (dbFactory.isTransactionActive()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager.getResource(dbFactory);
|
||||
return resourceHolder != null && resourceHolder.hasActiveTransaction();
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private static ClientSession doGetSession(MongoDbFactory dbFactory, SessionSynchronization sessionSynchronization) {
|
||||
|
||||
MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager.getResource(dbFactory);
|
||||
|
||||
// check for native MongoDB transaction
|
||||
if (resourceHolder != null && (resourceHolder.hasSession() || resourceHolder.isSynchronizedWithTransaction())) {
|
||||
|
||||
if (!resourceHolder.hasSession()) {
|
||||
resourceHolder.setSession(createClientSession(dbFactory));
|
||||
}
|
||||
|
||||
return resourceHolder.getSession();
|
||||
}
|
||||
|
||||
if (SessionSynchronization.ON_ACTUAL_TRANSACTION.equals(sessionSynchronization)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// init a non native MongoDB transaction by registering a MongoSessionSynchronization
|
||||
|
||||
resourceHolder = new MongoResourceHolder(createClientSession(dbFactory), dbFactory);
|
||||
resourceHolder.getRequiredSession().startTransaction();
|
||||
|
||||
TransactionSynchronizationManager
|
||||
.registerSynchronization(new MongoSessionSynchronization(resourceHolder, dbFactory));
|
||||
resourceHolder.setSynchronizedWithTransaction(true);
|
||||
TransactionSynchronizationManager.bindResource(dbFactory, resourceHolder);
|
||||
|
||||
return resourceHolder.getSession();
|
||||
}
|
||||
|
||||
private static ClientSession createClientSession(MongoDbFactory dbFactory) {
|
||||
return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build());
|
||||
}
|
||||
|
||||
/**
|
||||
* MongoDB specific {@link ResourceHolderSynchronization} for resource cleanup at the end of a transaction when
|
||||
* participating in a non-native MongoDB transaction, such as a Jta or JDBC transaction.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
private static class MongoSessionSynchronization extends ResourceHolderSynchronization<MongoResourceHolder, Object> {
|
||||
|
||||
private final MongoResourceHolder resourceHolder;
|
||||
|
||||
MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDbFactory dbFactory) {
|
||||
|
||||
super(resourceHolder, dbFactory);
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceHolderSynchronization#shouldReleaseBeforeCompletion()
|
||||
*/
|
||||
@Override
|
||||
protected boolean shouldReleaseBeforeCompletion() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceHolderSynchronization#processResourceAfterCommit(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected void processResourceAfterCommit(MongoResourceHolder resourceHolder) {
|
||||
|
||||
if (resourceHolder.hasActiveTransaction()) {
|
||||
resourceHolder.getRequiredSession().commitTransaction();
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceHolderSynchronization#afterCompletion(int)
|
||||
*/
|
||||
@Override
|
||||
public void afterCompletion(int status) {
|
||||
|
||||
if (status == TransactionSynchronization.STATUS_ROLLED_BACK && this.resourceHolder.hasActiveTransaction()) {
|
||||
resourceHolder.getRequiredSession().abortTransaction();
|
||||
}
|
||||
|
||||
super.afterCompletion(status);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceHolderSynchronization#releaseResource(java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected void releaseResource(MongoResourceHolder resourceHolder, Object resourceKey) {
|
||||
|
||||
if (resourceHolder.hasActiveSession()) {
|
||||
resourceHolder.getRequiredSession().close();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
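As context for the MongoDatabaseUtils helper above, a hypothetical caller that resolves a transaction-aware database. The "shop" database, the "orders" collection and the factory wiring are assumptions made for this sketch, not taken from the diff.

import com.mongodb.client.MongoDatabase;
import org.springframework.data.mongodb.MongoDatabaseUtils;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.SessionSynchronization;

class TransactionAwareLookup {

	private final MongoDbFactory factory;

	TransactionAwareLookup(MongoDbFactory factory) {
		this.factory = factory;
	}

	long countOrders() {

		// Resolves either the plain database or, when transaction synchronization is
		// active for this factory, a database bound to the transactional ClientSession.
		MongoDatabase database = MongoDatabaseUtils.getDatabase("shop", factory,
				SessionSynchronization.ON_ACTUAL_TRANSACTION);

		return database.getCollection("orders").countDocuments();
	}
}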
@@ -15,14 +15,11 @@
 */
package org.springframework.data.mongodb;

import org.bson.codecs.configuration.CodecRegistry;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;

import com.mongodb.ClientSessionOptions;
import com.mongodb.DB;
import com.mongodb.client.ClientSession;
import com.mongodb.client.MongoDatabase;

/**
@@ -30,9 +27,8 @@ import com.mongodb.client.MongoDatabase;
 *
 * @author Mark Pollack
 * @author Thomas Darimont
 * @author Christoph Strobl
 */
public interface MongoDbFactory extends CodecRegistryProvider, MongoSessionProvider {
public interface MongoDbFactory {

	/**
	 * Creates a default {@link MongoDatabase} instance.
@@ -58,65 +54,5 @@ public interface MongoDbFactory extends CodecRegistryProvider, MongoSessionProvi
	 */
	PersistenceExceptionTranslator getExceptionTranslator();

	/**
	 * Get the legacy database entry point. Please consider {@link #getDb()} instead.
	 *
	 * @return
	 * @deprecated since 2.1, use {@link #getDb()}. This method will be removed with a future version as it works only
	 *             with the legacy MongoDB driver.
	 */
	@Deprecated
	DB getLegacyDb();

	/**
	 * Get the underlying {@link CodecRegistry} used by the MongoDB Java driver.
	 *
	 * @return never {@literal null}.
	 */
	@Override
	default CodecRegistry getCodecRegistry() {
		return getDb().getCodecRegistry();
	}

	/**
	 * Obtain a {@link ClientSession} for given ClientSessionOptions.
	 *
	 * @param options must not be {@literal null}.
	 * @return never {@literal null}.
	 * @since 2.1
	 */
	ClientSession getSession(ClientSessionOptions options);

	/**
	 * Obtain a {@link ClientSession} bound instance of {@link MongoDbFactory} returning {@link MongoDatabase} instances
	 * that are aware and bound to a new session with given {@link ClientSessionOptions options}.
	 *
	 * @param options must not be {@literal null}.
	 * @return never {@literal null}.
	 * @since 2.1
	 */
	default MongoDbFactory withSession(ClientSessionOptions options) {
		return withSession(getSession(options));
	}

	/**
	 * Obtain a {@link ClientSession} bound instance of {@link MongoDbFactory} returning {@link MongoDatabase} instances
	 * that are aware and bound to the given session.
	 *
	 * @param session must not be {@literal null}.
	 * @return never {@literal null}.
	 * @since 2.1
	 */
	MongoDbFactory withSession(ClientSession session);

	/**
	 * Returns if the given {@link MongoDbFactory} is bound to a {@link ClientSession} that has an
	 * {@link ClientSession#hasActiveTransaction() active transaction}.
	 *
	 * @return {@literal true} if there's an active transaction, {@literal false} otherwise.
	 * @since 2.1.3
	 */
	default boolean isTransactionActive() {
		return false;
	}
}
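To illustrate the session-related methods shown on the pre-change side of the MongoDbFactory hunk above, a sketch of obtaining a session-bound factory. The method names come from the interface as printed in the diff; the class and variable names are illustrative assumptions.

import com.mongodb.ClientSessionOptions;
import com.mongodb.client.ClientSession;
import com.mongodb.client.MongoDatabase;
import org.springframework.data.mongodb.MongoDbFactory;

class SessionBoundAccess {

	static MongoDatabase sessionBoundDatabase(MongoDbFactory factory) {

		// getSession(...) obtains a ClientSession from the underlying driver;
		// withSession(...) returns a factory whose databases are bound to that session.
		ClientSession session = factory.getSession(
				ClientSessionOptions.builder().causallyConsistent(true).build());

		return factory.withSession(session).getDb();
	}
}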
@@ -1,153 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.TransactionDefinition;
|
||||
import org.springframework.transaction.support.ResourceHolderSupport;
|
||||
|
||||
import com.mongodb.client.ClientSession;
|
||||
|
||||
/**
|
||||
* MongoDB specific {@link ResourceHolderSupport resource holder}, wrapping a {@link ClientSession}.
|
||||
* {@link MongoTransactionManager} binds instances of this class to the thread.
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
* @see MongoTransactionManager
|
||||
* @see org.springframework.data.mongodb.core.MongoTemplate
|
||||
*/
|
||||
class MongoResourceHolder extends ResourceHolderSupport {
|
||||
|
||||
private @Nullable ClientSession session;
|
||||
private MongoDbFactory dbFactory;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoResourceHolder} for a given {@link ClientSession session}.
|
||||
*
|
||||
* @param session the associated {@link ClientSession}. Can be {@literal null}.
|
||||
* @param dbFactory the associated {@link MongoDbFactory}. Must not be {@literal null}.
|
||||
*/
|
||||
MongoResourceHolder(@Nullable ClientSession session, MongoDbFactory dbFactory) {
|
||||
|
||||
this.session = session;
|
||||
this.dbFactory = dbFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the associated {@link ClientSession}. Can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
ClientSession getSession() {
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the required associated {@link ClientSession}.
|
||||
* @throws IllegalStateException if no {@link ClientSession} is associated with this {@link MongoResourceHolder}.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
ClientSession getRequiredSession() {
|
||||
|
||||
ClientSession session = getSession();
|
||||
|
||||
if (session == null) {
|
||||
throw new IllegalStateException("No session available!");
|
||||
}
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the associated {@link MongoDbFactory}.
|
||||
*/
|
||||
public MongoDbFactory getDbFactory() {
|
||||
return dbFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ClientSession} to guard.
|
||||
*
|
||||
* @param session can be {@literal null}.
|
||||
*/
|
||||
public void setSession(@Nullable ClientSession session) {
|
||||
this.session = session;
|
||||
}
|
||||
|
||||
/**
|
||||
* Only set the timeout if it does not match the {@link TransactionDefinition#TIMEOUT_DEFAULT default timeout}.
|
||||
*
|
||||
* @param seconds
|
||||
*/
|
||||
void setTimeoutIfNotDefaulted(int seconds) {
|
||||
|
||||
if (seconds != TransactionDefinition.TIMEOUT_DEFAULT) {
|
||||
setTimeoutInSeconds(seconds);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if session is not {@literal null}.
|
||||
*/
|
||||
boolean hasSession() {
|
||||
return session != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the session is active and has not been closed.
|
||||
*/
|
||||
boolean hasActiveSession() {
|
||||
|
||||
if (!hasSession()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return hasServerSession() && !getRequiredSession().getServerSession().isClosed();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the session has an active transaction.
|
||||
* @since 2.1.3
|
||||
* @see #hasActiveSession()
|
||||
*/
|
||||
boolean hasActiveTransaction() {
|
||||
|
||||
if (!hasActiveSession()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return getRequiredSession().hasActiveTransaction();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the {@link ClientSession} has a {@link com.mongodb.session.ServerSession} associated
|
||||
* that is accessible via {@link ClientSession#getServerSession()}.
|
||||
*/
|
||||
boolean hasServerSession() {
|
||||
|
||||
try {
|
||||
return getRequiredSession().getServerSession() != null;
|
||||
} catch (IllegalStateException serverSessionClosed) {
|
||||
// ignore
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,41 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.client.ClientSession;
|
||||
|
||||
/**
|
||||
* A simple interface for obtaining a {@link ClientSession} to be consumed by
|
||||
* {@link org.springframework.data.mongodb.core.MongoOperations} and MongoDB native operations that support causal
|
||||
* consistency and transactions.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @currentRead Shadow's Edge - Brent Weeks
|
||||
* @since 2.1
|
||||
*/
|
||||
@FunctionalInterface
|
||||
public interface MongoSessionProvider {
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} with the given options.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @throws org.springframework.dao.DataAccessException
|
||||
*/
|
||||
ClientSession getSession(ClientSessionOptions options);
|
||||
}
|
||||
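Since the MongoSessionProvider shown above is a functional interface, a provider can be expressed as a lambda delegating to the driver. The MongoClient parameter is an assumption for this sketch (a 3.8+ sync driver client), not part of the diff.

import com.mongodb.MongoClient;
import org.springframework.data.mongodb.MongoSessionProvider;

class SessionProviderExample {

	static MongoSessionProvider fromClient(MongoClient client) {
		// Delegates straight to the driver; the ClientSessionOptions are passed through unchanged.
		return options -> client.startSession(options);
	}
}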
@@ -1,47 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* A specific {@link ClientSessionException} related to issues with a transaction such as aborted or non existing
|
||||
* transactions.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
public class MongoTransactionException extends ClientSessionException {
|
||||
|
||||
/**
|
||||
* Constructor for {@link MongoTransactionException}.
|
||||
*
|
||||
* @param msg the detail message. Must not be {@literal null}.
|
||||
*/
|
||||
public MongoTransactionException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor for {@link MongoTransactionException}.
|
||||
*
|
||||
* @param msg the detail message. Can be {@literal null}.
|
||||
* @param cause the root cause. Can be {@literal null}.
|
||||
*/
|
||||
public MongoTransactionException(@Nullable String msg, @Nullable Throwable cause) {
|
||||
super(msg, cause);
|
||||
}
|
||||
}
|
||||
@@ -1,526 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.TransactionDefinition;
|
||||
import org.springframework.transaction.TransactionException;
|
||||
import org.springframework.transaction.TransactionSystemException;
|
||||
import org.springframework.transaction.support.AbstractPlatformTransactionManager;
|
||||
import org.springframework.transaction.support.DefaultTransactionStatus;
|
||||
import org.springframework.transaction.support.ResourceTransactionManager;
|
||||
import org.springframework.transaction.support.SmartTransactionObject;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationUtils;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.TransactionOptions;
|
||||
import com.mongodb.client.ClientSession;
|
||||
|
||||
/**
|
||||
* A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages
|
||||
* {@link ClientSession} based transactions for a single {@link MongoDbFactory}.
|
||||
* <p />
|
||||
* Binds a {@link ClientSession} from the specified {@link MongoDbFactory} to the thread.
|
||||
* <p />
|
||||
* {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal
|
||||
* consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction()
|
||||
* commit} or {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <p />
|
||||
* Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via
|
||||
* {@link MongoDatabaseUtils#getDatabase(MongoDbFactory)} instead of a standard {@link MongoDbFactory#getDb()} call.
|
||||
* Spring classes such as {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
|
||||
* <p />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override
|
||||
* {@link #doCommit(MongoTransactionObject)} to implement the
|
||||
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
|
||||
* behavior as outlined in the MongoDB reference manual.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @currentRead Shadow's Edge - Brent Weeks
|
||||
* @since 2.1
|
||||
* @see <a href="https://www.mongodb.com/transactions">MongoDB Transaction Documentation</a>
|
||||
* @see MongoDatabaseUtils#getDatabase(MongoDbFactory, SessionSynchronization)
|
||||
*/
|
||||
public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
implements ResourceTransactionManager, InitializingBean {
|
||||
|
||||
private @Nullable MongoDbFactory dbFactory;
|
||||
private @Nullable TransactionOptions options;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTransactionManager} for bean-style usage.
|
||||
* <p />
|
||||
* <strong>Note:</strong> The {@link MongoDbFactory db factory} has to be {@link #setDbFactory(MongoDbFactory) set}
|
||||
* before using the instance. Use this constructor to prepare a {@link MongoTransactionManager} via a
|
||||
* {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <p />
|
||||
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
|
||||
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
|
||||
*
|
||||
* @see #setDbFactory(MongoDbFactory)
|
||||
* @see #setTransactionSynchronization(int)
|
||||
*/
|
||||
public MongoTransactionManager() {}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDbFactory}.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
*/
|
||||
public MongoTransactionManager(MongoDbFactory dbFactory) {
|
||||
this(dbFactory, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDbFactory} applying the
|
||||
* given {@link TransactionOptions options}, if present, when starting a new transaction.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
* @param options can be {@literal null}.
|
||||
*/
|
||||
public MongoTransactionManager(MongoDbFactory dbFactory, @Nullable TransactionOptions options) {
|
||||
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null!");
|
||||
|
||||
this.dbFactory = dbFactory;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doGetTransaction()
|
||||
*/
|
||||
@Override
|
||||
protected Object doGetTransaction() throws TransactionException {
|
||||
|
||||
MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager
|
||||
.getResource(getRequiredDbFactory());
|
||||
return new MongoTransactionObject(resourceHolder);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#isExistingTransaction(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected boolean isExistingTransaction(Object transaction) throws TransactionException {
|
||||
return extractMongoTransaction(transaction).hasResourceHolder();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doBegin(java.lang.Object, org.springframework.transaction.TransactionDefinition)
|
||||
*/
|
||||
@Override
|
||||
protected void doBegin(Object transaction, TransactionDefinition definition) throws TransactionException {
|
||||
|
||||
MongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction);
|
||||
|
||||
MongoResourceHolder resourceHolder = newResourceHolder(definition,
|
||||
ClientSessionOptions.builder().causallyConsistent(true).build());
|
||||
mongoTransactionObject.setResourceHolder(resourceHolder);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger
|
||||
.debug(String.format("About to start transaction for session %s.", debugString(resourceHolder.getSession())));
|
||||
}
|
||||
|
||||
try {
|
||||
mongoTransactionObject.startTransaction(options);
|
||||
} catch (MongoException ex) {
|
||||
throw new TransactionSystemException(String.format("Could not start Mongo transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())), ex);
|
||||
}
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("Started transaction for session %s.", debugString(resourceHolder.getSession())));
|
||||
}
|
||||
|
||||
resourceHolder.setSynchronizedWithTransaction(true);
|
||||
TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), resourceHolder);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doSuspend(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Object doSuspend(Object transaction) throws TransactionException {
|
||||
|
||||
MongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction);
|
||||
mongoTransactionObject.setResourceHolder(null);
|
||||
|
||||
return TransactionSynchronizationManager.unbindResource(getRequiredDbFactory());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doResume(java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected void doResume(@Nullable Object transaction, Object suspendedResources) {
|
||||
TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), suspendedResources);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doCommit(org.springframework.transaction.support.DefaultTransactionStatus)
|
||||
*/
|
||||
@Override
|
||||
protected final void doCommit(DefaultTransactionStatus status) throws TransactionException {
|
||||
|
||||
MongoTransactionObject mongoTransactionObject = extractMongoTransaction(status);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("About to commit transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())));
|
||||
}
|
||||
|
||||
try {
|
||||
doCommit(mongoTransactionObject);
|
||||
} catch (Exception ex) {
|
||||
|
||||
throw new TransactionSystemException(String.format("Could not commit Mongo transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())), ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Customization hook to perform an actual commit of the given transaction.<br />
|
||||
* If a commit operation encounters an error, the MongoDB driver throws a {@link MongoException} holding
|
||||
* {@literal error labels}. <br />
|
||||
* By default those labels are ignored, nevertheless one might check for
|
||||
* {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit error labels} and retry the
|
||||
* commit. <br />
|
||||
* <code>
|
||||
* <pre>
|
||||
* int retries = 3;
|
||||
* do {
|
||||
* try {
|
||||
* transactionObject.commitTransaction();
|
||||
* break;
|
||||
* } catch (MongoException ex) {
|
||||
* if (!ex.hasErrorLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)) {
|
||||
* throw ex;
|
||||
* }
|
||||
* }
|
||||
* Thread.sleep(500);
|
||||
* } while (--retries > 0);
|
||||
* </pre>
|
||||
* </code>
|
||||
*
|
||||
* @param transactionObject never {@literal null}.
|
||||
* @throws Exception in case of transaction errors.
|
||||
*/
|
||||
protected void doCommit(MongoTransactionObject transactionObject) throws Exception {
|
||||
transactionObject.commitTransaction();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doRollback(org.springframework.transaction.support.DefaultTransactionStatus)
|
||||
*/
|
||||
@Override
|
||||
protected void doRollback(DefaultTransactionStatus status) throws TransactionException {
|
||||
|
||||
MongoTransactionObject mongoTransactionObject = extractMongoTransaction(status);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("About to abort transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())));
|
||||
}
|
||||
|
||||
try {
|
||||
mongoTransactionObject.abortTransaction();
|
||||
} catch (MongoException ex) {
|
||||
|
||||
throw new TransactionSystemException(String.format("Could not abort Mongo transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())), ex);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doSetRollbackOnly(org.springframework.transaction.support.DefaultTransactionStatus)
|
||||
*/
|
||||
@Override
|
||||
protected void doSetRollbackOnly(DefaultTransactionStatus status) throws TransactionException {
|
||||
|
||||
MongoTransactionObject transactionObject = extractMongoTransaction(status);
|
||||
transactionObject.getRequiredResourceHolder().setRollbackOnly();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doCleanupAfterCompletion(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected void doCleanupAfterCompletion(Object transaction) {
|
||||
|
||||
Assert.isInstanceOf(MongoTransactionObject.class, transaction,
|
||||
() -> String.format("Expected to find a %s but it turned out to be %s.", MongoTransactionObject.class,
|
||||
transaction.getClass()));
|
||||
|
||||
MongoTransactionObject mongoTransactionObject = (MongoTransactionObject) transaction;
|
||||
|
||||
// Remove the connection holder from the thread.
|
||||
TransactionSynchronizationManager.unbindResource(getRequiredDbFactory());
|
||||
mongoTransactionObject.getRequiredResourceHolder().clear();
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("About to release Session %s after transaction.",
|
||||
debugString(mongoTransactionObject.getSession())));
|
||||
}
|
||||
|
||||
mongoTransactionObject.closeSession();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link MongoDbFactory} that this instance should manage transactions for.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
*/
|
||||
public void setDbFactory(MongoDbFactory dbFactory) {
|
||||
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null!");
|
||||
this.dbFactory = dbFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link TransactionOptions} to be applied when starting transactions.
|
||||
*
|
||||
* @param options can be {@literal null}.
|
||||
*/
|
||||
public void setOptions(@Nullable TransactionOptions options) {
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link MongoDbFactory} that this instance manages transactions for.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public MongoDbFactory getDbFactory() {
|
||||
return dbFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceTransactionManager#getResourceFactory()
|
||||
*/
|
||||
@Override
|
||||
public MongoDbFactory getResourceFactory() {
|
||||
return getRequiredDbFactory();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
|
||||
*/
|
||||
@Override
|
||||
public void afterPropertiesSet() {
|
||||
getRequiredDbFactory();
|
||||
}
|
||||
|
||||
private MongoResourceHolder newResourceHolder(TransactionDefinition definition, ClientSessionOptions options) {
|
||||
|
||||
MongoDbFactory dbFactory = getResourceFactory();
|
||||
|
||||
MongoResourceHolder resourceHolder = new MongoResourceHolder(dbFactory.getSession(options), dbFactory);
|
||||
resourceHolder.setTimeoutIfNotDefaulted(determineTimeout(definition));
|
||||
|
||||
return resourceHolder;
|
||||
}
|
||||
|
||||
/**
|
||||
* @throws IllegalStateException if {@link #dbFactory} is {@literal null}.
|
||||
*/
|
||||
private MongoDbFactory getRequiredDbFactory() {
|
||||
|
||||
Assert.state(dbFactory != null,
|
||||
"MongoTransactionManager operates upon a MongoDbFactory. Did you forget to provide one? It's required.");
|
||||
|
||||
return dbFactory;
|
||||
}
|
||||
|
||||
private static MongoTransactionObject extractMongoTransaction(Object transaction) {
|
||||
|
||||
Assert.isInstanceOf(MongoTransactionObject.class, transaction,
|
||||
() -> String.format("Expected to find a %s but it turned out to be %s.", MongoTransactionObject.class,
|
||||
transaction.getClass()));
|
||||
|
||||
return (MongoTransactionObject) transaction;
|
||||
}
|
||||
|
||||
private static MongoTransactionObject extractMongoTransaction(DefaultTransactionStatus status) {
|
||||
|
||||
Assert.isInstanceOf(MongoTransactionObject.class, status.getTransaction(),
|
||||
() -> String.format("Expected to find a %s but it turned out to be %s.", MongoTransactionObject.class,
|
||||
status.getTransaction().getClass()));
|
||||
|
||||
return (MongoTransactionObject) status.getTransaction();
|
||||
}
|
||||
|
||||
private static String debugString(@Nullable ClientSession session) {
|
||||
|
||||
if (session == null) {
|
||||
return "null";
|
||||
}
|
||||
|
||||
String debugString = String.format("[%s@%s ", ClassUtils.getShortName(session.getClass()),
|
||||
Integer.toHexString(session.hashCode()));
|
||||
|
||||
try {
|
||||
if (session.getServerSession() != null) {
|
||||
debugString += String.format("id = %s, ", session.getServerSession().getIdentifier());
|
||||
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
|
||||
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
|
||||
debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber());
|
||||
debugString += String.format("closed = %d, ", session.getServerSession().isClosed());
|
||||
debugString += String.format("clusterTime = %s", session.getClusterTime());
|
||||
} else {
|
||||
debugString += "id = n/a";
|
||||
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
|
||||
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
|
||||
debugString += String.format("clusterTime = %s", session.getClusterTime());
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
debugString += String.format("error = %s", e.getMessage());
|
||||
}
|
||||
|
||||
debugString += "]";
|
||||
|
||||
return debugString;
|
||||
}
|
||||
|
||||
/**
|
||||
* MongoDB specific transaction object, representing a {@link MongoResourceHolder}. Used as transaction object by
|
||||
* {@link MongoTransactionManager}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
* @see MongoResourceHolder
|
||||
*/
|
||||
protected static class MongoTransactionObject implements SmartTransactionObject {
|
||||
|
||||
private @Nullable MongoResourceHolder resourceHolder;
|
||||
|
||||
MongoTransactionObject(@Nullable MongoResourceHolder resourceHolder) {
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link MongoResourceHolder}.
|
||||
*
|
||||
* @param resourceHolder can be {@literal null}.
|
||||
*/
|
||||
void setResourceHolder(@Nullable MongoResourceHolder resourceHolder) {
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if a {@link MongoResourceHolder} is set.
|
||||
*/
|
||||
final boolean hasResourceHolder() {
|
||||
return resourceHolder != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a MongoDB transaction optionally given {@link TransactionOptions}.
|
||||
*
|
||||
* @param options can be {@literal null}
|
||||
*/
|
||||
void startTransaction(@Nullable TransactionOptions options) {
|
||||
|
||||
ClientSession session = getRequiredSession();
|
||||
if (options != null) {
|
||||
session.startTransaction(options);
|
||||
} else {
|
||||
session.startTransaction();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Commit the transaction.
|
||||
*/
|
||||
public void commitTransaction() {
|
||||
getRequiredSession().commitTransaction();
|
||||
}
|
||||
|
||||
/**
|
||||
* Rollback (abort) the transaction.
|
||||
*/
|
||||
public void abortTransaction() {
|
||||
getRequiredSession().abortTransaction();
|
||||
}
|
||||
|
||||
/**
|
||||
* Close a {@link ClientSession} without regard to its transactional state.
|
||||
*/
|
||||
void closeSession() {
|
||||
|
||||
ClientSession session = getRequiredSession();
|
||||
if (session.getServerSession() != null && !session.getServerSession().isClosed()) {
|
||||
session.close();
|
||||
}
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public ClientSession getSession() {
|
||||
return resourceHolder != null ? resourceHolder.getSession() : null;
|
||||
}
|
||||
|
||||
private MongoResourceHolder getRequiredResourceHolder() {
|
||||
|
||||
Assert.state(resourceHolder != null, "MongoResourceHolder is required but not present. o_O");
|
||||
return resourceHolder;
|
||||
}
|
||||
|
||||
private ClientSession getRequiredSession() {
|
||||
|
||||
ClientSession session = getSession();
|
||||
Assert.state(session != null, "A Session is required but it turned out to be null.");
|
||||
return session;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly()
|
||||
*/
|
||||
@Override
|
||||
public boolean isRollbackOnly() {
|
||||
return this.resourceHolder != null && this.resourceHolder.isRollbackOnly();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#flush()
|
||||
*/
|
||||
@Override
|
||||
public void flush() {
|
||||
TransactionSynchronizationUtils.triggerFlush();
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
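For the MongoTransactionManager shown above, a typical configuration sketch along the lines of its class-level Javadoc. The configuration class, bean method name and injected MongoDbFactory bean are assumptions for illustration; only the constructor call reflects the code in the diff.

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.MongoTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;

@Configuration
@EnableTransactionManagement
class TransactionConfig {

	// Registering the manager lets @Transactional methods run their MongoDB
	// operations inside a ClientSession-backed transaction bound to the thread.
	@Bean
	MongoTransactionManager transactionManager(MongoDbFactory dbFactory) {
		return new MongoTransactionManager(dbFactory);
	}
}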
@@ -13,27 +13,22 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.MongoExceptionTranslator;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
import com.mongodb.reactivestreams.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Interface for factories creating reactive {@link MongoDatabase} instances.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
|
||||
public interface ReactiveMongoDatabaseFactory {
|
||||
|
||||
/**
|
||||
* Creates a default {@link MongoDatabase} instance.
|
||||
@@ -58,45 +53,4 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
PersistenceExceptionTranslator getExceptionTranslator();
|
||||
|
||||
/**
|
||||
* Get the underlying {@link CodecRegistry} used by the reactive MongoDB Java driver.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Override
|
||||
default CodecRegistry getCodecRegistry() {
|
||||
return getMongoDatabase().getCodecRegistry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link Mono} emitting a {@link ClientSession} for given {@link ClientSessionOptions options}.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
Mono<ClientSession> getSession(ClientSessionOptions options);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link ReactiveMongoDatabaseFactory} returning
|
||||
* {@link MongoDatabase} instances that are aware and bound to the given session.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
ReactiveMongoDatabaseFactory withSession(ClientSession session);
|
||||
|
||||
/**
|
||||
* Returns if the given {@link ReactiveMongoDatabaseFactory} is bound to a
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} that has an
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @return {@literal true} if there's an active transaction, {@literal false} otherwise.
|
||||
* @since 2.2
|
||||
*/
|
||||
default boolean isTransactionActive() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
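Mirroring the imperative factory, a sketch of how the reactive factory's session methods (shown on the pre-change side of the hunk above) compose. Identifiers are illustrative; the getSession, withSession and getMongoDatabase calls are taken from the interface as printed in the diff.

import com.mongodb.ClientSessionOptions;
import com.mongodb.reactivestreams.client.MongoDatabase;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import reactor.core.publisher.Mono;

class ReactiveSessionBoundAccess {

	static Mono<MongoDatabase> sessionBoundDatabase(ReactiveMongoDatabaseFactory factory) {

		// getSession(...) emits a ClientSession; withSession(...) binds subsequent
		// database lookups to that session.
		return factory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build())
				.map(session -> factory.withSession(session).getMongoDatabase());
	}
}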
@@ -1,278 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import reactor.core.publisher.Mono;
|
||||
import reactor.util.context.Context;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.NoTransactionException;
|
||||
import org.springframework.transaction.reactive.ReactiveResourceSynchronization;
|
||||
import org.springframework.transaction.reactive.TransactionSynchronization;
|
||||
import org.springframework.transaction.reactive.TransactionSynchronizationManager;
|
||||
import org.springframework.transaction.support.ResourceHolderSynchronization;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
import com.mongodb.reactivestreams.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for
|
||||
* obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection}
|
||||
* suitable for transactional usage.
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public class ReactiveMongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Check if the {@link ReactiveMongoDatabaseFactory} is actually bound to a
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} that has an active transaction, or if a
|
||||
* {@link org.springframework.transaction.reactive.TransactionSynchronization} has been registered for the
|
||||
* {@link ReactiveMongoDatabaseFactory resource} and if the associated
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} has an
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @param databaseFactory the resource to check transactions for. Must not be {@literal null}.
|
||||
* @return a {@link Mono} emitting {@literal true} if the factory has an ongoing transaction.
|
||||
*/
|
||||
public static Mono<Boolean> isTransactionActive(ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
if (databaseFactory.isTransactionActive()) {
|
||||
return Mono.just(true);
|
||||
}
|
||||
|
||||
return TransactionSynchronizationManager.forCurrentTransaction() //
|
||||
.map(it -> {
|
||||
|
||||
ReactiveMongoResourceHolder holder = (ReactiveMongoResourceHolder) it.getResource(databaseFactory);
|
||||
return holder != null && holder.hasActiveTransaction();
|
||||
}) //
|
||||
.onErrorResume(NoTransactionException.class, e -> Mono.just(false));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} from the given {@link ReactiveMongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(ReactiveMongoDatabaseFactory factory) {
|
||||
return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} from the given {@link ReactiveMongoDatabaseFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(ReactiveMongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(null, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with the given name from the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(String dbName, ReactiveMongoDatabaseFactory factory) {
|
||||
return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with the given name from the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(String dbName, ReactiveMongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(dbName, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
private static Mono<MongoDatabase> doGetMongoDatabase(@Nullable String dbName, ReactiveMongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
Assert.notNull(factory, "DatabaseFactory must not be null!");
|
||||
|
||||
return TransactionSynchronizationManager.forCurrentTransaction()
|
||||
.filter(TransactionSynchronizationManager::isSynchronizationActive) //
|
||||
.flatMap(synchronizationManager -> {
|
||||
|
||||
return doGetSession(synchronizationManager, factory, sessionSynchronization) //
|
||||
.map(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it)));
|
||||
})
|
||||
.onErrorResume(NoTransactionException.class,
|
||||
e -> Mono.fromSupplier(() -> getMongoDatabaseOrDefault(dbName, factory)))
|
||||
.defaultIfEmpty(getMongoDatabaseOrDefault(dbName, factory));
|
||||
}
|
||||
|
||||
private static MongoDatabase getMongoDatabaseOrDefault(@Nullable String dbName,
|
||||
ReactiveMongoDatabaseFactory factory) {
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
}
|
||||
|
||||
private static Mono<ClientSession> doGetSession(TransactionSynchronizationManager synchronizationManager,
|
||||
ReactiveMongoDatabaseFactory dbFactory, SessionSynchronization sessionSynchronization) {
|
||||
|
||||
final ReactiveMongoResourceHolder registeredHolder = (ReactiveMongoResourceHolder) synchronizationManager
|
||||
.getResource(dbFactory);
|
||||
|
||||
// check for native MongoDB transaction
|
||||
if (registeredHolder != null
|
||||
&& (registeredHolder.hasSession() || registeredHolder.isSynchronizedWithTransaction())) {
|
||||
|
||||
return registeredHolder.hasSession() ? Mono.just(registeredHolder.getSession())
|
||||
: createClientSession(dbFactory).map(registeredHolder::setSessionIfAbsent);
|
||||
}
|
||||
|
||||
if (SessionSynchronization.ON_ACTUAL_TRANSACTION.equals(sessionSynchronization)) {
|
||||
return Mono.empty();
|
||||
}
|
||||
|
||||
// init a non-native MongoDB transaction by registering a MongoSessionSynchronization
|
||||
return createClientSession(dbFactory).map(session -> {
|
||||
|
||||
ReactiveMongoResourceHolder newHolder = new ReactiveMongoResourceHolder(session, dbFactory);
|
||||
newHolder.getRequiredSession().startTransaction();
|
||||
|
||||
synchronizationManager
|
||||
.registerSynchronization(new MongoSessionSynchronization(synchronizationManager, newHolder, dbFactory));
|
||||
newHolder.setSynchronizedWithTransaction(true);
|
||||
synchronizationManager.bindResource(dbFactory, newHolder);
|
||||
|
||||
return newHolder.getSession();
|
||||
});
|
||||
}
|
||||
|
||||
private static Mono<ClientSession> createClientSession(ReactiveMongoDatabaseFactory dbFactory) {
|
||||
return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build());
|
||||
}
|
||||
|
||||
/**
|
||||
* MongoDB specific {@link ResourceHolderSynchronization} for resource cleanup at the end of a transaction when
|
||||
* participating in a non-native MongoDB transaction, such as an R2DBC transaction.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
*/
|
||||
private static class MongoSessionSynchronization
|
||||
extends ReactiveResourceSynchronization<ReactiveMongoResourceHolder, Object> {
|
||||
|
||||
private final ReactiveMongoResourceHolder resourceHolder;
|
||||
|
||||
MongoSessionSynchronization(TransactionSynchronizationManager synchronizationManager,
|
||||
ReactiveMongoResourceHolder resourceHolder, ReactiveMongoDatabaseFactory dbFactory) {
|
||||
|
||||
super(resourceHolder, dbFactory, synchronizationManager);
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#shouldReleaseBeforeCompletion()
|
||||
*/
|
||||
@Override
|
||||
protected boolean shouldReleaseBeforeCompletion() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#processResourceAfterCommit(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> processResourceAfterCommit(ReactiveMongoResourceHolder resourceHolder) {
|
||||
|
||||
if (isTransactionActive(resourceHolder)) {
|
||||
return Mono.from(resourceHolder.getRequiredSession().commitTransaction());
|
||||
}
|
||||
|
||||
return Mono.empty();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#afterCompletion(int)
|
||||
*/
|
||||
@Override
|
||||
public Mono<Void> afterCompletion(int status) {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
if (status == TransactionSynchronization.STATUS_ROLLED_BACK && isTransactionActive(this.resourceHolder)) {
|
||||
|
||||
return Mono.from(resourceHolder.getRequiredSession().abortTransaction()) //
|
||||
.then(super.afterCompletion(status));
|
||||
}
|
||||
|
||||
return super.afterCompletion(status);
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#releaseResource(java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> releaseResource(ReactiveMongoResourceHolder resourceHolder, Object resourceKey) {
|
||||
|
||||
return Mono.fromRunnable(() -> {
|
||||
if (resourceHolder.hasActiveSession()) {
|
||||
resourceHolder.getRequiredSession().close();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private boolean isTransactionActive(ReactiveMongoResourceHolder resourceHolder) {
|
||||
|
||||
if (!resourceHolder.hasSession()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return resourceHolder.getRequiredSession().hasActiveTransaction();
|
||||
}
|
||||
}
|
||||
}
|
||||
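As a rough sketch of how the helper above is typically consumed; illustration only, with `factory` assumed to be an existing ReactiveMongoDatabaseFactory and the collection name invented:

// Illustration only: resolve the (potentially transaction-bound) database inside a reactive flow
// and check whether the factory currently participates in a transaction.
Mono<Long> persons = ReactiveMongoDatabaseUtils
        .getDatabase(factory, SessionSynchronization.ON_ACTUAL_TRANSACTION)
        .flatMap(database -> Mono.from(database.getCollection("persons").countDocuments()));

Mono<Boolean> inTransaction = ReactiveMongoDatabaseUtils.isTransactionActive(factory);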
@@ -1,155 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.support.ResourceHolderSupport;
|
||||
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
|
||||
/**
|
||||
* MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds
|
||||
* instances of this class to the subscriber context.
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
* @see ReactiveMongoTransactionManager
|
||||
* @see ReactiveMongoTemplate
|
||||
*/
|
||||
class ReactiveMongoResourceHolder extends ResourceHolderSupport {
|
||||
|
||||
private @Nullable ClientSession session;
|
||||
private ReactiveMongoDatabaseFactory databaseFactory;
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoResourceHolder} for a given {@link ClientSession session}.
|
||||
*
|
||||
* @param session the associated {@link ClientSession}. Can be {@literal null}.
|
||||
* @param databaseFactory the associated {@link ReactiveMongoDatabaseFactory}. Must not be {@literal null}.
|
||||
*/
|
||||
ReactiveMongoResourceHolder(@Nullable ClientSession session, ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
this.session = session;
|
||||
this.databaseFactory = databaseFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the associated {@link ClientSession}. Can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
ClientSession getSession() {
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the required associated {@link ClientSession}.
|
||||
* @throws IllegalStateException if no session is associated.
|
||||
*/
|
||||
ClientSession getRequiredSession() {
|
||||
|
||||
ClientSession session = getSession();
|
||||
|
||||
if (session == null) {
|
||||
throw new IllegalStateException("No ClientSession associated");
|
||||
}
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the associated {@link ReactiveMongoDatabaseFactory}.
|
||||
*/
|
||||
public ReactiveMongoDatabaseFactory getDatabaseFactory() {
|
||||
return databaseFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ClientSession} to guard.
|
||||
*
|
||||
* @param session can be {@literal null}.
|
||||
*/
|
||||
public void setSession(@Nullable ClientSession session) {
|
||||
this.session = session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if session is not {@literal null}.
|
||||
*/
|
||||
boolean hasSession() {
|
||||
return session != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* If the {@link ReactiveMongoResourceHolder} is {@link #hasSession() not already associated} with a
|
||||
* {@link ClientSession} the given value is {@link #setSession(ClientSession) set} and returned, otherwise the current
|
||||
* bound session is returned.
|
||||
*
|
||||
* @param session
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
public ClientSession setSessionIfAbsent(@Nullable ClientSession session) {
|
||||
|
||||
if (!hasSession()) {
|
||||
setSession(session);
|
||||
}
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the session is active and has not been closed.
|
||||
*/
|
||||
boolean hasActiveSession() {
|
||||
|
||||
if (!hasSession()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return hasServerSession() && !getRequiredSession().getServerSession().isClosed();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the session has an active transaction.
|
||||
* @see #hasActiveSession()
|
||||
*/
|
||||
boolean hasActiveTransaction() {
|
||||
|
||||
if (!hasActiveSession()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return getRequiredSession().hasActiveTransaction();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the {@link ClientSession} has a {@link com.mongodb.session.ServerSession} associated
|
||||
* that is accessible via {@link ClientSession#getServerSession()}.
|
||||
*/
|
||||
boolean hasServerSession() {
|
||||
|
||||
try {
|
||||
return getRequiredSession().getServerSession() != null;
|
||||
} catch (IllegalStateException serverSessionClosed) {
|
||||
// ignore
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
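The holder above is an internal, package-private building block; the short sketch below only illustrates its session semantics under the assumption that `session` and `factory` already exist. It mirrors the checks the surrounding classes perform, it is not an API recommendation.

// Illustration only (internal API):
ReactiveMongoResourceHolder holder = new ReactiveMongoResourceHolder(null, factory);

holder.hasSession();               // false - no session associated yet
holder.setSessionIfAbsent(session);
holder.hasSession();               // true
holder.hasActiveTransaction();     // true only if the server session is open and a transaction was started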
@@ -1,530 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.TransactionDefinition;
|
||||
import org.springframework.transaction.TransactionException;
|
||||
import org.springframework.transaction.TransactionSystemException;
|
||||
import org.springframework.transaction.reactive.AbstractReactiveTransactionManager;
|
||||
import org.springframework.transaction.reactive.GenericReactiveTransaction;
|
||||
import org.springframework.transaction.reactive.TransactionSynchronizationManager;
|
||||
import org.springframework.transaction.support.SmartTransactionObject;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.TransactionOptions;
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
|
||||
/**
|
||||
* A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}.
|
||||
* <p />
|
||||
* Binds a {@link ClientSession} from the specified
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber
|
||||
* {@link reactor.util.context.Context}.
|
||||
* <p />
|
||||
* {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a
|
||||
* {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start},
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or
|
||||
* {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <p />
|
||||
* Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead
|
||||
* of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring
|
||||
* classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly.
|
||||
* <p />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override
|
||||
* {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the
|
||||
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
|
||||
* behavior as outlined in the MongoDB reference manual.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
* @see <a href="https://www.mongodb.com/transactions">MongoDB Transaction Documentation</a>
|
||||
* @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization)
|
||||
*/
|
||||
public class ReactiveMongoTransactionManager extends AbstractReactiveTransactionManager implements InitializingBean {
|
||||
|
||||
private @Nullable ReactiveMongoDatabaseFactory databaseFactory;
|
||||
private @Nullable TransactionOptions options;
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoTransactionManager} for bean-style usage.
|
||||
* <p />
|
||||
* <strong>Note:</strong> The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to
* be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory) set} before using the instance. Use this constructor
|
||||
* to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <p />
|
||||
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
|
||||
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
|
||||
*
|
||||
* @see #setDatabaseFactory(ReactiveMongoDatabaseFactory)
|
||||
*/
|
||||
public ReactiveMongoTransactionManager() {}
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given
|
||||
* {@link ReactiveMongoDatabaseFactory}.
|
||||
*
|
||||
* @param databaseFactory must not be {@literal null}.
|
||||
*/
|
||||
public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
this(databaseFactory, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given
|
||||
* {@link ReactiveMongoDatabaseFactory} applying the given {@link TransactionOptions options}, if present, when
|
||||
* starting a new transaction.
|
||||
*
|
||||
* @param databaseFactory must not be {@literal null}.
|
||||
* @param options can be {@literal null}.
|
||||
*/
|
||||
public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
@Nullable TransactionOptions options) {
|
||||
|
||||
Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");
|
||||
|
||||
this.databaseFactory = databaseFactory;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doGetTransaction(org.springframework.transaction.reactive.TransactionSynchronizationManager)
|
||||
*/
|
||||
@Override
|
||||
protected Object doGetTransaction(TransactionSynchronizationManager synchronizationManager)
|
||||
throws TransactionException {
|
||||
|
||||
ReactiveMongoResourceHolder resourceHolder = (ReactiveMongoResourceHolder) synchronizationManager
|
||||
.getResource(getRequiredDatabaseFactory());
|
||||
return new ReactiveMongoTransactionObject(resourceHolder);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#isExistingTransaction(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected boolean isExistingTransaction(Object transaction) throws TransactionException {
|
||||
return extractMongoTransaction(transaction).hasResourceHolder();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doBegin(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, org.springframework.transaction.TransactionDefinition)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doBegin(TransactionSynchronizationManager synchronizationManager, Object transaction,
|
||||
TransactionDefinition definition) throws TransactionException {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction);
|
||||
|
||||
Mono<ReactiveMongoResourceHolder> holder = newResourceHolder(definition,
|
||||
ClientSessionOptions.builder().causallyConsistent(true).build());
|
||||
|
||||
return holder.doOnNext(resourceHolder -> {
|
||||
|
||||
mongoTransactionObject.setResourceHolder(resourceHolder);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(
|
||||
String.format("About to start transaction for session %s.", debugString(resourceHolder.getSession())));
|
||||
}
|
||||
|
||||
}).doOnNext(resourceHolder -> {
|
||||
|
||||
mongoTransactionObject.startTransaction(options);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("Started transaction for session %s.", debugString(resourceHolder.getSession())));
|
||||
}
|
||||
|
||||
})//
|
||||
.onErrorMap(
|
||||
ex -> new TransactionSystemException(String.format("Could not start Mongo transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())), ex))
|
||||
.doOnSuccess(resourceHolder -> {
|
||||
|
||||
synchronizationManager.bindResource(getRequiredDatabaseFactory(), resourceHolder);
|
||||
}).then();
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSuspend(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Object> doSuspend(TransactionSynchronizationManager synchronizationManager, Object transaction)
|
||||
throws TransactionException {
|
||||
|
||||
return Mono.fromSupplier(() -> {
|
||||
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction);
|
||||
mongoTransactionObject.setResourceHolder(null);
|
||||
|
||||
return synchronizationManager.unbindResource(getRequiredDatabaseFactory());
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doResume(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doResume(TransactionSynchronizationManager synchronizationManager, @Nullable Object transaction,
|
||||
Object suspendedResources) {
|
||||
return Mono
|
||||
.fromRunnable(() -> synchronizationManager.bindResource(getRequiredDatabaseFactory(), suspendedResources));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCommit(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected final Mono<Void> doCommit(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) throws TransactionException {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("About to commit transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())));
|
||||
}
|
||||
|
||||
return doCommit(synchronizationManager, mongoTransactionObject).onErrorMap(ex -> {
|
||||
return new TransactionSystemException(String.format("Could not commit Mongo transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())), ex);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Customization hook to perform an actual commit of the given transaction.<br />
|
||||
* If a commit operation encounters an error, the MongoDB driver throws a {@link MongoException} holding
|
||||
* {@literal error labels}. <br />
|
||||
* By default those labels are ignored, nevertheless one might check for
|
||||
* {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit error labels} and retry the
|
||||
* commit.
|
||||
*
|
||||
* @param synchronizationManager reactive synchronization manager.
|
||||
* @param transactionObject never {@literal null}.
|
||||
*/
|
||||
protected Mono<Void> doCommit(TransactionSynchronizationManager synchronizationManager,
|
||||
ReactiveMongoTransactionObject transactionObject) {
|
||||
return transactionObject.commitTransaction();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doRollback(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doRollback(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("About to abort transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())));
|
||||
}
|
||||
|
||||
return mongoTransactionObject.abortTransaction().onErrorResume(MongoException.class, ex -> {
|
||||
return Mono
|
||||
.error(new TransactionSystemException(String.format("Could not abort Mongo transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())), ex));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSetRollbackOnly(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doSetRollbackOnly(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) throws TransactionException {
|
||||
|
||||
return Mono.fromRunnable(() -> {
|
||||
ReactiveMongoTransactionObject transactionObject = extractMongoTransaction(status);
|
||||
transactionObject.getRequiredResourceHolder().setRollbackOnly();
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCleanupAfterCompletion(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doCleanupAfterCompletion(TransactionSynchronizationManager synchronizationManager,
|
||||
Object transaction) {
|
||||
|
||||
Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction,
|
||||
() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
|
||||
transaction.getClass()));
|
||||
|
||||
return Mono.fromRunnable(() -> {
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = (ReactiveMongoTransactionObject) transaction;
|
||||
|
||||
// Remove the connection holder from the thread.
|
||||
synchronizationManager.unbindResource(getRequiredDatabaseFactory());
|
||||
mongoTransactionObject.getRequiredResourceHolder().clear();
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("About to release Session %s after transaction.",
|
||||
debugString(mongoTransactionObject.getSession())));
|
||||
}
|
||||
|
||||
mongoTransactionObject.closeSession();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ReactiveMongoDatabaseFactory} that this instance should manage transactions for.
|
||||
*
|
||||
* @param databaseFactory must not be {@literal null}.
|
||||
*/
|
||||
public void setDatabaseFactory(ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");
|
||||
this.databaseFactory = databaseFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link TransactionOptions} to be applied when starting transactions.
|
||||
*
|
||||
* @param options can be {@literal null}.
|
||||
*/
|
||||
public void setOptions(@Nullable TransactionOptions options) {
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ReactiveMongoDatabaseFactory} that this instance manages transactions for.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public ReactiveMongoDatabaseFactory getDatabaseFactory() {
|
||||
return databaseFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
|
||||
*/
|
||||
@Override
|
||||
public void afterPropertiesSet() {
|
||||
getRequiredDatabaseFactory();
|
||||
}
|
||||
|
||||
private Mono<ReactiveMongoResourceHolder> newResourceHolder(TransactionDefinition definition,
|
||||
ClientSessionOptions options) {
|
||||
|
||||
ReactiveMongoDatabaseFactory dbFactory = getRequiredDatabaseFactory();
|
||||
|
||||
return dbFactory.getSession(options).map(session -> new ReactiveMongoResourceHolder(session, dbFactory));
|
||||
}
|
||||
|
||||
/**
|
||||
* @throws IllegalStateException if {@link #databaseFactory} is {@literal null}.
|
||||
*/
|
||||
private ReactiveMongoDatabaseFactory getRequiredDatabaseFactory() {
|
||||
|
||||
Assert.state(databaseFactory != null,
|
||||
"ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory. Did you forget to provide one? It's required.");
|
||||
|
||||
return databaseFactory;
|
||||
}
|
||||
|
||||
private static ReactiveMongoTransactionObject extractMongoTransaction(Object transaction) {
|
||||
|
||||
Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction,
|
||||
() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
|
||||
transaction.getClass()));
|
||||
|
||||
return (ReactiveMongoTransactionObject) transaction;
|
||||
}
|
||||
|
||||
private static ReactiveMongoTransactionObject extractMongoTransaction(GenericReactiveTransaction status) {
|
||||
|
||||
Assert.isInstanceOf(ReactiveMongoTransactionObject.class, status.getTransaction(),
|
||||
() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
|
||||
status.getTransaction().getClass()));
|
||||
|
||||
return (ReactiveMongoTransactionObject) status.getTransaction();
|
||||
}
|
||||
|
||||
private static String debugString(@Nullable ClientSession session) {
|
||||
|
||||
if (session == null) {
|
||||
return "null";
|
||||
}
|
||||
|
||||
String debugString = String.format("[%s@%s ", ClassUtils.getShortName(session.getClass()),
|
||||
Integer.toHexString(session.hashCode()));
|
||||
|
||||
try {
|
||||
if (session.getServerSession() != null) {
|
||||
debugString += String.format("id = %s, ", session.getServerSession().getIdentifier());
|
||||
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
|
||||
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
|
||||
debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber());
|
||||
debugString += String.format("closed = %d, ", session.getServerSession().isClosed());
|
||||
debugString += String.format("clusterTime = %s", session.getClusterTime());
|
||||
} else {
|
||||
debugString += "id = n/a";
|
||||
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
|
||||
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
|
||||
debugString += String.format("clusterTime = %s", session.getClusterTime());
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
debugString += String.format("error = %s", e.getMessage());
|
||||
}
|
||||
|
||||
debugString += "]";
|
||||
|
||||
return debugString;
|
||||
}
|
||||
|
||||
/**
|
||||
* MongoDB specific transaction object, representing a {@link ReactiveMongoResourceHolder}. Used as transaction object by
|
||||
* {@link ReactiveMongoTransactionManager}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
* @see ReactiveMongoResourceHolder
|
||||
*/
|
||||
protected static class ReactiveMongoTransactionObject implements SmartTransactionObject {
|
||||
|
||||
private @Nullable ReactiveMongoResourceHolder resourceHolder;
|
||||
|
||||
ReactiveMongoTransactionObject(@Nullable ReactiveMongoResourceHolder resourceHolder) {
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ReactiveMongoResourceHolder}.
|
||||
*
|
||||
* @param resourceHolder can be {@literal null}.
|
||||
*/
|
||||
void setResourceHolder(@Nullable ReactiveMongoResourceHolder resourceHolder) {
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if a {@link ReactiveMongoResourceHolder} is set.
|
||||
*/
|
||||
final boolean hasResourceHolder() {
|
||||
return resourceHolder != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a MongoDB transaction optionally given {@link TransactionOptions}.
|
||||
*
|
||||
* @param options can be {@literal null}
|
||||
*/
|
||||
void startTransaction(@Nullable TransactionOptions options) {
|
||||
|
||||
ClientSession session = getRequiredSession();
|
||||
if (options != null) {
|
||||
session.startTransaction(options);
|
||||
} else {
|
||||
session.startTransaction();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Commit the transaction.
|
||||
*/
|
||||
public Mono<Void> commitTransaction() {
|
||||
return Mono.from(getRequiredSession().commitTransaction());
|
||||
}
|
||||
|
||||
/**
|
||||
* Rollback (abort) the transaction.
|
||||
*/
|
||||
public Mono<Void> abortTransaction() {
|
||||
return Mono.from(getRequiredSession().abortTransaction());
|
||||
}
|
||||
|
||||
/**
|
||||
* Close a {@link ClientSession} without regard to its transactional state.
|
||||
*/
|
||||
void closeSession() {
|
||||
|
||||
ClientSession session = getRequiredSession();
|
||||
if (session.getServerSession() != null && !session.getServerSession().isClosed()) {
|
||||
session.close();
|
||||
}
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public ClientSession getSession() {
|
||||
return resourceHolder != null ? resourceHolder.getSession() : null;
|
||||
}
|
||||
|
||||
private ReactiveMongoResourceHolder getRequiredResourceHolder() {
|
||||
|
||||
Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present. o_O");
|
||||
return resourceHolder;
|
||||
}
|
||||
|
||||
private ClientSession getRequiredSession() {
|
||||
|
||||
ClientSession session = getSession();
|
||||
Assert.state(session != null, "A Session is required but it turned out to be null.");
|
||||
return session;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly()
|
||||
*/
|
||||
@Override
|
||||
public boolean isRollbackOnly() {
|
||||
return this.resourceHolder != null && this.resourceHolder.isRollbackOnly();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#flush()
|
||||
*/
|
||||
@Override
|
||||
public void flush() {
|
||||
throw new UnsupportedOperationException("flush() not supported");
|
||||
}
|
||||
}
|
||||
}
|
||||
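A compact configuration sketch for the transaction manager above. This is an assumption-laden illustration: it presumes Spring Framework 5.2's TransactionalOperator is on the classpath, a ReactiveMongoTemplate named `template` exists, and `Person` is a made-up domain type.

// Illustration only: expose the reactive transaction manager as a bean ...
@Bean
ReactiveMongoTransactionManager transactionManager(ReactiveMongoDatabaseFactory factory) {
    return new ReactiveMongoTransactionManager(factory);
}

// ... and drive it programmatically via a TransactionalOperator (declarative @Transactional works as well).
TransactionalOperator operator = TransactionalOperator.create(transactionManager);
Mono<Person> saved = operator.transactional(template.insert(new Person("Ada")));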
@@ -1,215 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.Proxy;
|
||||
import java.util.Optional;
|
||||
import java.util.function.BiFunction;
|
||||
|
||||
import org.aopalliance.intercept.MethodInterceptor;
|
||||
import org.aopalliance.intercept.MethodInvocation;
|
||||
import org.springframework.core.MethodClassKey;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ConcurrentReferenceHashMap;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.session.ClientSession;
|
||||
|
||||
/**
|
||||
* {@link MethodInterceptor} implementation looking up and invoking an alternative target method having
|
||||
* {@link ClientSession} as its first argument. This allows seamless integration with the existing code base.
|
||||
* <p />
|
||||
* The {@link MethodInterceptor} is aware of methods on {@code MongoCollection} that may return new instances of itself
* (e.g. {@link com.mongodb.reactivestreams.client.MongoCollection#withWriteConcern(WriteConcern)}) and decorates them
* if not already proxied.
|
||||
*
|
||||
* @param <D> Type of the actual Mongo Database.
|
||||
* @param <C> Type of the actual Mongo Collection.
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
public class SessionAwareMethodInterceptor<D, C> implements MethodInterceptor {
|
||||
|
||||
private static final MethodCache METHOD_CACHE = new MethodCache();
|
||||
|
||||
private final ClientSession session;
|
||||
private final ClientSessionOperator collectionDecorator;
|
||||
private final ClientSessionOperator databaseDecorator;
|
||||
private final Object target;
|
||||
private final Class<?> targetType;
|
||||
private final Class<?> collectionType;
|
||||
private final Class<?> databaseType;
|
||||
private final Class<? extends ClientSession> sessionType;
|
||||
|
||||
/**
|
||||
* Create a new SessionAwareMethodInterceptor for given target.
|
||||
*
|
||||
* @param session the {@link ClientSession} to be used on invocation.
|
||||
* @param target the original target object.
|
||||
* @param databaseType the MongoDB database type
|
||||
* @param databaseDecorator a {@link ClientSessionOperator} used to create the proxy for an imperative / reactive
|
||||
* {@code MongoDatabase}.
|
||||
* @param collectionType the MongoDB collection type.
|
||||
* @param collectionDecorator a {@link ClientSessionOperator} used to create the proxy for an imperative / reactive
|
||||
* {@code MongoCollection}.
|
||||
* @param <T> target object type.
|
||||
*/
|
||||
public <T> SessionAwareMethodInterceptor(ClientSession session, T target, Class<? extends ClientSession> sessionType,
|
||||
Class<D> databaseType, ClientSessionOperator<D> databaseDecorator, Class<C> collectionType,
|
||||
ClientSessionOperator<C> collectionDecorator) {
|
||||
|
||||
Assert.notNull(session, "ClientSession must not be null!");
|
||||
Assert.notNull(target, "Target must not be null!");
|
||||
Assert.notNull(sessionType, "SessionType must not be null!");
|
||||
Assert.notNull(databaseType, "Database type must not be null!");
|
||||
Assert.notNull(databaseDecorator, "Database ClientSessionOperator must not be null!");
|
||||
Assert.notNull(collectionType, "Collection type must not be null!");
|
||||
Assert.notNull(collectionDecorator, "Collection ClientSessionOperator must not be null!");
|
||||
|
||||
this.session = session;
|
||||
this.target = target;
|
||||
this.databaseType = ClassUtils.getUserClass(databaseType);
|
||||
this.collectionType = ClassUtils.getUserClass(collectionType);
|
||||
this.collectionDecorator = collectionDecorator;
|
||||
this.databaseDecorator = databaseDecorator;
|
||||
|
||||
this.targetType = ClassUtils.isAssignable(databaseType, target.getClass()) ? databaseType : collectionType;
|
||||
this.sessionType = sessionType;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.aopalliance.intercept.MethodInterceptor(org.aopalliance.intercept.MethodInvocation)
|
||||
*/
|
||||
@Nullable
|
||||
@Override
|
||||
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
|
||||
|
||||
if (requiresDecoration(methodInvocation.getMethod())) {
|
||||
|
||||
Object target = methodInvocation.proceed();
|
||||
if (target instanceof Proxy) {
|
||||
return target;
|
||||
}
|
||||
|
||||
return decorate(target);
|
||||
}
|
||||
|
||||
if (!requiresSession(methodInvocation.getMethod())) {
|
||||
return methodInvocation.proceed();
|
||||
}
|
||||
|
||||
Optional<Method> targetMethod = METHOD_CACHE.lookup(methodInvocation.getMethod(), targetType, sessionType);
|
||||
|
||||
return !targetMethod.isPresent() ? methodInvocation.proceed()
|
||||
: ReflectionUtils.invokeMethod(targetMethod.get(), target,
|
||||
prependSessionToArguments(session, methodInvocation));
|
||||
}
|
||||
|
||||
private boolean requiresDecoration(Method method) {
|
||||
|
||||
return ClassUtils.isAssignable(databaseType, method.getReturnType())
|
||||
|| ClassUtils.isAssignable(collectionType, method.getReturnType());
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Object decorate(Object target) {
|
||||
|
||||
return ClassUtils.isAssignable(databaseType, target.getClass()) ? databaseDecorator.apply(session, target)
|
||||
: collectionDecorator.apply(session, target);
|
||||
}
|
||||
|
||||
private static boolean requiresSession(Method method) {
|
||||
|
||||
if (method.getParameterCount() == 0
|
||||
|| !ClassUtils.isAssignable(ClientSession.class, method.getParameterTypes()[0])) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private static Object[] prependSessionToArguments(ClientSession session, MethodInvocation invocation) {
|
||||
|
||||
Object[] args = new Object[invocation.getArguments().length + 1];
|
||||
|
||||
args[0] = session;
|
||||
System.arraycopy(invocation.getArguments(), 0, args, 1, invocation.getArguments().length);
|
||||
|
||||
return args;
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple {@link Method} to {@link Method} caching facility for {@link ClientSession} overloaded targets.
|
||||
*
|
||||
* @since 2.1
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
static class MethodCache {
|
||||
|
||||
private final ConcurrentReferenceHashMap<MethodClassKey, Optional<Method>> cache = new ConcurrentReferenceHashMap<>();
|
||||
|
||||
/**
|
||||
* Lookup the target {@link Method}.
|
||||
*
|
||||
* @param method
|
||||
* @param targetClass
|
||||
* @return
|
||||
*/
|
||||
Optional<Method> lookup(Method method, Class<?> targetClass, Class<? extends ClientSession> sessionType) {
|
||||
|
||||
return cache.computeIfAbsent(new MethodClassKey(method, targetClass),
|
||||
val -> Optional.ofNullable(findTargetWithSession(method, targetClass, sessionType)));
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private Method findTargetWithSession(Method sourceMethod, Class<?> targetType,
|
||||
Class<? extends ClientSession> sessionType) {
|
||||
|
||||
Class<?>[] argTypes = sourceMethod.getParameterTypes();
|
||||
Class<?>[] args = new Class<?>[argTypes.length + 1];
|
||||
args[0] = sessionType;
|
||||
System.arraycopy(argTypes, 0, args, 1, argTypes.length);
|
||||
|
||||
return ReflectionUtils.findMethod(targetType, sourceMethod.getName(), args);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the cache contains an entry for {@link Method} and {@link Class}.
|
||||
*
|
||||
* @param method
|
||||
* @param targetClass
|
||||
* @return
|
||||
*/
|
||||
boolean contains(Method method, Class<?> targetClass) {
|
||||
return cache.containsKey(new MethodClassKey(method, targetClass));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents an operation upon two operands of the same type, producing a result of the same type as the operands
|
||||
* accepting {@link ClientSession}. This is a specialization of {@link BiFunction} for the case where the operands and
|
||||
* the result are all of the same type.
|
||||
*
|
||||
* @param <T> the type of the operands and result of the operator
|
||||
*/
|
||||
public interface ClientSessionOperator<T> extends BiFunction<ClientSession, T, T> {}
|
||||
}
|
||||
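A sketch of how such an interceptor is typically put in front of a database object using Spring AOP's ProxyFactory. Illustration only: `session` and `database` are assumed to exist, and the no-op decorator lambdas are placeholders for the real proxy-creating operators.

// Illustration only: decorate a reactive MongoDatabase so that invocations are routed to the
// ClientSession-overloaded methods where such overloads exist.
SessionAwareMethodInterceptor.ClientSessionOperator<MongoDatabase> databaseDecorator = (s, db) -> db;
SessionAwareMethodInterceptor.ClientSessionOperator<MongoCollection> collectionDecorator = (s, col) -> col;

ProxyFactory proxyFactory = new ProxyFactory(database);
proxyFactory.addAdvice(new SessionAwareMethodInterceptor<>(session, database, ClientSession.class,
        MongoDatabase.class, databaseDecorator, MongoCollection.class, collectionDecorator));

MongoDatabase sessionBound = (MongoDatabase) proxyFactory.getProxy();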
@@ -1,38 +0,0 @@
/*
 * Copyright 2018-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb;

/**
 * {@link SessionSynchronization} is used along with {@link org.springframework.data.mongodb.core.MongoTemplate} to
 * define in which type of transactions to participate if any.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 2.1
 */
public enum SessionSynchronization {

	/**
	 * Synchronize with any transaction even with empty transactions and initiate a MongoDB transaction when doing so by
	 * registering a MongoDB specific {@link org.springframework.transaction.support.ResourceHolderSynchronization}.
	 */
	ALWAYS,

	/**
	 * Synchronize with native MongoDB transactions initiated via {@link MongoTransactionManager}.
	 */
	ON_ACTUAL_TRANSACTION;
}
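For context, a sketch of where the enum is typically plugged in. Illustration only: it assumes the template exposes a setSessionSynchronization(..) setter, as the reactive template does in this line of development, and that `databaseFactory` already exists.

// Illustration only: take part in any Spring-managed transaction, not just native MongoDB ones.
ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory);
template.setSessionSynchronization(SessionSynchronization.ALWAYS);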
@@ -1,111 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoClientDbFactory;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.client.MongoClient;
|
||||
|
||||
/**
|
||||
* Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.client.MongoClient}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
* @see MongoConfigurationSupport
|
||||
* @see AbstractMongoConfiguration
|
||||
*/
|
||||
@Configuration
|
||||
public abstract class AbstractMongoClientConfiguration extends MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a
|
||||
* {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public abstract MongoClient mongoClient();
|
||||
|
||||
/**
|
||||
* Creates a {@link MongoTemplate}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@Bean
|
||||
public MongoTemplate mongoTemplate() throws Exception {
|
||||
return new MongoTemplate(mongoDbFactory(), mappingMongoConverter());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient}
|
||||
* instance configured in {@link #mongoClient()}.
|
||||
*
|
||||
* @see #mongoClient()
|
||||
* @see #mongoTemplate()
|
||||
* @return
|
||||
*/
|
||||
@Bean
|
||||
public MongoDbFactory mongoDbFactory() {
|
||||
return new SimpleMongoClientDbFactory(mongoClient(), getDatabaseName());
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the base package to scan for mapped {@link Document}s. Will return the package name of the configuration
|
||||
* class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending
|
||||
* {@link AbstractMongoClientConfiguration} the base package will be considered {@code com.acme} unless the method is
|
||||
* overridden to implement alternate behavior.
|
||||
*
|
||||
* @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for
|
||||
* entities.
|
||||
* @deprecated use {@link #getMappingBasePackages()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@Nullable
|
||||
protected String getMappingBasePackage() {
|
||||
|
||||
Package mappingBasePackage = getClass().getPackage();
|
||||
return mappingBasePackage == null ? null : mappingBasePackage.getName();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
|
||||
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext()
|
||||
* @see #mongoDbFactory()
|
||||
* @return
|
||||
* @throws Exception
|
||||
*/
|
||||
@Bean
|
||||
public MappingMongoConverter mappingMongoConverter() throws Exception {
|
||||
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
|
||||
return converter;
|
||||
}
|
||||
}
|
||||
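A minimal concrete configuration extending the class above might look like the following. Illustration only: the connection string, database name and class name are invented, and getDatabaseName() is assumed to be the abstract hook inherited from MongoConfigurationSupport.

// Illustration only: the two pieces a typical application supplies.
@Configuration
class ApplicationMongoConfiguration extends AbstractMongoClientConfiguration {

    @Override
    public MongoClient mongoClient() {
        return com.mongodb.client.MongoClients.create("mongodb://localhost:27017");
    }

    @Override
    protected String getDatabaseName() {
        return "example-db";
    }
}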
@@ -29,10 +29,7 @@ import org.springframework.lang.Nullable;
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.MongoClient}.
|
||||
* <p />
|
||||
* <strong>INFO:</strong> In case you want to use {@link com.mongodb.client.MongoClients} for configuration please refer
|
||||
* to {@link AbstractMongoClientConfiguration}.
|
||||
* Base class for Spring Data MongoDB configuration using JavaConfig.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
@@ -41,10 +38,10 @@ import com.mongodb.MongoClient;
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @see MongoConfigurationSupport
|
||||
* @see AbstractMongoClientConfiguration
|
||||
*/
|
||||
@Configuration
|
||||
public abstract class AbstractMongoConfiguration extends MongoConfigurationSupport {
|
||||
public abstract class AbstractMongoConfiguration extends MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a
|
||||
@@ -66,7 +63,7 @@ public abstract class AbstractMongoConfiguration extends MongoConfigurationSuppo
|
||||
|
||||
/**
|
||||
* Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient}
|
||||
* instance configured in {@link #mongoClient()}.
|
||||
* instance configured in {@link #mongo()}.
|
||||
*
|
||||
* @see #mongoClient()
|
||||
* @see #mongoTemplate()
|
||||
@@ -114,5 +111,4 @@ public abstract class AbstractMongoConfiguration extends MongoConfigurationSuppo
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -22,7 +22,6 @@ import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;

import com.mongodb.reactivestreams.client.MongoClient;

@@ -81,7 +80,8 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat
	@Bean
	public MappingMongoConverter mappingMongoConverter() throws Exception {

		MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mongoMappingContext());
		MappingMongoConverter converter = new MappingMongoConverter(ReactiveMongoTemplate.NO_OP_REF_RESOLVER,
				mongoMappingContext());
		converter.setCustomConversions(customConversions());

		return converter;
@@ -51,6 +51,7 @@ import org.springframework.core.type.filter.AssignableTypeFilter;
import org.springframework.core.type.filter.TypeFilter;
import org.springframework.data.annotation.Persistent;
import org.springframework.data.config.BeanComponentDefinitionBuilder;
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
@@ -101,6 +102,8 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
		BeanDefinition conversionsDefinition = getCustomConversions(element, parserContext);
		String ctxRef = potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, id);

		createIsNewStrategyFactoryBeanDefinition(ctxRef, parserContext, element);

		// Need a reference to a Mongo instance
		String dbFactoryRef = element.getAttribute("db-factory-ref");
		if (!StringUtils.hasText(dbFactoryRef)) {
@@ -345,6 +348,20 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
		return null;
	}

	public static String createIsNewStrategyFactoryBeanDefinition(String mappingContextRef, ParserContext context,
			Element element) {

		BeanDefinitionBuilder mappingContextStrategyFactoryBuilder = BeanDefinitionBuilder
				.rootBeanDefinition(MappingContextIsNewStrategyFactory.class);
		mappingContextStrategyFactoryBuilder.addConstructorArgReference(mappingContextRef);

		BeanComponentDefinitionBuilder builder = new BeanComponentDefinitionBuilder(element, context);
		context.registerBeanComponent(
				builder.getComponent(mappingContextStrategyFactoryBuilder, IS_NEW_STRATEGY_FACTORY_BEAN_NAME));

		return IS_NEW_STRATEGY_FACTORY_BEAN_NAME;
	}

	/**
	 * {@link TypeFilter} that returns {@literal false} in case any of the given delegates matches.
	 *
@@ -18,7 +18,6 @@ package org.springframework.data.mongodb.config;
import static org.springframework.data.config.ParsingUtils.*;
import static org.springframework.data.mongodb.config.BeanNames.*;

import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.RootBeanDefinition;
@@ -27,33 +26,25 @@ import org.springframework.beans.factory.xml.BeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.data.auditing.config.IsNewAwareAuditingHandlerBeanDefinitionParser;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback;
import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
import org.springframework.util.StringUtils;

import org.w3c.dom.Element;

/**
 * {@link BeanDefinitionParser} to register a {@link AuditingEntityCallback} to transparently set auditing information
 * on an entity.
 * {@link BeanDefinitionParser} to register a {@link AuditingEventListener} to transparently set auditing information on
 * an entity.
 *
 * @author Oliver Gierke
 * @author Mark Paluch
 */
public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinitionParser {

	private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
			MongoAuditingRegistrar.class.getClassLoader());

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element)
	 */
	@Override
	protected Class<?> getBeanClass(Element element) {
		return AuditingEntityCallback.class;
		return AuditingEventListener.class;
	}

	/*
@@ -89,24 +80,7 @@ public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinit
				mappingContextRef);
		parser.parse(element, parserContext);

		AbstractBeanDefinition isNewAwareAuditingHandler = getObjectFactoryBeanDefinition(parser.getResolvedBeanName(),
				parserContext.extractSource(element));
		builder.addConstructorArgValue(isNewAwareAuditingHandler);

		if (PROJECT_REACTOR_AVAILABLE) {
			registerReactiveAuditingEntityCallback(parserContext.getRegistry(), isNewAwareAuditingHandler,
					parserContext.extractSource(element));
		}
	}

	private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry,
			AbstractBeanDefinition isNewAwareAuditingHandler, @Nullable Object source) {

		BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);

		builder.addConstructorArgValue(isNewAwareAuditingHandler);
		builder.getRawBeanDefinition().setSource(source);

		registry.registerBeanDefinition(ReactiveAuditingEntityCallback.class.getName(), builder.getBeanDefinition());
		builder.addConstructorArgValue(getObjectFactoryBeanDefinition(parser.getResolvedBeanName(),
				parserContext.extractSource(element)));
	}
}
@@ -32,23 +32,17 @@ import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback;
import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;

/**
 * {@link ImportBeanDefinitionRegistrar} to enable {@link EnableMongoAuditing} annotation.
 *
 * @author Thomas Darimont
 * @author Oliver Gierke
 * @author Mark Paluch
 */
class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {

	private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
			MongoAuditingRegistrar.class.getClassLoader());

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
@@ -110,27 +104,12 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
		Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");

		BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder
				.rootBeanDefinition(AuditingEntityCallback.class);
				.rootBeanDefinition(AuditingEventListener.class);
		listenerBeanDefinitionBuilder
				.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));

		registerInfrastructureBeanWithId(listenerBeanDefinitionBuilder.getBeanDefinition(),
				AuditingEntityCallback.class.getName(), registry);

		if (PROJECT_REACTOR_AVAILABLE) {
			registerReactiveAuditingEntityCallback(registry, auditingHandlerDefinition.getSource());
		}
	}

	private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry, Object source) {

		BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);

		builder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));
		builder.getRawBeanDefinition().setSource(source);

		registerInfrastructureBeanWithId(builder.getBeanDefinition(), ReactiveAuditingEntityCallback.class.getName(),
				registry);
				AuditingEventListener.class.getName(), registry);
	}

	/**
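As a hedged usage sketch for the registrar discussed above: enabling the annotation pulls in the registered callback/listener (plus the reactive variant when Project Reactor is on the classpath). The @CreatedDate/@LastModifiedDate annotations and the Order type are assumptions for illustration, not part of this change set.

import java.time.Instant;

import org.springframework.context.annotation.Configuration;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.annotation.LastModifiedDate;
import org.springframework.data.mongodb.config.EnableMongoAuditing;
import org.springframework.data.mongodb.core.mapping.Document;

// Activates MongoAuditingRegistrar, which registers the auditing infrastructure shown above.
@Configuration
@EnableMongoAuditing
class AuditingConfig {}

// Hypothetical audited entity.
@Document
class Order {

	@CreatedDate Instant createdAt; // set on first persist
	@LastModifiedDate Instant modifiedAt; // refreshed on every save
}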
@@ -15,6 +15,7 @@
 */
package org.springframework.data.mongodb.config;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
@@ -27,12 +28,17 @@ import org.springframework.core.convert.converter.Converter;
import org.springframework.core.type.filter.AnnotationTypeFilter;
import org.springframework.data.annotation.Persistent;
import org.springframework.data.convert.CustomConversions;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
import org.springframework.data.mapping.context.PersistentEntities;
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mapping.model.FieldNamingStrategy;
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.support.CachingIsNewStrategyFactory;
import org.springframework.data.support.IsNewStrategyFactory;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;

@@ -81,11 +87,23 @@ public abstract class MongoConfigurationSupport {
		mappingContext.setInitialEntitySet(getInitialEntitySet());
		mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());
		mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
		mappingContext.setAutoIndexCreation(autoIndexCreation());

		return mappingContext;
	}

	/**
	 * Returns a {@link MappingContextIsNewStrategyFactory} wrapped into a {@link CachingIsNewStrategyFactory}.
	 *
	 * @return
	 * @throws ClassNotFoundException
	 */
	@Bean
	public IsNewStrategyFactory isNewStrategyFactory() throws ClassNotFoundException {

		return new CachingIsNewStrategyFactory(new MappingContextIsNewStrategyFactory(
				new PersistentEntities(Arrays.<MappingContext<?, ?>> asList(new MappingContext[] { mongoMappingContext() }))));
	}

	/**
	 * Register custom {@link Converter}s in a {@link CustomConversions} object if required. These
	 * {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and
@@ -172,16 +190,4 @@ public abstract class MongoConfigurationSupport {
		return abbreviateFieldNames() ? new CamelCaseAbbreviatingFieldNamingStrategy()
				: PropertyNameFieldNamingStrategy.INSTANCE;
	}

	/**
	 * Configure whether to automatically create indices for domain types by deriving the
	 * {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not.
	 *
	 * @return {@literal true} by default. <br />
	 *         <strong>INFO</strong>: As of 3.x the default will be set to {@literal false}.
	 * @since 2.2
	 */
	protected boolean autoIndexCreation() {
		return true;
	}
}
@@ -80,23 +80,12 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {

				verifyUserNamePresent(userNameAndPassword);
				credentials.add(MongoCredential.createGSSAPICredential(userNameAndPassword[0]));
			} else if ("MONGODB-CR".equals(authMechanism)) {
			} else if (MongoCredential.MONGODB_CR_MECHANISM.equals(authMechanism)) {

				verifyUsernameAndPasswordPresent(userNameAndPassword);
				verifyDatabasePresent(database);

				Method createCRCredentialMethod = ReflectionUtils.findMethod(MongoCredential.class,
						"createMongoCRCredential", String.class, String.class, char[].class);

				if (createCRCredentialMethod == null) {
					throw new IllegalArgumentException("MONGODB-CR is no longer supported.");
				}

				MongoCredential credential = MongoCredential.class
						.cast(ReflectionUtils.invokeMethod(createCRCredentialMethod, null, userNameAndPassword[0], database,
								userNameAndPassword[1].toCharArray()));
				credentials.add(credential);

				credentials.add(MongoCredential.createMongoCRCredential(userNameAndPassword[0], database,
						userNameAndPassword[1].toCharArray()));
			} else if (MongoCredential.MONGODB_X509_MECHANISM.equals(authMechanism)) {

				verifyUserNamePresent(userNameAndPassword);
@@ -113,12 +102,20 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
				verifyDatabasePresent(database);
				credentials.add(MongoCredential.createScramSha1Credential(userNameAndPassword[0], database,
						userNameAndPassword[1].toCharArray()));
			} else if (MongoCredential.SCRAM_SHA_256_MECHANISM.equals(authMechanism)) {
			} else if ("SCRAM-SHA-256".equals(authMechanism)) {

				Method createScramSha256Credential = ReflectionUtils.findMethod(MongoCredential.class,
						"createScramSha256Credential", String.class, String.class, char[].class);

				if (createScramSha256Credential == null) {
					throw new IllegalArgumentException(
							"SCRAM-SHA-256 auth mechanism is available as of MongoDB 4 and MongoDB Java Driver 3.8! Please make sure to use at least those versions.");
				}

				verifyUsernameAndPasswordPresent(userNameAndPassword);
				verifyDatabasePresent(database);
				credentials.add(MongoCredential.createScramSha256Credential(userNameAndPassword[0], database,
						userNameAndPassword[1].toCharArray()));
				credentials.add(MongoCredential.class.cast(ReflectionUtils.invokeMethod(createScramSha256Credential, null,
						userNameAndPassword[0], database, userNameAndPassword[1].toCharArray())));
			} else {
				throw new IllegalArgumentException(
						String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'!", authMechanism));
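For reference, the reflective lookup above only guards against pre-3.8 drivers; on MongoDB Java Driver 3.8+ it amounts to calling the factory method directly. A small sketch with made-up user, database and password:

import com.mongodb.MongoCredential;

class ScramSha256Sketch {

	public static void main(String[] args) {

		// Direct equivalent of the guarded ReflectionUtils.invokeMethod(...) call above.
		MongoCredential credential = MongoCredential.createScramSha256Credential("jon", "admin",
				"warandpeace".toCharArray());

		System.out.println(credential.getMechanism()); // prints SCRAM-SHA-256
	}
}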
@@ -17,8 +17,6 @@ package org.springframework.data.mongodb.core;

import lombok.AllArgsConstructor;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
@@ -26,19 +24,13 @@ import java.util.stream.Collectors;
import org.bson.Document;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
import org.springframework.data.mongodb.core.aggregation.CountOperation;
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;

/**
@@ -47,7 +39,7 @@ import org.springframework.util.ObjectUtils;
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 2.1
 * @since 2.0.8
 */
@AllArgsConstructor
class AggregationUtil {
@@ -86,13 +78,16 @@ class AggregationUtil {
	 * @param context
	 * @return
	 */
	List<Document> createPipeline(Aggregation aggregation, AggregationOperationContext context) {
	Document createPipeline(String collectionName, Aggregation aggregation, AggregationOperationContext context) {

		if (!ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) {
			return aggregation.toPipeline(context);
			return aggregation.toDocument(collectionName, context);
		}

		return mapAggregationPipeline(aggregation.toPipeline(context));
		Document command = aggregation.toDocument(collectionName, context);
		command.put("pipeline", mapAggregationPipeline(command.get("pipeline", List.class)));

		return command;
	}

	/**
@@ -115,53 +110,6 @@ class AggregationUtil {
		return command;
	}

	/**
	 * Create a {@code $count} aggregation for {@link Query} and optionally a {@link Class entity class}.
	 *
	 * @param query must not be {@literal null}.
	 * @param entityClass can be {@literal null} if the {@link Query} object is empty.
	 * @return the {@link Aggregation} pipeline definition to run a {@code $count} aggregation.
	 */
	Aggregation createCountAggregation(Query query, @Nullable Class<?> entityClass) {

		List<AggregationOperation> pipeline = computeCountAggregationPipeline(query, entityClass);

		Aggregation aggregation = entityClass != null ? Aggregation.newAggregation(entityClass, pipeline)
				: Aggregation.newAggregation(pipeline);
		aggregation.withOptions(AggregationOptions.builder().collation(query.getCollation().orElse(null)).build());

		return aggregation;
	}

	private List<AggregationOperation> computeCountAggregationPipeline(Query query, @Nullable Class<?> entityType) {

		CountOperation count = Aggregation.count().as("totalEntityCount");
		if (query.getQueryObject().isEmpty()) {
			return Collections.singletonList(count);
		}

		Assert.notNull(entityType, "Entity type must not be null!");

		Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(),
				mappingContext.getPersistentEntity(entityType));

		CriteriaDefinition criteria = new CriteriaDefinition() {

			@Override
			public Document getCriteriaObject() {
				return mappedQuery;
			}

			@Nullable
			@Override
			public String getKey() {
				return null;
			}
		};

		return Arrays.asList(Aggregation.match(criteria), count);
|
||||
}
|
||||
|
||||
private List<Document> mapAggregationPipeline(List<Document> pipeline) {
|
||||
|
||||
return pipeline.stream().map(val -> queryMapper.getMappedObject(val, Optional.empty()))
|
||||
|
||||
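To make the count helper above concrete: for a non-empty query it assembles a $match stage on the mapped criteria followed by a $count stage. Built through the public API, the equivalent pipeline looks roughly like this (field name and value are illustrative assumptions):

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.query.Criteria;

class CountPipelineSketch {

	Aggregation countActive() {

		// $match on the (already mapped) query criteria, then $count into "totalEntityCount".
		return Aggregation.newAggregation(
				Aggregation.match(Criteria.where("status").is("ACTIVE")),
				Aggregation.count().as("totalEntityCount"));
	}
}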
@@ -31,7 +31,6 @@ import com.mongodb.bulk.BulkWriteResult;
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
* @author Minsu Kim
|
||||
* @since 1.9
|
||||
*/
|
||||
public interface BulkOperations {
|
||||
@@ -136,29 +135,6 @@ public interface BulkOperations {
|
||||
*/
|
||||
BulkOperations remove(List<Query> removes);
|
||||
|
||||
/**
|
||||
* Add a single replace operation to the bulk operation.
|
||||
*
|
||||
* @param query Update criteria.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default BulkOperations replaceOne(Query query, Object replacement) {
|
||||
return replaceOne(query, replacement, FindAndReplaceOptions.empty());
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a single replace operation to the bulk operation.
|
||||
*
|
||||
* @param query Update criteria.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options);
|
||||
|
||||
/**
|
||||
* Execute all bulk operations using the default write concern.
|
||||
*
|
||||
|
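A hedged usage sketch for the replaceOne bulk contract documented above, assuming a MongoOperations instance and a hypothetical Person document class:

import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class BulkReplaceSketch {

	void replaceJon(MongoOperations operations, Person replacement) {

		BulkOperations bulkOps = operations.bulkOps(BulkMode.UNORDERED, Person.class);

		// Queues a single replace; the default method delegates with FindAndReplaceOptions.empty().
		bulkOps.replaceOne(Query.query(Criteria.where("firstName").is("Jon")), replacement);

		bulkOps.execute(); // returns com.mongodb.bulk.BulkWriteResult
	}
}

// Hypothetical document class used above.
class Person {
	String firstName;
}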
||||
@@ -1,202 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.EqualsAndHashCode;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
|
||||
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.messaging.Message;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.client.model.changestream.ChangeStreamDocument;
|
||||
import com.mongodb.client.model.changestream.OperationType;
|
||||
|
||||
/**
|
||||
* {@link Message} implementation specific to MongoDB <a href="https://docs.mongodb.com/manual/changeStreams/">Change
|
||||
* Streams</a>.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
@EqualsAndHashCode
|
||||
public class ChangeStreamEvent<T> {
|
||||
|
||||
@SuppressWarnings("rawtypes") //
|
||||
private static final AtomicReferenceFieldUpdater<ChangeStreamEvent, Object> CONVERTED_UPDATER = AtomicReferenceFieldUpdater
|
||||
.newUpdater(ChangeStreamEvent.class, Object.class, "converted");
|
||||
|
||||
private final @Nullable ChangeStreamDocument<Document> raw;
|
||||
|
||||
private final Class<T> targetType;
|
||||
private final MongoConverter converter;
|
||||
|
||||
// accessed through CONVERTED_UPDATER.
|
||||
private volatile @Nullable T converted;
|
||||
|
||||
/**
|
||||
* @param raw can be {@literal null}.
|
||||
* @param targetType must not be {@literal null}.
|
||||
* @param converter must not be {@literal null}.
|
||||
*/
|
||||
public ChangeStreamEvent(@Nullable ChangeStreamDocument<Document> raw, Class<T> targetType,
|
||||
MongoConverter converter) {
|
||||
|
||||
this.raw = raw;
|
||||
this.targetType = targetType;
|
||||
this.converter = converter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the raw {@link ChangeStreamDocument} as emitted by the driver.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public ChangeStreamDocument<Document> getRaw() {
|
||||
return raw;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ChangeStreamDocument#getClusterTime() cluster time} as {@link Instant} the event was emitted at.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public Instant getTimestamp() {
|
||||
|
||||
return getBsonTimestamp() != null ? converter.getConversionService().convert(raw.getClusterTime(), Instant.class)
|
||||
: null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ChangeStreamDocument#getClusterTime() cluster time}.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
@Nullable
|
||||
public BsonTimestamp getBsonTimestamp() {
|
||||
return raw != null ? raw.getClusterTime() : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ChangeStreamDocument#getResumeToken() resume token} for this event.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public BsonValue getResumeToken() {
|
||||
return raw != null ? raw.getResumeToken() : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ChangeStreamDocument#getOperationType() operation type} for this event.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public OperationType getOperationType() {
|
||||
return raw != null ? raw.getOperationType() : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the database name the event was originated at.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public String getDatabaseName() {
|
||||
return raw != null ? raw.getNamespace().getDatabaseName() : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the collection name the event was originated at.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public String getCollectionName() {
|
||||
return raw != null ? raw.getNamespace().getCollectionName() : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the potentially converted {@link ChangeStreamDocument#getFullDocument()}.
|
||||
*
|
||||
* @return {@literal null} when {@link #getRaw()} or {@link ChangeStreamDocument#getFullDocument()} is
|
||||
* {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public T getBody() {
|
||||
|
||||
if (raw == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Document fullDocument = raw.getFullDocument();
|
||||
|
||||
if (fullDocument == null) {
|
||||
return targetType.cast(fullDocument);
|
||||
}
|
||||
|
||||
return getConverted(fullDocument);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private T getConverted(Document fullDocument) {
|
||||
return (T) doGetConverted(fullDocument);
|
||||
}
|
||||
|
||||
private Object doGetConverted(Document fullDocument) {
|
||||
|
||||
Object result = CONVERTED_UPDATER.get(this);
|
||||
|
||||
if (result != null) {
|
||||
return result;
|
||||
}
|
||||
|
||||
if (ClassUtils.isAssignable(Document.class, fullDocument.getClass())) {
|
||||
|
||||
result = converter.read(targetType, fullDocument);
|
||||
return CONVERTED_UPDATER.compareAndSet(this, null, result) ? result : CONVERTED_UPDATER.get(this);
|
||||
}
|
||||
|
||||
if (converter.getConversionService().canConvert(fullDocument.getClass(), targetType)) {
|
||||
|
||||
result = converter.getConversionService().convert(fullDocument, targetType);
|
||||
return CONVERTED_UPDATER.compareAndSet(this, null, result) ? result : CONVERTED_UPDATER.get(this);
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("No converter found capable of converting %s to %s",
|
||||
fullDocument.getClass(), targetType));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ChangeStreamEvent {" + "raw=" + raw + ", targetType=" + targetType + '}';
|
||||
}
|
||||
}
|
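As a usage sketch for the ChangeStreamEvent accessors above, a listener method might inspect an event like this; the Person target type is an assumption for illustration:

import java.time.Instant;

import org.springframework.data.mongodb.core.ChangeStreamEvent;

import com.mongodb.client.model.changestream.OperationType;

class ChangeStreamEventSketch {

	void onEvent(ChangeStreamEvent<Person> event) {

		OperationType type = event.getOperationType(); // e.g. INSERT or UPDATE, may be null
		Instant clusterTime = event.getTimestamp(); // converted cluster time, may be null
		Person body = event.getBody(); // converted fullDocument, may be null
	}

	// Hypothetical target type.
	static class Person {}
}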
||||
@@ -1,367 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.EqualsAndHashCode;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.Arrays;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.BsonDocument;
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.client.model.changestream.ChangeStreamDocument;
|
||||
import com.mongodb.client.model.changestream.FullDocument;
|
||||
|
||||
/**
|
||||
* Options applicable to MongoDB <a href="https://docs.mongodb.com/manual/changeStreams/">Change Streams</a>. Intended
|
||||
* to be used along with {@link org.springframework.data.mongodb.core.messaging.ChangeStreamRequest} in a sync world as
|
||||
* well {@link ReactiveMongoOperations} if you prefer it that way.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
@EqualsAndHashCode
|
||||
public class ChangeStreamOptions {
|
||||
|
||||
private @Nullable Object filter;
|
||||
private @Nullable BsonValue resumeToken;
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private Resume resume = Resume.UNDEFINED;
|
||||
|
||||
protected ChangeStreamOptions() {}
|
||||
|
||||
/**
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<Object> getFilter() {
|
||||
return Optional.ofNullable(filter);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<BsonValue> getResumeToken() {
|
||||
return Optional.ofNullable(resumeToken);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<FullDocument> getFullDocumentLookup() {
|
||||
return Optional.ofNullable(fullDocumentLookup);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<Collation> getCollation() {
|
||||
return Optional.ofNullable(collation);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<Instant> getResumeTimestamp() {
|
||||
return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, Instant.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
* @since 2.2
|
||||
*/
|
||||
public Optional<BsonTimestamp> getResumeBsonTimestamp() {
|
||||
return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, BsonTimestamp.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the change stream should be started after the {@link #getResumeToken() token}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public boolean isStartAfter() {
|
||||
return Resume.START_AFTER.equals(resume);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the change stream should be resumed after the {@link #getResumeToken() token}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public boolean isResumeAfter() {
|
||||
return Resume.RESUME_AFTER.equals(resume);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return empty {@link ChangeStreamOptions}.
|
||||
*/
|
||||
public static ChangeStreamOptions empty() {
|
||||
return ChangeStreamOptions.builder().build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a shiny new {@link ChangeStreamOptionsBuilder} and start defining options in this fancy fluent way. Just
|
||||
* don't forget to call {@link ChangeStreamOptionsBuilder#build() build()} when your're done.
|
||||
*
|
||||
* @return new instance of {@link ChangeStreamOptionsBuilder}.
|
||||
*/
|
||||
public static ChangeStreamOptionsBuilder builder() {
|
||||
return new ChangeStreamOptionsBuilder();
|
||||
}
|
||||
|
||||
private static <T> T asTimestampOfType(Object timestamp, Class<T> targetType) {
|
||||
return targetType.cast(doGetTimestamp(timestamp, targetType));
|
||||
}
|
||||
|
||||
private static <T> Object doGetTimestamp(Object timestamp, Class<T> targetType) {
|
||||
|
||||
if (ClassUtils.isAssignableValue(targetType, timestamp)) {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
if (timestamp instanceof Instant) {
|
||||
return new BsonTimestamp((int) ((Instant) timestamp).getEpochSecond(), 0);
|
||||
}
|
||||
|
||||
if (timestamp instanceof BsonTimestamp) {
|
||||
return Instant.ofEpochSecond(((BsonTimestamp) timestamp).getTime());
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(
|
||||
"o_O that should actually not happen. The timestamp should be an Instant or a BsonTimestamp but was "
|
||||
+ ObjectUtils.nullSafeClassName(timestamp));
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
enum Resume {
|
||||
|
||||
UNDEFINED,
|
||||
|
||||
/**
|
||||
* @see com.mongodb.client.ChangeStreamIterable#startAfter(BsonDocument)
|
||||
*/
|
||||
START_AFTER,
|
||||
|
||||
/**
|
||||
* @see com.mongodb.client.ChangeStreamIterable#resumeAfter(BsonDocument)
|
||||
*/
|
||||
RESUME_AFTER
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for creating {@link ChangeStreamOptions}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
public static class ChangeStreamOptionsBuilder {
|
||||
|
||||
private @Nullable Object filter;
|
||||
private @Nullable BsonValue resumeToken;
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private Resume resume = Resume.UNDEFINED;
|
||||
|
||||
private ChangeStreamOptionsBuilder() {}
|
||||
|
||||
/**
|
||||
* Set the collation to use.
|
||||
*
|
||||
* @param collation must not be {@literal null} nor {@literal empty}.
|
||||
* @return this.
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder collation(Collation collation) {
|
||||
|
||||
Assert.notNull(collation, "Collation must not be null nor empty!");
|
||||
|
||||
this.collation = collation;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the filter to apply.
|
||||
* <p/>
|
||||
* Fields on aggregation expression root level are prefixed to map to fields contained in
|
||||
* {@link ChangeStreamDocument#getFullDocument() fullDocument}. However {@literal operationType}, {@literal ns},
|
||||
* {@literal documentKey} and {@literal fullDocument} are reserved words that will be omitted, and therefore taken
|
||||
* as given, during the mapping procedure. You may want to have a look at the
|
||||
* <a href="https://docs.mongodb.com/manual/reference/change-events/">structure of Change Events</a>.
|
||||
* <p/>
|
||||
* Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to ensure filter expressions are
|
||||
* mapped to domain type fields.
|
||||
*
|
||||
* @param filter the {@link Aggregation Aggregation pipeline} to apply for filtering events. Must not be
|
||||
* {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder filter(Aggregation filter) {
|
||||
|
||||
Assert.notNull(filter, "Filter must not be null!");
|
||||
|
||||
this.filter = filter;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the plain filter chain to apply.
|
||||
*
|
||||
* @param filter must not be {@literal null} nor contain {@literal null} values.
|
||||
* @return this.
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder filter(Document... filter) {
|
||||
|
||||
Assert.noNullElements(filter, "Filter must not contain null values");
|
||||
|
||||
this.filter = Arrays.asList(filter);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the resume token (typically a {@link org.bson.BsonDocument} containing a {@link org.bson.BsonBinary binary
|
||||
* token}) after which to start with listening.
|
||||
*
|
||||
* @param resumeToken must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeToken(BsonValue resumeToken) {
|
||||
|
||||
Assert.notNull(resumeToken, "ResumeToken must not be null!");
|
||||
|
||||
this.resumeToken = resumeToken;
|
||||
|
||||
if (this.resume == Resume.UNDEFINED) {
|
||||
this.resume = Resume.RESUME_AFTER;
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link FullDocument} lookup to {@link FullDocument#UPDATE_LOOKUP}.
|
||||
*
|
||||
* @return this.
|
||||
* @see #fullDocumentLookup(FullDocument)
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder returnFullDocumentOnUpdate() {
|
||||
return fullDocumentLookup(FullDocument.UPDATE_LOOKUP);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link FullDocument} lookup to use.
|
||||
*
|
||||
* @param lookup must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder fullDocumentLookup(FullDocument lookup) {
|
||||
|
||||
Assert.notNull(lookup, "Lookup must not be null!");
|
||||
|
||||
this.fullDocumentLookup = lookup;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the cluster time to resume from.
|
||||
*
|
||||
* @param resumeTimestamp must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAt(Instant resumeTimestamp) {
|
||||
|
||||
Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!");
|
||||
|
||||
this.resumeTimestamp = resumeTimestamp;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the cluster time to resume from.
|
||||
*
|
||||
* @param resumeTimestamp must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAt(BsonTimestamp resumeTimestamp) {
|
||||
|
||||
Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!");
|
||||
|
||||
this.resumeTimestamp = resumeTimestamp;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the resume token after which to continue emitting notifications.
|
||||
*
|
||||
* @param resumeToken must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAfter(BsonValue resumeToken) {
|
||||
|
||||
resumeToken(resumeToken);
|
||||
this.resume = Resume.RESUME_AFTER;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the resume token after which to start emitting notifications.
|
||||
*
|
||||
* @param resumeToken must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder startAfter(BsonValue resumeToken) {
|
||||
|
||||
resumeToken(resumeToken);
|
||||
this.resume = Resume.START_AFTER;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the built {@link ChangeStreamOptions}
|
||||
*/
|
||||
public ChangeStreamOptions build() {
|
||||
|
||||
ChangeStreamOptions options = new ChangeStreamOptions();
|
||||
|
||||
options.filter = this.filter;
|
||||
options.resumeToken = this.resumeToken;
|
||||
options.fullDocumentLookup = this.fullDocumentLookup;
|
||||
options.collation = this.collation;
|
||||
options.resumeTimestamp = this.resumeTimestamp;
|
||||
options.resume = this.resume;
|
||||
|
||||
return options;
|
||||
}
|
||||
}
|
||||
}
|
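A short builder usage sketch grounded in the ChangeStreamOptions API above; the filter field and the resume instant are illustrative:

import java.time.Instant;

import org.springframework.data.mongodb.core.ChangeStreamOptions;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.query.Criteria;

class ChangeStreamOptionsSketch {

	ChangeStreamOptions options() {

		return ChangeStreamOptions.builder()
				.filter(Aggregation.newAggregation(Aggregation.match(Criteria.where("operationType").is("insert"))))
				.returnFullDocumentOnUpdate() // shorthand for fullDocumentLookup(FullDocument.UPDATE_LOOKUP)
				.resumeAt(Instant.now()) // cluster time to resume from
				.build();
	}
}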
||||
@@ -15,27 +15,18 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.util.Optionals;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.client.model.ValidationAction;
|
||||
import com.mongodb.client.model.ValidationLevel;
|
||||
|
||||
/**
|
||||
* Provides a simple wrapper to encapsulate the variety of settings you can use when creating a collection.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Andreas Zink
|
||||
*/
|
||||
public class CollectionOptions {
|
||||
|
||||
@@ -43,7 +34,6 @@ public class CollectionOptions {
|
||||
private @Nullable Long size;
|
||||
private @Nullable Boolean capped;
|
||||
private @Nullable Collation collation;
|
||||
private ValidationOptions validationOptions;
|
||||
|
||||
/**
|
||||
* Constructs a new <code>CollectionOptions</code> instance.
|
||||
@@ -56,17 +46,16 @@ public class CollectionOptions {
|
||||
*/
|
||||
@Deprecated
|
||||
public CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped) {
|
||||
this(size, maxDocuments, capped, null, ValidationOptions.none());
|
||||
this(size, maxDocuments, capped, null);
|
||||
}
|
||||
|
||||
private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped,
|
||||
@Nullable Collation collation, ValidationOptions validationOptions) {
|
||||
@Nullable Collation collation) {
|
||||
|
||||
this.maxDocuments = maxDocuments;
|
||||
this.size = size;
|
||||
this.capped = capped;
|
||||
this.collation = collation;
|
||||
this.validationOptions = validationOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -80,7 +69,7 @@ public class CollectionOptions {
|
||||
|
||||
Assert.notNull(collation, "Collation must not be null!");
|
||||
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none());
|
||||
return new CollectionOptions(null, null, null, collation);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -90,7 +79,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public static CollectionOptions empty() {
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none());
|
||||
return new CollectionOptions(null, null, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -101,7 +90,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions capped() {
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, true, collation);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -112,7 +101,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions maxDocuments(long maxDocuments) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -123,7 +112,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions size(long size) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -134,127 +123,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions collation(@Nullable Collation collation) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with already given settings and {@code validationOptions} set to given
|
||||
* {@link MongoJsonSchema}.
|
||||
*
|
||||
* @param schema can be {@literal null}.
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.1
|
||||
*/
|
||||
public CollectionOptions schema(@Nullable MongoJsonSchema schema) {
|
||||
return validator(Validator.schema(schema));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with already given settings and {@code validationOptions} set to given
|
||||
* {@link Validator}.
|
||||
*
|
||||
* @param validator can be {@literal null}.
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.1
|
||||
*/
|
||||
public CollectionOptions validator(@Nullable Validator validator) {
|
||||
return validation(validationOptions.validator(validator));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with already given settings and {@code validationLevel} set to
|
||||
* {@link ValidationLevel#OFF}.
|
||||
*
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.1
|
||||
*/
|
||||
public CollectionOptions disableValidation() {
|
||||
return schemaValidationLevel(ValidationLevel.OFF);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with already given settings and {@code validationLevel} set to
|
||||
* {@link ValidationLevel#STRICT}.
|
||||
*
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.1
|
||||
*/
|
||||
public CollectionOptions strictValidation() {
|
||||
return schemaValidationLevel(ValidationLevel.STRICT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with already given settings and {@code validationLevel} set to
|
||||
* {@link ValidationLevel#MODERATE}.
|
||||
*
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.1
|
||||
*/
|
||||
public CollectionOptions moderateValidation() {
|
||||
return schemaValidationLevel(ValidationLevel.MODERATE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with already given settings and {@code validationAction} set to
|
||||
* {@link ValidationAction#WARN}.
|
||||
*
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.1
|
||||
*/
|
||||
public CollectionOptions warnOnValidationError() {
|
||||
return schemaValidationAction(ValidationAction.WARN);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with already given settings and {@code validationAction} set to
|
||||
* {@link ValidationAction#ERROR}.
|
||||
*
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.1
|
||||
*/
|
||||
public CollectionOptions failOnValidationError() {
|
||||
return schemaValidationAction(ValidationAction.ERROR);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with already given settings and {@code validationLevel} set given
|
||||
* {@link ValidationLevel}.
|
||||
*
|
||||
* @param validationLevel must not be {@literal null}.
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.1
|
||||
*/
|
||||
public CollectionOptions schemaValidationLevel(ValidationLevel validationLevel) {
|
||||
|
||||
Assert.notNull(validationLevel, "ValidationLevel must not be null!");
|
||||
return validation(validationOptions.validationLevel(validationLevel));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with already given settings and {@code validationAction} set given
|
||||
* {@link ValidationAction}.
|
||||
*
|
||||
* @param validationAction must not be {@literal null}.
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.1
|
||||
*/
|
||||
public CollectionOptions schemaValidationAction(ValidationAction validationAction) {
|
||||
|
||||
Assert.notNull(validationAction, "ValidationAction must not be null!");
|
||||
return validation(validationOptions.validationAction(validationAction));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with the given {@link ValidationOptions}.
|
||||
*
|
||||
* @param validationOptions must not be {@literal null}. Use {@link ValidationOptions#none()} to remove validation.
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.1
|
||||
*/
|
||||
public CollectionOptions validation(ValidationOptions validationOptions) {
|
||||
|
||||
Assert.notNull(validationOptions, "ValidationOptions must not be null!");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -294,104 +163,4 @@ public class CollectionOptions {
|
||||
public Optional<Collation> getCollation() {
|
||||
return Optional.ofNullable(collation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link MongoJsonSchema} for the collection.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
* @since 2.1
|
||||
*/
|
||||
public Optional<ValidationOptions> getValidationOptions() {
|
||||
return validationOptions.isEmpty() ? Optional.empty() : Optional.of(validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encapsulation of ValidationOptions options.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Andreas Zink
|
||||
* @since 2.1
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
public static class ValidationOptions {
|
||||
|
||||
private static final ValidationOptions NONE = new ValidationOptions(null, null, null);
|
||||
|
||||
private final @Nullable Validator validator;
|
||||
private final @Nullable ValidationLevel validationLevel;
|
||||
private final @Nullable ValidationAction validationAction;
|
||||
|
||||
/**
|
||||
* Create an empty {@link ValidationOptions}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public static ValidationOptions none() {
|
||||
return NONE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the {@link Validator} to be used for document validation.
|
||||
*
|
||||
* @param validator can be {@literal null}.
|
||||
* @return new instance of {@link ValidationOptions}.
|
||||
*/
|
||||
public ValidationOptions validator(@Nullable Validator validator) {
|
||||
return new ValidationOptions(validator, validationLevel, validationAction);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the validation level to apply.
|
||||
*
|
||||
* @param validationLevel can be {@literal null}.
|
||||
* @return new instance of {@link ValidationOptions}.
|
||||
*/
|
||||
public ValidationOptions validationLevel(ValidationLevel validationLevel) {
|
||||
return new ValidationOptions(validator, validationLevel, validationAction);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the validation action to take.
|
||||
*
|
||||
* @param validationAction can be {@literal null}.
|
||||
* @return new instance of {@link ValidationOptions}.
|
||||
*/
|
||||
public ValidationOptions validationAction(ValidationAction validationAction) {
|
||||
return new ValidationOptions(validator, validationLevel, validationAction);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link Validator} to use.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public Optional<Validator> getValidator() {
|
||||
return Optional.ofNullable(validator);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@code validationLevel} to apply.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<ValidationLevel> getValidationLevel() {
|
||||
return Optional.ofNullable(validationLevel);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@code validationAction} to perform.
|
||||
*
|
||||
* @return @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<ValidationAction> getValidationAction() {
|
||||
return Optional.ofNullable(validationAction);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if no arguments set.
|
||||
*/
|
||||
boolean isEmpty() {
|
||||
return !Optionals.isAnyPresent(getValidator(), getValidationAction(), getValidationLevel());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
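For the validation-related CollectionOptions methods above, a typical chained setup might look like the following; the schema content is an illustrative assumption:

import org.springframework.data.mongodb.core.CollectionOptions;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
import org.springframework.data.mongodb.core.validation.Validator;

class CollectionOptionsSketch {

	CollectionOptions options() {

		MongoJsonSchema schema = MongoJsonSchema.builder()
				.required("firstName") // assumed mandatory field
				.build();

		// Each call returns a new immutable CollectionOptions instance.
		return CollectionOptions.empty()
				.validator(Validator.schema(schema))
				.strictValidation()
				.failOnValidationError();
	}
}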
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.NonNull;
|
||||
import lombok.Value;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
@@ -23,18 +26,11 @@ import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.context.ApplicationEventPublisher;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
@@ -42,13 +38,18 @@ import org.springframework.data.util.Pair;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.bulk.BulkWriteResult;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.model.*;
|
||||
|
||||
import lombok.NonNull;
|
||||
import lombok.Value;
|
||||
import com.mongodb.client.model.BulkWriteOptions;
|
||||
import com.mongodb.client.model.DeleteManyModel;
|
||||
import com.mongodb.client.model.DeleteOneModel;
|
||||
import com.mongodb.client.model.DeleteOptions;
|
||||
import com.mongodb.client.model.InsertOneModel;
|
||||
import com.mongodb.client.model.UpdateManyModel;
|
||||
import com.mongodb.client.model.UpdateOneModel;
|
||||
import com.mongodb.client.model.UpdateOptions;
|
||||
import com.mongodb.client.model.WriteModel;
|
||||
|
||||
/**
|
||||
* Default implementation for {@link BulkOperations}.
|
||||
@@ -57,8 +58,6 @@ import lombok.Value;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Minsu Kim
|
||||
* @author Jens Schauder
|
||||
* @since 1.9
|
||||
*/
|
||||
class DefaultBulkOperations implements BulkOperations {
|
||||
@@ -66,7 +65,7 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
private final MongoOperations mongoOperations;
|
||||
private final String collectionName;
|
||||
private final BulkOperationContext bulkOperationContext;
|
||||
private final List<SourceAwareWriteModelHolder> models = new ArrayList<>();
|
||||
private final List<WriteModel<Document>> models = new ArrayList<>();
|
||||
|
||||
private PersistenceExceptionTranslator exceptionTranslator;
|
||||
private @Nullable WriteConcern defaultWriteConcern;
|
||||
@@ -123,9 +122,16 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
Assert.notNull(document, "Document must not be null!");
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<>(document, collectionName));
|
||||
Object source = maybeInvokeBeforeConvertCallback(document);
|
||||
addModel(source, new InsertOneModel<>(getMappedObject(source)));
|
||||
if (document instanceof Document) {
|
||||
|
||||
models.add(new InsertOneModel<>((Document) document));
|
||||
return this;
|
||||
}
|
||||
|
||||
Document sink = new Document();
|
||||
mongoOperations.getConverter().write(document, sink);
|
||||
|
||||
models.add(new InsertOneModel<>(sink));
|
||||
|
||||
return this;
|
||||
}
|
||||
@@ -239,7 +245,7 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
DeleteOptions deleteOptions = new DeleteOptions();
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation);
|
||||
|
||||
addModel(query, new DeleteManyModel<>(query.getQueryObject(), deleteOptions));
|
||||
models.add(new DeleteManyModel<>(query.getQueryObject(), deleteOptions));
|
||||
|
||||
return this;
|
||||
}
|
||||
@@ -260,29 +266,6 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#replaceOne(org.springframework.data.mongodb.core.query.Query, java.lang.Object, org.springframework.data.mongodb.core.FindAndReplaceOptions)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(replacement, "Replacement must not be null!");
|
||||
Assert.notNull(options, "Options must not be null!");
|
||||
|
||||
ReplaceOptions replaceOptions = new ReplaceOptions();
|
||||
replaceOptions.upsert(options.isUpsert());
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation);
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<>(replacement, collectionName));
|
||||
Object source = maybeInvokeBeforeConvertCallback(replacement);
|
||||
addModel(source,
|
||||
new ReplaceOneModel<>(getMappedQuery(query.getQueryObject()), getMappedObject(source), replaceOptions));
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#executeBulk()
|
||||
@@ -292,44 +275,23 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
try {
|
||||
|
||||
com.mongodb.bulk.BulkWriteResult result = mongoOperations.execute(collectionName, this::bulkWriteTo);
|
||||
MongoCollection<Document> collection = mongoOperations.getCollection(collectionName);
|
||||
if (defaultWriteConcern != null) {
|
||||
collection = collection.withWriteConcern(defaultWriteConcern);
|
||||
}
|
||||
|
||||
Assert.state(result != null, "Result must not be null.");
|
||||
return collection.bulkWrite(models.stream().map(this::mapWriteModel).collect(Collectors.toList()), bulkOptions);
|
||||
|
||||
models.forEach(this::maybeEmitAfterSaveEvent);
|
||||
} catch (BulkWriteException o_O) {
|
||||
|
||||
DataAccessException toThrow = exceptionTranslator.translateExceptionIfPossible(o_O);
|
||||
throw toThrow == null ? o_O : toThrow;
|
||||
|
||||
return result;
|
||||
} finally {
|
||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
|
||||
}
|
||||
}
|
||||
|
||||
private BulkWriteResult bulkWriteTo(MongoCollection<Document> collection) {
|
||||
|
||||
return collection.bulkWrite( //
|
||||
models.stream() //
|
||||
.map(this::extractAndMapWriteModel) //
|
||||
.collect(Collectors.toList()), //
|
||||
bulkOptions);
|
||||
}
|
||||
|
||||
private WriteModel<Document> extractAndMapWriteModel(SourceAwareWriteModelHolder it) {
|
||||
|
||||
maybeEmitBeforeSaveEvent(it);
|
||||
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeInvokeBeforeSaveCallback(it.getSource(), target);
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeInvokeBeforeSaveCallback(it.getSource(), target);
|
||||
}
|
||||
|
||||
return mapWriteModel(it.getModel());
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs update and upsert bulk operations.
|
||||
*
|
||||
@@ -349,9 +311,9 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
|
||||
|
||||
if (multi) {
|
||||
addModel(update, new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
models.add(new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
} else {
|
||||
addModel(update, new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
models.add(new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
}
|
||||
|
||||
return this;
|
||||
@@ -400,76 +362,6 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
return bulkOperationContext.getQueryMapper().getMappedObject(query, bulkOperationContext.getEntity());
|
||||
}
|
||||
|
||||
private Document getMappedObject(Object source) {
|
||||
|
||||
if (source instanceof Document) {
|
||||
return (Document) source;
|
||||
}
|
||||
|
||||
Document sink = new Document();
|
||||
|
||||
mongoOperations.getConverter().write(source, sink);
|
||||
return sink;
|
||||
}
|
||||
|
||||
private void addModel(Object source, WriteModel<Document> model) {
|
||||
models.add(new SourceAwareWriteModelHolder(source, model));
|
||||
}
|
||||
|
||||
private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder it) {
|
||||
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), target, collectionName));
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder it) {
|
||||
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(it.getSource(), target, collectionName));
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(it.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {
|
||||
|
||||
if (null != bulkOperationContext.getEventPublisher()) {
|
||||
bulkOperationContext.getEventPublisher().publishEvent(event);
|
||||
}
|
||||
|
||||
return event;
|
||||
}
|
||||
|
||||
private Object maybeInvokeBeforeConvertCallback(Object value) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(BeforeConvertCallback.class, value, collectionName);
|
||||
}
|
||||
|
||||
private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(BeforeSaveCallback.class, value, mappedDocument,
|
||||
collectionName);
|
||||
}
|
||||
|
||||
private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {
|
||||
|
||||
BulkWriteOptions options = new BulkWriteOptions();
|
||||
@@ -499,20 +391,5 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
@NonNull Optional<? extends MongoPersistentEntity<?>> entity;
|
||||
@NonNull QueryMapper queryMapper;
|
||||
@NonNull UpdateMapper updateMapper;
|
||||
ApplicationEventPublisher eventPublisher;
|
||||
EntityCallbacks entityCallbacks;
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object chaining together an actual source with its {@link WriteModel} representation.
|
||||
*
|
||||
* @since 2.2
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@Value
|
||||
private static class SourceAwareWriteModelHolder {
|
||||
|
||||
Object source;
|
||||
WriteModel<Document> model;
|
||||
}
}
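For orientation, a minimal usage sketch of the bulk API implemented by DefaultBulkOperations above; the Person type and the "template" instance are illustrative assumptions, not part of the diff.

import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.bulk.BulkWriteResult;

class BulkOperationsUsageSketch {

    // Hypothetical document type used only for this example.
    static class Person {
        String id;
        String name;
        Person(String name) { this.name = name; }
    }

    BulkWriteResult runBulk(MongoTemplate template) {

        // UNORDERED lets the server continue past individual write errors.
        BulkOperations bulk = template.bulkOps(BulkMode.UNORDERED, Person.class);

        bulk.insert(new Person("Walter"));                                // queued as an InsertOneModel
        bulk.updateOne(Query.query(Criteria.where("name").is("Jesse")),   // queued as an UpdateOneModel
                new Update().set("name", "Jesse Pinkman"));

        // All queued models are mapped and sent in a single bulkWrite(...) call.
        return bulk.execute();
    }
}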
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.springframework.data.mongodb.core.MongoTemplate.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
@@ -48,22 +50,18 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
private static final String PARTIAL_FILTER_EXPRESSION_KEY = "partialFilterExpression";
|
||||
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
private final String collectionName;
|
||||
private final QueryMapper mapper;
|
||||
private final @Nullable Class<?> type;
|
||||
|
||||
private MongoOperations mongoOperations;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultIndexOperations}.
|
||||
*
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
* @param collectionName must not be {@literal null}.
|
||||
* @param queryMapper must not be {@literal null}.
|
||||
* @deprecated since 2.1. Please use
|
||||
* {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) {
|
||||
this(mongoDbFactory, collectionName, queryMapper, null);
|
||||
}
|
||||
@@ -76,10 +74,7 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
* @param queryMapper must not be {@literal null}.
|
||||
* @param type Type used for mapping potential partial index filter expression. Can be {@literal null}.
|
||||
* @since 1.10
|
||||
* @deprecated since 2.1. Please use
|
||||
* {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper,
|
||||
@Nullable Class<?> type) {
|
||||
|
||||
@@ -87,29 +82,10 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
Assert.notNull(collectionName, "Collection name can not be null!");
|
||||
Assert.notNull(queryMapper, "QueryMapper must not be null!");
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.collectionName = collectionName;
|
||||
this.mapper = queryMapper;
|
||||
this.type = type;
|
||||
this.mongoOperations = new MongoTemplate(mongoDbFactory);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultIndexOperations}.
|
||||
*
|
||||
* @param mongoOperations must not be {@literal null}.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @param type can be {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
public DefaultIndexOperations(MongoOperations mongoOperations, String collectionName, @Nullable Class<?> type) {
|
||||
|
||||
Assert.notNull(mongoOperations, "MongoOperations must not be null!");
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
|
||||
this.mongoOperations = mongoOperations;
|
||||
this.mapper = new QueryMapper(mongoOperations.getConverter());
|
||||
this.collectionName = collectionName;
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -120,15 +96,19 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
return execute(collection -> {
|
||||
|
||||
MongoPersistentEntity<?> entity = lookupPersistentEntity(type, collectionName);
|
||||
Document indexOptions = indexDefinition.getIndexOptions();
|
||||
|
||||
IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
IndexOptions ops = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
|
||||
indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity);
|
||||
indexOptions = addDefaultCollationIfRequired(indexOptions, entity);
|
||||
if (indexOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
|
||||
Document mappedKeys = mapper.getMappedObject(indexDefinition.getIndexKeys(), entity);
|
||||
return collection.createIndex(mappedKeys, indexOptions);
|
||||
Assert.isInstanceOf(Document.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
|
||||
ops.partialFilterExpression(mapper.getMappedObject((Document) indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY),
|
||||
lookupPersistentEntity(type, collectionName)));
|
||||
}
|
||||
|
||||
return collection.createIndex(indexDefinition.getIndexKeys(), ops);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -188,7 +168,7 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
private List<IndexInfo> getIndexData(MongoCursor<Document> cursor) {
|
||||
|
||||
List<IndexInfo> indexInfoList = new ArrayList<>();
|
||||
List<IndexInfo> indexInfoList = new ArrayList<IndexInfo>();
|
||||
|
||||
while (cursor.hasNext()) {
|
||||
|
||||
@@ -207,31 +187,11 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
Assert.notNull(callback, "CollectionCallback must not be null!");
|
||||
|
||||
if (type != null) {
|
||||
return mongoOperations.execute(type, callback);
|
||||
try {
|
||||
MongoCollection<Document> collection = mongoDbFactory.getDb().getCollection(collectionName);
|
||||
return callback.doInCollection(collection);
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e, mongoDbFactory.getExceptionTranslator());
|
||||
}
|
||||
|
||||
return mongoOperations.execute(collectionName, callback);
|
||||
}
|
||||
|
||||
private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions,
|
||||
@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
return ops.partialFilterExpression(
|
||||
mapper.getMappedObject((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity));
|
||||
}
|
||||
|
||||
private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (ops.getCollation() != null || entity == null || !entity.hasCollation()) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
return ops.collation(entity.getCollation().toMongoCollation());
|
||||
}
}
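A small sketch of the index API this implementation backs, including a partial filter expression of the kind addPartialFilterIfPresent(...) maps through the QueryMapper; "template" and Person are assumed names for illustration.

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.PartialIndexFilter;
import org.springframework.data.mongodb.core.query.Criteria;

class PartialIndexSketch {

    // Hypothetical document type used only for this example.
    static class Person {
        Integer age;
    }

    String createIndex(MongoTemplate template) {

        Index index = new Index().on("age", Direction.ASC)
                .partial(PartialIndexFilter.of(Criteria.where("age").gte(18))); // becomes partialFilterExpression

        // The filter's field names are translated via the entity's mapping metadata before createIndex(...).
        return template.indexOps(Person.class).ensureIndex(index);
    }
}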
@@ -94,16 +94,23 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
|
||||
return mongoOperations.execute(collectionName, collection -> {
|
||||
|
||||
MongoPersistentEntity<?> entity = type
|
||||
.map(val -> (MongoPersistentEntity) queryMapper.getMappingContext().getRequiredPersistentEntity(val))
|
||||
.orElseGet(() -> lookupPersistentEntity(collectionName));
|
||||
Document indexOptions = indexDefinition.getIndexOptions();
|
||||
|
||||
IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
IndexOptions ops = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
|
||||
indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity);
|
||||
indexOptions = addDefaultCollationIfRequired(indexOptions, entity);
|
||||
if (indexOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
|
||||
return collection.createIndex(indexDefinition.getIndexKeys(), indexOptions);
|
||||
Assert.isInstanceOf(Document.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
|
||||
MongoPersistentEntity<?> entity = type
|
||||
.map(val -> (MongoPersistentEntity) queryMapper.getMappingContext().getRequiredPersistentEntity(val))
|
||||
.orElseGet(() -> lookupPersistentEntity(collectionName));
|
||||
|
||||
ops = ops.partialFilterExpression(
|
||||
queryMapper.getMappedObject(indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY, Document.class), entity));
|
||||
}
|
||||
|
||||
return collection.createIndex(indexDefinition.getIndexKeys(), ops);
|
||||
|
||||
}).next();
|
||||
}
|
||||
@@ -119,24 +126,21 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropIndex(java.lang.String)
|
||||
*/
|
||||
public Mono<Void> dropIndex(final String name) {
|
||||
return mongoOperations.execute(collectionName, collection -> collection.dropIndex(name)).then();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropAllIndexes()
|
||||
*/
|
||||
public Mono<Void> dropAllIndexes() {
|
||||
return dropIndex("*");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#getIndexInfo()
|
||||
*/
|
||||
public Flux<IndexInfo> getIndexInfo() {
|
||||
@@ -144,25 +148,4 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
return mongoOperations.execute(collectionName, collection -> collection.listIndexes(Document.class)) //
|
||||
.map(IndexConverters.documentToIndexInfoConverter()::convert);
|
||||
}
|
||||
|
||||
private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions,
|
||||
@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
return ops.partialFilterExpression(
|
||||
queryMapper.getMappedObject((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity));
|
||||
}
|
||||
|
||||
private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (ops.getCollation() != null || entity == null || !entity.hasCollation()) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
return ops.collation(entity.getCollation().toMongoCollation());
|
||||
}
}
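The reactive counterpart is driven the same way; a minimal sketch, assuming a ReactiveMongoTemplate named "template" and the same illustrative Person type.

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.index.Index;
import reactor.core.publisher.Mono;

class ReactiveIndexSketch {

    // Hypothetical document type used only for this example.
    static class Person {
        Integer age;
    }

    Mono<String> createIndex(ReactiveMongoTemplate template) {
        // ensureIndex(...) emits the name of the created index once the server acknowledges it.
        return template.indexOps(Person.class).ensureIndex(new Index().on("age", Direction.ASC));
    }
}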
@@ -42,15 +42,13 @@ import com.mongodb.MongoException;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Default implementation of {@link ScriptOperations} capable of saving and executing {@link ExecutableMongoScript}.
|
||||
* Default implementation of {@link ScriptOperations} capable of saving and executing {@link ServerSideJavaScript}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0.
|
||||
*/
|
||||
@Deprecated
|
||||
class DefaultScriptOperations implements ScriptOperations {
|
||||
|
||||
private static final String SCRIPT_COLLECTION_NAME = "system.js";
|
||||
|
||||
@@ -1,837 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.mapping.IdentifierAccessor;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
import org.springframework.data.mongodb.core.convert.MongoWriter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.LinkedMultiValueMap;
|
||||
import org.springframework.util.MultiValueMap;
|
||||
|
||||
/**
|
||||
* Common operations performed on an entity in the context of its mapping metadata.
*
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
* @see MongoTemplate
|
||||
* @see ReactiveMongoTemplate
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class EntityOperations {
|
||||
|
||||
private static final String ID_FIELD = "_id";
|
||||
|
||||
private final @NonNull MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Entity} for the given bean.
|
||||
*
|
||||
* @param entity must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public <T> Entity<T> forEntity(T entity) {
|
||||
|
||||
Assert.notNull(entity, "Bean must not be null!");
|
||||
|
||||
if (entity instanceof String) {
|
||||
return new UnmappedEntity(parse(entity.toString()));
|
||||
}
|
||||
|
||||
if (entity instanceof Map) {
|
||||
return new SimpleMappedEntity((Map<String, Object>) entity);
|
||||
}
|
||||
|
||||
return MappedEntity.of(entity, context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link AdaptibleEntity} for the given bean and {@link ConversionService}.
|
||||
*
|
||||
* @param entity must not be {@literal null}.
|
||||
* @param conversionService must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public <T> AdaptibleEntity<T> forEntity(T entity, ConversionService conversionService) {
|
||||
|
||||
Assert.notNull(entity, "Bean must not be null!");
|
||||
Assert.notNull(conversionService, "ConversionService must not be null!");
|
||||
|
||||
if (entity instanceof String) {
|
||||
return new UnmappedEntity(parse(entity.toString()));
|
||||
}
|
||||
|
||||
if (entity instanceof Map) {
|
||||
return new SimpleMappedEntity((Map<String, Object>) entity);
|
||||
}
|
||||
|
||||
return AdaptibleMappedEntity.of(entity, context, conversionService);
|
||||
}
|
||||
|
||||
public String determineCollectionName(@Nullable Class<?> entityClass) {
|
||||
|
||||
if (entityClass == null) {
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
"No class parameter provided, entity collection can't be determined!");
|
||||
}
|
||||
|
||||
return context.getRequiredPersistentEntity(entityClass).getCollection();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the collection name to be used for the given entity.
|
||||
*
|
||||
* @param obj can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
public String determineEntityCollectionName(@Nullable Object obj) {
|
||||
return null == obj ? null : determineCollectionName(obj.getClass());
|
||||
}
|
||||
|
||||
public Query getByIdInQuery(Collection<?> entities) {
|
||||
|
||||
MultiValueMap<String, Object> byIds = new LinkedMultiValueMap<>();
|
||||
|
||||
entities.stream() //
|
||||
.map(this::forEntity) //
|
||||
.forEach(it -> byIds.add(it.getIdFieldName(), it.getId()));
|
||||
|
||||
Criteria[] criterias = byIds.entrySet().stream() //
|
||||
.map(it -> Criteria.where(it.getKey()).in(it.getValue())) //
|
||||
.toArray(Criteria[]::new);
|
||||
|
||||
return new Query(criterias.length == 1 ? criterias[0] : new Criteria().orOperator(criterias));
}
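For readers skimming this hunk: getByIdInQuery(...) effectively collects the entities' identifiers and issues an $in query per id field. A minimal equivalent with hypothetical id values, shown only for illustration:

import java.util.Arrays;

import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class ByIdInQuerySketch {

    // Equivalent of what getByIdInQuery(...) produces for three entities that all map their id to "_id":
    // { "_id" : { "$in" : [ 1, 2, 3 ] } }
    Query byIds() {
        return new Query(Criteria.where("_id").in(Arrays.asList(1, 2, 3)));
    }
}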
/**
|
||||
* Returns the name of the identifier property. Considers mapping information but falls back to the MongoDB default of
|
||||
* {@code _id} if no identifier property can be found.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public String getIdPropertyName(Class<?> type) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(type);
|
||||
|
||||
if (persistentEntity != null && persistentEntity.getIdProperty() != null) {
|
||||
return persistentEntity.getRequiredIdProperty().getName();
|
||||
}
|
||||
|
||||
return ID_FIELD;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the name used for {@code $geoNear.distanceField} avoiding clashes with potentially existing properties.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return the name of the distanceField to use. {@literal dis} by default.
|
||||
* @since 2.2
|
||||
*/
|
||||
public String nearQueryDistanceFieldName(Class<?> domainType) {
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(domainType);
|
||||
if (persistentEntity == null || persistentEntity.getPersistentProperty("dis") == null) {
|
||||
return "dis";
|
||||
}
|
||||
|
||||
String distanceFieldName = "calculated-distance";
|
||||
int counter = 0;
|
||||
while (persistentEntity.getPersistentProperty(distanceFieldName) != null) {
|
||||
distanceFieldName += "-" + (counter++);
|
||||
}
|
||||
|
||||
return distanceFieldName;
|
||||
}
|
||||
|
||||
private static Document parse(String source) {
|
||||
|
||||
try {
|
||||
return Document.parse(source);
|
||||
} catch (org.bson.json.JsonParseException o_O) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", o_O);
|
||||
} catch (RuntimeException o_O) {
|
||||
|
||||
// legacy 3.x exception
|
||||
if (ClassUtils.matchesTypeName(o_O.getClass(), "JSONParseException")) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", o_O);
|
||||
}
|
||||
throw o_O;
|
||||
}
|
||||
}
|
||||
|
||||
public <T> TypedOperations<T> forType(@Nullable Class<T> entityClass) {
|
||||
|
||||
if (entityClass != null) {
|
||||
|
||||
MongoPersistentEntity<?> entity = context.getPersistentEntity(entityClass);
|
||||
|
||||
if (entity != null) {
|
||||
return new TypedEntityOperations(entity);
|
||||
}
|
||||
|
||||
}
|
||||
return UntypedOperations.instance();
|
||||
}
|
||||
|
||||
/**
|
||||
* A representation of information about an entity.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @since 2.1
|
||||
*/
|
||||
interface Entity<T> {
|
||||
|
||||
/**
|
||||
* Returns the field name of the identifier of the entity.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String getIdFieldName();
|
||||
|
||||
/**
|
||||
* Returns the identifier of the entity.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Object getId();
|
||||
|
||||
/**
|
||||
* Returns the {@link Query} to find the entity by its identifier.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Query getByIdQuery();
|
||||
|
||||
/**
|
||||
* Returns the {@link Query} to remove an entity by its {@literal id} and if applicable {@literal version}.
|
||||
*
|
||||
* @return the {@link Query} to use for removing the entity. Never {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default Query getRemoveByQuery() {
|
||||
return isVersionedEntity() ? getQueryForVersion() : getByIdQuery();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link Query} to find the entity in its current version.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Query getQueryForVersion();
|
||||
|
||||
/**
|
||||
* Maps the backing entity into a {@link MappedDocument} using the given {@link MongoWriter}.
|
||||
*
|
||||
* @param writer must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
MappedDocument toMappedDocument(MongoWriter<? super T> writer);
|
||||
|
||||
/**
|
||||
* Asserts that the identifier type is updatable in case it's not already set.
*/
|
||||
default void assertUpdateableIdIfNotSet() {}
|
||||
|
||||
/**
|
||||
* Returns whether the entity is versioned, i.e. if it contains a version property.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
default boolean isVersionedEntity() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the value of the version if the entity {@link #isVersionedEntity() has a version property}.
|
||||
*
|
||||
* @return the entity version. Can be {@literal null}.
|
||||
* @throws IllegalStateException if the entity does not define a {@literal version} property. Make sure to check
|
||||
* {@link #isVersionedEntity()}.
|
||||
*/
|
||||
@Nullable
|
||||
Object getVersion();
|
||||
|
||||
/**
|
||||
* Returns the underlying bean.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
T getBean();
|
||||
|
||||
/**
|
||||
* Returns whether the entity is considered to be new.
|
||||
*
|
||||
* @return
|
||||
* @since 2.1.2
|
||||
*/
|
||||
boolean isNew();
|
||||
}
|
||||
|
||||
/**
|
||||
* Information and commands on an entity.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @since 2.1
|
||||
*/
|
||||
interface AdaptibleEntity<T> extends Entity<T> {
|
||||
|
||||
/**
|
||||
* Populates the identifier of the backing entity if it has an identifier property and there's no identifier
|
||||
* currently present.
|
||||
*
|
||||
* @param id must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
T populateIdIfNecessary(@Nullable Object id);
|
||||
|
||||
/**
|
||||
* Initializes the version property of the current entity if available.
*
|
||||
* @return the entity with the version property updated if available.
|
||||
*/
|
||||
T initializeVersionProperty();
|
||||
|
||||
/**
|
||||
* Increments the value of the version property if available.
|
||||
*
|
||||
* @return the entity with the version property incremented if available.
|
||||
*/
|
||||
T incrementVersion();
|
||||
|
||||
/**
|
||||
* Returns the current version value if the entity has a version property.
|
||||
*
|
||||
* @return the current version or {@literal null} in case it's uninitialized.
|
||||
* @throws IllegalStateException if the entity does not define a {@literal version} property.
|
||||
*/
|
||||
@Nullable
|
||||
Number getVersion();
|
||||
}
|
||||
|
||||
@RequiredArgsConstructor
|
||||
private static class UnmappedEntity<T extends Map<String, Object>> implements AdaptibleEntity<T> {
|
||||
|
||||
private final T map;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getIdPropertyName()
|
||||
*/
|
||||
@Override
|
||||
public String getIdFieldName() {
|
||||
return ID_FIELD;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getId()
|
||||
*/
|
||||
@Override
|
||||
public Object getId() {
|
||||
return map.get(ID_FIELD);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getByIdQuery()
|
||||
*/
|
||||
@Override
|
||||
public Query getByIdQuery() {
|
||||
return Query.query(Criteria.where(ID_FIELD).is(map.get(ID_FIELD)));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#populateIdIfNecessary(java.lang.Object)
|
||||
*/
|
||||
@Nullable
|
||||
@Override
|
||||
public T populateIdIfNecessary(@Nullable Object id) {
|
||||
|
||||
map.put(ID_FIELD, id);
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getQueryForVersion()
|
||||
*/
|
||||
@Override
|
||||
public Query getQueryForVersion() {
|
||||
throw new MappingException("Cannot query for version on plain Documents!");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#toMappedDocument(org.springframework.data.mongodb.core.convert.MongoWriter)
|
||||
*/
|
||||
@Override
|
||||
public MappedDocument toMappedDocument(MongoWriter<? super T> writer) {
|
||||
return MappedDocument.of(map instanceof Document //
|
||||
? (Document) map //
|
||||
: new Document(map));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#initializeVersionProperty()
|
||||
*/
|
||||
@Override
|
||||
public T initializeVersionProperty() {
|
||||
return map;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#getVersion()
|
||||
*/
|
||||
@Override
|
||||
@Nullable
|
||||
public Number getVersion() {
|
||||
return null;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#incrementVersion()
|
||||
*/
|
||||
@Override
|
||||
public T incrementVersion() {
|
||||
return map;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getBean()
|
||||
*/
|
||||
@Override
|
||||
public T getBean() {
|
||||
return map;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.Entity#isNew()
|
||||
*/
|
||||
@Override
|
||||
public boolean isNew() {
|
||||
return map.get(ID_FIELD) != null;
|
||||
}
|
||||
}
|
||||
|
||||
private static class SimpleMappedEntity<T extends Map<String, Object>> extends UnmappedEntity<T> {
|
||||
|
||||
SimpleMappedEntity(T map) {
|
||||
super(map);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#toMappedDocument(org.springframework.data.mongodb.core.convert.MongoWriter)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public MappedDocument toMappedDocument(MongoWriter<? super T> writer) {
|
||||
|
||||
T bean = getBean();
|
||||
bean = (T) (bean instanceof Document //
|
||||
? (Document) bean //
|
||||
: new Document(bean));
|
||||
Document document = new Document();
|
||||
writer.write(bean, document);
|
||||
|
||||
return MappedDocument.of(document);
|
||||
}
|
||||
}
|
||||
|
||||
@RequiredArgsConstructor(access = AccessLevel.PROTECTED)
|
||||
private static class MappedEntity<T> implements Entity<T> {
|
||||
|
||||
private final @NonNull MongoPersistentEntity<?> entity;
|
||||
private final @NonNull IdentifierAccessor idAccessor;
|
||||
private final @NonNull PersistentPropertyAccessor<T> propertyAccessor;
|
||||
|
||||
private static <T> MappedEntity<T> of(T bean,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context) {
|
||||
|
||||
MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(bean.getClass());
|
||||
IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(bean);
|
||||
PersistentPropertyAccessor<T> propertyAccessor = entity.getPropertyAccessor(bean);
|
||||
|
||||
return new MappedEntity<>(entity, identifierAccessor, propertyAccessor);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getIdPropertyName()
|
||||
*/
|
||||
@Override
|
||||
public String getIdFieldName() {
|
||||
return entity.getRequiredIdProperty().getFieldName();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getId()
|
||||
*/
|
||||
@Override
|
||||
public Object getId() {
|
||||
return idAccessor.getRequiredIdentifier();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getByIdQuery()
|
||||
*/
|
||||
@Override
|
||||
public Query getByIdQuery() {
|
||||
|
||||
if (!entity.hasIdProperty()) {
|
||||
throw new MappingException("No id property found for object of type " + entity.getType() + "!");
|
||||
}
|
||||
|
||||
MongoPersistentProperty idProperty = entity.getRequiredIdProperty();
|
||||
|
||||
return Query.query(Criteria.where(idProperty.getName()).is(getId()));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getQueryForVersion(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public Query getQueryForVersion() {
|
||||
|
||||
MongoPersistentProperty idProperty = entity.getRequiredIdProperty();
|
||||
MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty();
|
||||
|
||||
return new Query(Criteria.where(idProperty.getName()).is(getId())//
|
||||
.and(versionProperty.getName()).is(getVersion()));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#toMappedDocument(org.springframework.data.mongodb.core.convert.MongoWriter)
|
||||
*/
|
||||
@Override
|
||||
public MappedDocument toMappedDocument(MongoWriter<? super T> writer) {
|
||||
|
||||
T bean = propertyAccessor.getBean();
|
||||
|
||||
Document document = new Document();
|
||||
writer.write(bean, document);
|
||||
|
||||
if (document.containsKey(ID_FIELD) && document.get(ID_FIELD) == null) {
|
||||
document.remove(ID_FIELD);
|
||||
}
|
||||
|
||||
return MappedDocument.of(document);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.Entity#assertUpdateableIdIfNotSet()
|
||||
*/
|
||||
public void assertUpdateableIdIfNotSet() {
|
||||
|
||||
if (!entity.hasIdProperty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
MongoPersistentProperty property = entity.getRequiredIdProperty();
|
||||
Object propertyValue = idAccessor.getIdentifier();
|
||||
|
||||
if (propertyValue != null) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(property.getType())) {
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
String.format("Cannot autogenerate id of type %s for entity of type %s!", property.getType().getName(),
|
||||
entity.getType().getName()));
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#isVersionedEntity()
|
||||
*/
|
||||
@Override
|
||||
public boolean isVersionedEntity() {
|
||||
return entity.hasVersionProperty();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getVersion()
|
||||
*/
|
||||
@Override
|
||||
@Nullable
|
||||
public Object getVersion() {
|
||||
return propertyAccessor.getProperty(entity.getRequiredVersionProperty());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getBean()
|
||||
*/
|
||||
@Override
|
||||
public T getBean() {
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.Entity#isNew()
|
||||
*/
|
||||
@Override
|
||||
public boolean isNew() {
|
||||
return entity.isNew(propertyAccessor.getBean());
|
||||
}
|
||||
}
|
||||
|
||||
private static class AdaptibleMappedEntity<T> extends MappedEntity<T> implements AdaptibleEntity<T> {
|
||||
|
||||
private final MongoPersistentEntity<?> entity;
|
||||
private final ConvertingPropertyAccessor<T> propertyAccessor;
|
||||
private final IdentifierAccessor identifierAccessor;
|
||||
|
||||
private AdaptibleMappedEntity(MongoPersistentEntity<?> entity, IdentifierAccessor identifierAccessor,
|
||||
ConvertingPropertyAccessor<T> propertyAccessor) {
|
||||
|
||||
super(entity, identifierAccessor, propertyAccessor);
|
||||
|
||||
this.entity = entity;
|
||||
this.propertyAccessor = propertyAccessor;
|
||||
this.identifierAccessor = identifierAccessor;
|
||||
}
|
||||
|
||||
private static <T> AdaptibleEntity<T> of(T bean,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context,
|
||||
ConversionService conversionService) {
|
||||
|
||||
MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(bean.getClass());
|
||||
IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(bean);
|
||||
PersistentPropertyAccessor<T> propertyAccessor = entity.getPropertyAccessor(bean);
|
||||
|
||||
return new AdaptibleMappedEntity<>(entity, identifierAccessor,
|
||||
new ConvertingPropertyAccessor<>(propertyAccessor, conversionService));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity#populateIdIfNecessary(java.lang.Object)
|
||||
*/
|
||||
@Nullable
|
||||
@Override
|
||||
public T populateIdIfNecessary(@Nullable Object id) {
|
||||
|
||||
if (id == null) {
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
|
||||
MongoPersistentProperty idProperty = entity.getIdProperty();
|
||||
if (idProperty == null) {
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
|
||||
if (identifierAccessor.getIdentifier() != null) {
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
|
||||
propertyAccessor.setProperty(idProperty, id);
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.MappedEntity#getVersion()
|
||||
*/
|
||||
@Override
|
||||
@Nullable
|
||||
public Number getVersion() {
|
||||
|
||||
MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty();
|
||||
|
||||
return propertyAccessor.getProperty(versionProperty, Number.class);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity#initializeVersionProperty()
|
||||
*/
|
||||
@Override
|
||||
public T initializeVersionProperty() {
|
||||
|
||||
if (!entity.hasVersionProperty()) {
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
|
||||
MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty();
|
||||
|
||||
propertyAccessor.setProperty(versionProperty, versionProperty.getType().isPrimitive() ? 1 : 0);
|
||||
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity#incrementVersion()
|
||||
*/
|
||||
@Override
|
||||
public T incrementVersion() {
|
||||
|
||||
MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty();
|
||||
Number version = getVersion();
|
||||
Number nextVersion = version == null ? 0 : version.longValue() + 1;
|
||||
|
||||
propertyAccessor.setProperty(versionProperty, nextVersion);
|
||||
|
||||
return propertyAccessor.getBean();
|
||||
}
}
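To illustrate the version handling above (initializeVersionProperty, incrementVersion, getQueryForVersion), a sketch of a versioned document; the Account type and its fields are assumptions, the described behaviour follows the code shown.

import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;

class Account {

    @Id String id;

    // null before the first save; initializeVersionProperty() sets it on insert,
    // incrementVersion() bumps it on subsequent saves, and getQueryForVersion()
    // matches on both the id and the expected version so that stale updates fail.
    @Version Long version;
}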
/**
|
||||
* Type-specific operations abstraction.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @param <T>
|
||||
* @since 2.2
|
||||
*/
|
||||
interface TypedOperations<T> {
|
||||
|
||||
/**
|
||||
* Return the optional {@link Collation} for the underlying entity.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation();
|
||||
|
||||
/**
|
||||
* Return the optional {@link Collation} from the given {@link Query} and fall back to the collation configured for
|
||||
* the underlying entity.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation(Query query);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypedOperations} for generic entities that are not represented with {@link PersistentEntity} (e.g. custom
|
||||
* conversions).
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
enum UntypedOperations implements TypedOperations<Object> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public static <T> TypedOperations<T> instance() {
|
||||
return (TypedOperations) INSTANCE;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation()
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation() {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation(Query query) {
|
||||
|
||||
if (query == null) {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
return query.getCollation();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypedOperations} backed by {@link MongoPersistentEntity}.
|
||||
*
|
||||
* @param <T>
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static class TypedEntityOperations<T> implements TypedOperations<T> {
|
||||
|
||||
private final @NonNull MongoPersistentEntity<T> entity;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation()
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation() {
|
||||
return Optional.ofNullable(entity.getCollation());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation(Query query) {
|
||||
|
||||
if (query.getCollation().isPresent()) {
|
||||
return query.getCollation();
|
||||
}
|
||||
|
||||
return Optional.ofNullable(entity.getCollation());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -35,10 +35,22 @@ import org.springframework.util.StringUtils;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableAggregationOperationSupport implements ExecutableAggregationOperation {
|
||||
|
||||
private final @NonNull MongoTemplate template;
|
||||
private final MongoTemplate template;
|
||||
|
||||
/**
|
||||
* Create new instance of {@link ExecutableAggregationOperationSupport}.
|
||||
*
|
||||
* @param template must not be {@literal null}.
|
||||
* @throws IllegalArgumentException if template is {@literal null}.
|
||||
*/
|
||||
ExecutableAggregationOperationSupport(MongoTemplate template) {
|
||||
|
||||
Assert.notNull(template, "Template must not be null!");
|
||||
|
||||
this.template = template;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -119,11 +131,11 @@ class ExecutableAggregationOperationSupport implements ExecutableAggregationOper
|
||||
TypedAggregation<?> typedAggregation = (TypedAggregation<?>) aggregation;
|
||||
|
||||
if (typedAggregation.getInputType() != null) {
|
||||
return template.getCollectionName(typedAggregation.getInputType());
|
||||
return template.determineCollectionName(typedAggregation.getInputType());
|
||||
}
|
||||
}
|
||||
|
||||
return template.getCollectionName(domainType);
|
||||
return template.determineCollectionName(domainType);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,14 +19,11 @@ import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* {@link ExecutableFindOperation} allows creation and execution of MongoDB find operations in a fluent API style.
|
||||
* <br />
|
||||
@@ -205,7 +202,7 @@ public interface ExecutableFindOperation {
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface FindWithProjection<T> extends FindWithQuery<T>, FindDistinct {
|
||||
interface FindWithProjection<T> extends FindWithQuery<T> {
|
||||
|
||||
/**
|
||||
* Define the target type fields should be mapped to. <br />
|
||||
@@ -217,101 +214,6 @@ public interface ExecutableFindOperation {
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
*/
|
||||
<R> FindWithQuery<R> as(Class<R> resultType);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Distinct Find support.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface FindDistinct {
|
||||
|
||||
/**
|
||||
* Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view.
|
||||
*
|
||||
* @param field name of the field. Must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if field is {@literal null}.
|
||||
*/
|
||||
TerminatingDistinct<Object> distinct(String field);
|
||||
}
|
||||
|
||||
/**
|
||||
* Result type override. Optional.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface DistinctWithProjection {
|
||||
|
||||
/**
|
||||
* Define the target type the result should be mapped to. <br />
|
||||
* Skip this step if you are fine with the default conversion anyway.
|
||||
* <dl>
|
||||
* <dt>{@link Object} (the default)</dt>
|
||||
* <dd>Result is mapped according to the {@link org.bson.BsonType} converting eg. {@link org.bson.BsonString} into
|
||||
* plain {@link String}, {@link org.bson.BsonInt64} to {@link Long}, etc. always picking the most concrete type with
|
||||
* respect to the domain types property.<br />
|
||||
* Any {@link org.bson.BsonType#DOCUMENT} is run through the {@link org.springframework.data.convert.EntityReader}
|
||||
* to obtain the domain type. <br />
|
||||
* Using {@link Object} also works for non strictly typed fields. Eg. a mixture of different types like fields using
* {@link String} in one {@link org.bson.Document} while {@link Long} in another.</dd>
|
||||
* <dt>Any Simple type like {@link String} or {@link Long}.</dt>
|
||||
* <dd>The result is mapped directly by the MongoDB Java driver and the {@link org.bson.codecs.CodeCodec Codecs} in
|
||||
* place. This works only for results where all documents considered for the operation use the very same type for
|
||||
* the field.</dd>
|
||||
* <dt>Any Domain type</dt>
|
||||
* <dd>Domain types can only be mapped if the result of the actual {@code distinct()} operation returns
* {@link org.bson.BsonType#DOCUMENT}.</dd>
|
||||
* <dt>{@link org.bson.BsonValue}</dt>
|
||||
* <dd>Using {@link org.bson.BsonValue} allows retrieval of the raw driver specific format, which returns eg.
|
||||
* {@link org.bson.BsonString}.</dd>
|
||||
* </dl>
|
||||
*
|
||||
* @param resultType must not be {@literal null}.
|
||||
* @param <R> result type.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
*/
|
||||
<R> TerminatingDistinct<R> as(Class<R> resultType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Result restrictions. Optional.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface DistinctWithQuery<T> extends DistinctWithProjection {
|
||||
|
||||
/**
|
||||
* Set the filter query to be used.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
*/
|
||||
TerminatingDistinct<T> matching(Query query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Terminating distinct find operations.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface TerminatingDistinct<T> extends DistinctWithQuery<T> {
|
||||
|
||||
/**
|
||||
* Get all matching distinct field values.
|
||||
*
|
||||
* @return empty {@link List} if no match found. Never {@literal null}.
* @throws DataAccessException if eg. result cannot be converted correctly which may happen if the document contains
|
||||
* {@link String} whereas the result type is specified as {@link Long}.
|
||||
*/
|
||||
List<T> all();
}
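A usage sketch of the fluent distinct API documented above; "template", Person and the field names are illustrative assumptions.

import java.util.List;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class DistinctUsageSketch {

    // Hypothetical document type used only for this example.
    static class Person {
        String lastname;
        boolean active;
    }

    List<String> activeLastnames(MongoTemplate template) {

        return template.query(Person.class)                                  // ExecutableFind
                .distinct("lastname")                                         // FindDistinct#distinct
                .matching(Query.query(Criteria.where("active").is(true)))     // DistinctWithQuery#matching
                .as(String.class)                                             // DistinctWithProjection#as
                .all();                                                       // TerminatingDistinct#all
    }
}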
/**
|
||||
@@ -320,5 +222,5 @@ public interface ExecutableFindOperation {
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface ExecutableFind<T> extends FindWithCollection<T>, FindWithProjection<T>, FindDistinct {}
|
||||
interface ExecutableFind<T> extends FindWithCollection<T>, FindWithProjection<T> {}
|
||||
}
|
||||
|
||||
@@ -45,12 +45,24 @@ import com.mongodb.client.FindIterable;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final @NonNull MongoTemplate template;
|
||||
private final MongoTemplate template;
|
||||
|
||||
/**
|
||||
* Create new {@link ExecutableFindOperationSupport}.
|
||||
*
|
||||
* @param template must not be {@literal null}.
|
||||
* @throws IllegalArgumentException if template is {@literal null}.
|
||||
*/
|
||||
ExecutableFindOperationSupport(MongoTemplate template) {
|
||||
|
||||
Assert.notNull(template, "Template must not be null!");
|
||||
|
||||
this.template = template;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -193,19 +205,6 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
||||
return template.exists(query, domainType, getCollectionName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindDistinct#distinct(java.lang.String)
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public TerminatingDistinct<Object> distinct(String field) {
|
||||
|
||||
Assert.notNull(field, "Field must not be null!");
|
||||
|
||||
return new DistinctOperationSupport(this, field);
|
||||
}
|
||||
|
||||
private List<T> doFind(@Nullable CursorPreparer preparer) {
|
||||
|
||||
Document queryObject = query.getQueryObject();
|
||||
@@ -215,12 +214,6 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
||||
getCursorPreparer(query, preparer));
|
||||
}
|
||||
|
||||
private List<T> doFindDistinct(String field) {
|
||||
|
||||
return template.findDistinct(query, field, getCollectionName(), domainType,
|
||||
returnType == domainType ? (Class<T>) Object.class : returnType);
|
||||
}
|
||||
|
||||
private CloseableIterator<T> doStream() {
|
||||
return template.doStream(query, domainType, getCollectionName(), returnType);
|
||||
}
|
||||
@@ -230,7 +223,7 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
|
||||
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
|
||||
}
|
||||
|
||||
private String asString() {
|
||||
@@ -268,54 +261,4 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
static class DistinctOperationSupport<T> implements TerminatingDistinct<T> {
|
||||
|
||||
private final String field;
|
||||
private final ExecutableFindSupport<T> delegate;
|
||||
|
||||
public DistinctOperationSupport(ExecutableFindSupport<T> delegate, String field) {
|
||||
|
||||
this.delegate = delegate;
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.DistinctWithProjection#as(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public <R> TerminatingDistinct<R> as(Class<R> resultType) {
|
||||
|
||||
Assert.notNull(resultType, "ResultType must not be null!");
|
||||
|
||||
return new DistinctOperationSupport<>((ExecutableFindSupport) delegate.as(resultType), field);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.DistinctWithQuery#matching(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingDistinct<T> matching(Query query) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
|
||||
return new DistinctOperationSupport<>((ExecutableFindSupport<T>) delegate.matching(query), field);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingDistinct#all()
|
||||
*/
|
||||
@Override
|
||||
public List<T> all() {
|
||||
return delegate.doFindDistinct(field);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -63,19 +63,17 @@ public interface ExecutableInsertOperation {
|
||||
* Insert exactly one object.
|
||||
*
|
||||
* @param object must not be {@literal null}.
|
||||
* @return the inserted object.
|
||||
* @throws IllegalArgumentException if object is {@literal null}.
|
||||
*/
|
||||
T one(T object);
|
||||
void one(T object);
|
||||
|
||||
/**
|
||||
* Insert a collection of objects.
|
||||
*
|
||||
* @param objects must not be {@literal null}.
|
||||
* @return the inserted objects.
|
||||
* @throws IllegalArgumentException if objects is {@literal null}.
|
||||
*/
|
||||
Collection<? extends T> all(Collection<? extends T> objects);
|
||||
void all(Collection<? extends T> objects);
|
||||
}
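For orientation, a minimal sketch of how the terminating insert methods above are reached from the fluent insert(Class) entry point. Illustrative only, not part of these commits; Person and the collection name are assumptions.

import org.springframework.data.mongodb.core.MongoTemplate;

class InsertUsageSketch {

    // Person is an assumed mapped domain type; "star-wars" is an illustrative collection name.
    static class Person {
        String firstname;
    }

    void insertOne(MongoTemplate template, Person luke) {

        template.insert(Person.class)        // fluent insert entry point
                .inCollection("star-wars")   // optional collection override
                .one(luke);                  // TerminatingInsert#one(T)
    }
}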
|
||||
/**
|
||||
|
||||
@@ -37,10 +37,22 @@ import com.mongodb.bulk.BulkWriteResult;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableInsertOperationSupport implements ExecutableInsertOperation {
|
||||
|
||||
private final @NonNull MongoTemplate template;
|
||||
private final MongoTemplate template;
|
||||
|
||||
/**
|
||||
* Create new {@link ExecutableInsertOperationSupport}.
|
||||
*
|
||||
* @param template must not be {@literal null}.
|
||||
* @throws IllegalArgumentException if template is {@literal null}.
|
||||
*/
|
||||
ExecutableInsertOperationSupport(MongoTemplate template) {
|
||||
|
||||
Assert.notNull(template, "Template must not be null!");
|
||||
|
||||
this.template = template;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -72,11 +84,11 @@ class ExecutableInsertOperationSupport implements ExecutableInsertOperation {
|
||||
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation.TerminatingInsert#insert(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public T one(T object) {
|
||||
public void one(T object) {
|
||||
|
||||
Assert.notNull(object, "Object must not be null!");
|
||||
|
||||
return template.insert(object, getCollectionName());
|
||||
template.insert(object, getCollectionName());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -84,11 +96,11 @@ class ExecutableInsertOperationSupport implements ExecutableInsertOperation {
|
||||
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation.TerminatingInsert#all(java.util.Collection)
|
||||
*/
|
||||
@Override
|
||||
public Collection<T> all(Collection<? extends T> objects) {
|
||||
public void all(Collection<? extends T> objects) {
|
||||
|
||||
Assert.notNull(objects, "Objects must not be null!");
|
||||
|
||||
return template.insert(objects, getCollectionName());
|
||||
template.insert(objects, getCollectionName());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -129,7 +141,7 @@ class ExecutableInsertOperationSupport implements ExecutableInsertOperation {
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
|
||||
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,199 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
/**
|
||||
* {@link ExecutableMapReduceOperation} allows creation and execution of MongoDB mapReduce operations in a fluent API
|
||||
* style. The starting {@literal domainType} is used for mapping an optional {@link Query} provided via {@code matching}
* into the MongoDB specific representation. By default, the originating {@literal domainType} is also used for mapping
* back the results from the {@link org.bson.Document}. However, it is possible to define a different
* {@literal returnType} via {@code as} for mapping the result.<br />
* The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
* via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the
* collection name for the execution.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* mapReduce(Human.class)
|
||||
* .map("function() { emit(this.id, this.firstname) }")
|
||||
* .reduce("function(id, name) { return sum(id, name); }")
|
||||
* .inCollection("star-wars")
|
||||
* .as(Jedi.class)
|
||||
* .matching(query(where("lastname").is("skywalker")))
|
||||
* .all();
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
public interface ExecutableMapReduceOperation {
|
||||
|
||||
/**
|
||||
* Start creating a mapReduce operation for the given {@literal domainType}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return new instance of {@link ExecutableFind}.
|
||||
* @throws IllegalArgumentException if domainType is {@literal null}.
|
||||
*/
|
||||
<T> MapReduceWithMapFunction<T> mapReduce(Class<T> domainType);
|
||||
|
||||
/**
|
||||
* Trigger mapReduce execution by calling one of the terminating methods.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface TerminatingMapReduce<T> {
|
||||
|
||||
/**
|
||||
* Get the mapReduce results.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
List<T> all();
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide the Javascript {@code function()} used to map matching documents.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface MapReduceWithMapFunction<T> {
|
||||
|
||||
/**
|
||||
* Set the Javascript map {@code function()}.
|
||||
*
|
||||
* @param mapFunction must not be {@literal null} nor empty.
|
||||
* @return new instance of {@link MapReduceWithReduceFunction}.
|
||||
* @throws IllegalArgumentException if {@literal mapFunction} is {@literal null} or empty.
|
||||
*/
|
||||
MapReduceWithReduceFunction<T> map(String mapFunction);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide the Javascript {@code function()} used to reduce matching documents.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface MapReduceWithReduceFunction<T> {
|
||||
|
||||
/**
|
||||
* Set the Javascript reduce {@code function()}.
|
||||
*
|
||||
* @param reduceFunction must not be {@literal null} nor empty.
|
||||
* @return new instance of {@link ExecutableMapReduce}.
|
||||
* @throws IllegalArgumentException if {@literal reduceFunction} is {@literal null} or empty.
|
||||
*/
|
||||
ExecutableMapReduce<T> reduce(String reduceFunction);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Collection override (Optional).
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface MapReduceWithCollection<T> extends MapReduceWithQuery<T> {
|
||||
|
||||
/**
|
||||
* Explicitly set the name of the collection to perform the mapReduce operation on. <br />
|
||||
* Skip this step to use the default collection derived from the domain type.
|
||||
*
|
||||
* @param collection must not be {@literal null} nor {@literal empty}.
|
||||
* @return new instance of {@link MapReduceWithProjection}.
|
||||
* @throws IllegalArgumentException if collection is {@literal null}.
|
||||
*/
|
||||
MapReduceWithProjection<T> inCollection(String collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Input document filter query (Optional).
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface MapReduceWithQuery<T> extends TerminatingMapReduce<T> {
|
||||
|
||||
/**
|
||||
* Set the filter query to be used.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingMapReduce}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingMapReduce<T> matching(Query query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Result type override (Optional).
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface MapReduceWithProjection<T> extends MapReduceWithQuery<T> {
|
||||
|
||||
/**
|
||||
* Define the target type the fields should be mapped to. <br />
* Skip this step if you are only interested in the original domain type anyway.
|
||||
*
|
||||
* @param resultType must not be {@literal null}.
|
||||
* @param <R> result type.
|
||||
* @return new instance of {@link TerminatingMapReduce}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
*/
|
||||
<R> MapReduceWithQuery<R> as(Class<R> resultType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Additional mapReduce options (Optional).
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface MapReduceWithOptions<T> {
|
||||
|
||||
/**
|
||||
* Set additional options to apply to the mapReduce operation.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return new instance of {@link ExecutableMapReduce}.
|
||||
* @throws IllegalArgumentException if options is {@literal null}.
|
||||
*/
|
||||
ExecutableMapReduce<T> with(MapReduceOptions options);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ExecutableMapReduce} provides methods for constructing mapReduce operations in a fluent way.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface ExecutableMapReduce<T> extends MapReduceWithMapFunction<T>, MapReduceWithReduceFunction<T>,
|
||||
MapReduceWithCollection<T>, MapReduceWithProjection<T>, MapReduceWithOptions<T> {
|
||||
|
||||
}
|
||||
}
|
||||
@@ -1,177 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Implementation of {@link ExecutableMapReduceOperation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final @NonNull MongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation#mapReduce(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T> ExecutableMapReduceSupport<T> mapReduce(Class<T> domainType) {
|
||||
|
||||
Assert.notNull(domainType, "DomainType must not be null!");
|
||||
|
||||
return new ExecutableMapReduceSupport<>(template, domainType, domainType, null, ALL_QUERY, null, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
static class ExecutableMapReduceSupport<T>
|
||||
implements ExecutableMapReduce<T>, MapReduceWithOptions<T>, MapReduceWithCollection<T>,
|
||||
MapReduceWithProjection<T>, MapReduceWithQuery<T>, MapReduceWithReduceFunction<T>, MapReduceWithMapFunction<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Class<?> domainType;
|
||||
private final Class<T> returnType;
|
||||
private final @Nullable String collection;
|
||||
private final Query query;
|
||||
private final @Nullable String mapFunction;
|
||||
private final @Nullable String reduceFunction;
|
||||
private final @Nullable MapReduceOptions options;
|
||||
|
||||
ExecutableMapReduceSupport(MongoTemplate template, Class<?> domainType, Class<T> returnType,
|
||||
@Nullable String collection, Query query, @Nullable String mapFunction, @Nullable String reduceFunction,
|
||||
@Nullable MapReduceOptions options) {
|
||||
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.returnType = returnType;
|
||||
this.collection = collection;
|
||||
this.query = query;
|
||||
this.mapFunction = mapFunction;
|
||||
this.reduceFunction = reduceFunction;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.TerminatingMapReduce#all()
|
||||
*/
|
||||
@Override
|
||||
public List<T> all() {
|
||||
return template.mapReduce(query, domainType, getCollectionName(), mapFunction, reduceFunction, options,
|
||||
returnType);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithCollection#inCollection(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MapReduceWithProjection<T> inCollection(String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection name must not be null nor empty!");
|
||||
|
||||
return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
|
||||
reduceFunction, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithQuery#matching(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingMapReduce<T> matching(Query query) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
|
||||
return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
|
||||
reduceFunction, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithProjection#as(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <R> MapReduceWithQuery<R> as(Class<R> resultType) {
|
||||
|
||||
Assert.notNull(resultType, "ResultType must not be null!");
|
||||
|
||||
return new ExecutableMapReduceSupport<>(template, domainType, resultType, collection, query, mapFunction,
|
||||
reduceFunction, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithOptions#with(org.springframework.data.mongodb.core.mapreduce.MapReduceOptions)
|
||||
*/
|
||||
@Override
|
||||
public ExecutableMapReduce<T> with(MapReduceOptions options) {
|
||||
|
||||
Assert.notNull(options, "Options must not be null! Please consider empty MapReduceOptions#options() instead.");
|
||||
|
||||
return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
|
||||
reduceFunction, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithMapFunction#map(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MapReduceWithReduceFunction<T> map(String mapFunction) {
|
||||
|
||||
Assert.hasText(mapFunction, "MapFunction name must not be null nor empty!");
|
||||
|
||||
return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
|
||||
reduceFunction, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithReduceFunction#reduce(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public ExecutableMapReduce<T> reduce(String reduceFunction) {
|
||||
|
||||
Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty!");
|
||||
|
||||
return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
|
||||
reduceFunction, options);
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -53,37 +53,6 @@ public interface ExecutableRemoveOperation {
|
||||
*/
|
||||
<T> ExecutableRemove<T> remove(Class<T> domainType);
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface TerminatingRemove<T> {
|
||||
|
||||
/**
|
||||
* Remove all documents matching.
|
||||
*
|
||||
* @return the {@link DeleteResult}. Never {@literal null}.
|
||||
*/
|
||||
DeleteResult all();
|
||||
|
||||
/**
|
||||
* Remove the first matching document.
|
||||
*
|
||||
* @return the {@link DeleteResult}. Never {@literal null}.
|
||||
*/
|
||||
DeleteResult one();
|
||||
|
||||
/**
|
||||
* Remove and return all matching documents. <br/>
|
||||
* <strong>NOTE</strong> The entire list of documents will be fetched before sending the actual delete commands.
|
||||
* Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete
|
||||
* operation.
|
||||
*
|
||||
* @return empty {@link List} if no match found. Never {@literal null}.
|
||||
*/
|
||||
List<T> findAndRemove();
|
||||
}
|
||||
|
||||
/**
|
||||
* Collection override (optional).
|
||||
*
|
||||
@@ -104,6 +73,29 @@ public interface ExecutableRemoveOperation {
|
||||
RemoveWithQuery<T> inCollection(String collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface TerminatingRemove<T> {
|
||||
|
||||
/**
|
||||
* Remove all documents matching.
|
||||
*
|
||||
* @return the {@link DeleteResult}. Never {@literal null}.
|
||||
*/
|
||||
DeleteResult all();
|
||||
|
||||
/**
|
||||
* Remove and return all matching documents. <br/>
|
||||
* <strong>NOTE</strong> The entire list of documents will be fetched before sending the actual delete commands.
|
||||
* Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete
|
||||
* operation.
|
||||
*
|
||||
* @return empty {@link List} if no match found. Never {@literal null}.
|
||||
*/
|
||||
List<T> findAndRemove();
|
||||
}
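For orientation, a minimal sketch of chaining the remove contracts above from the fluent remove(Class) entry point. Illustrative only, not part of these commits; Person and the query values are assumptions.

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.MongoTemplate;

import com.mongodb.client.result.DeleteResult;

class RemoveUsageSketch {

    // Person is an assumed mapped domain type.
    static class Person {
        String lastname;
    }

    DeleteResult removeSkywalkers(MongoTemplate template) {

        return template.remove(Person.class)                         // fluent remove entry point
                .matching(query(where("lastname").is("skywalker")))  // RemoveWithQuery#matching(Query)
                .all();                                               // TerminatingRemove#all()
    }
}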
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
|
||||
@@ -36,12 +36,24 @@ import com.mongodb.client.result.DeleteResult;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableRemoveOperationSupport implements ExecutableRemoveOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final @NonNull MongoTemplate template;
private final MongoTemplate template;

/**
* Create new {@link ExecutableRemoveOperationSupport}.
*
* @param template must not be {@literal null}.
* @throws IllegalArgumentException if template is {@literal null}.
*/
ExecutableRemoveOperationSupport(MongoTemplate template) {

Assert.notNull(template, "Template must not be null!");

this.template = template;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -98,16 +110,10 @@ class ExecutableRemoveOperationSupport implements ExecutableRemoveOperation {
|
||||
*/
|
||||
@Override
|
||||
public DeleteResult all() {
|
||||
return template.doRemove(getCollectionName(), query, domainType, true);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.TerminatingRemove#one()
|
||||
*/
|
||||
@Override
|
||||
public DeleteResult one() {
|
||||
return template.doRemove(getCollectionName(), query, domainType, false);
|
||||
String collectionName = getCollectionName();
|
||||
|
||||
return template.doRemove(collectionName, query, domainType);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -123,7 +129,7 @@ class ExecutableRemoveOperationSupport implements ExecutableRemoveOperation {
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
|
||||
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,13 +19,13 @@ import java.util.Optional;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.client.result.UpdateResult;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* {@link ExecutableUpdateOperation} allows creation and execution of MongoDB update / findAndModify / findAndReplace
|
||||
* operations in a fluent API style. <br />
|
||||
* {@link ExecutableUpdateOperation} allows creation and execution of MongoDB update / findAndModify operations in a
|
||||
* fluent API style. <br />
|
||||
* The starting {@literal domainType} is used for mapping the {@link Query} provided via {@code matching}, as well as
|
||||
* the {@link Update} via {@code apply} into the MongoDB specific representations. The collection to operate on is by
|
||||
* default derived from the initial {@literal domainType} and can be defined there via
|
||||
@@ -57,91 +57,6 @@ public interface ExecutableUpdateOperation {
|
||||
*/
|
||||
<T> ExecutableUpdate<T> update(Class<T> domainType);
|
||||
|
||||
/**
|
||||
* Trigger findAndModify execution by calling one of the terminating methods.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
interface TerminatingFindAndModify<T> {
|
||||
|
||||
/**
|
||||
* Find, modify and return the first matching document.
|
||||
*
|
||||
* @return {@link Optional#empty()} if nothing found.
|
||||
*/
|
||||
default Optional<T> findAndModify() {
|
||||
return Optional.ofNullable(findAndModifyValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* Find, modify and return the first matching document.
|
||||
*
|
||||
* @return {@literal null} if nothing found.
|
||||
*/
|
||||
@Nullable
|
||||
T findAndModifyValue();
|
||||
}
|
||||
|
||||
/**
|
||||
* Trigger
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* execution by calling one of the terminating methods.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
interface TerminatingFindAndReplace<T> {
|
||||
|
||||
/**
|
||||
* Find, replace and return the first matching document.
|
||||
*
|
||||
* @return {@link Optional#empty()} if nothing found.
|
||||
*/
|
||||
default Optional<T> findAndReplace() {
|
||||
return Optional.ofNullable(findAndReplaceValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* Find, replace and return the first matching document.
|
||||
*
|
||||
* @return {@literal null} if nothing found.
|
||||
*/
|
||||
@Nullable
|
||||
T findAndReplaceValue();
|
||||
}
|
||||
|
||||
/**
|
||||
* Trigger update execution by calling one of the terminating methods.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
interface TerminatingUpdate<T> extends TerminatingFindAndModify<T>, FindAndModifyWithOptions<T> {
|
||||
|
||||
/**
|
||||
* Update all matching documents in the collection.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
UpdateResult all();
|
||||
|
||||
/**
|
||||
* Update the first document in the collection.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
UpdateResult first();
|
||||
|
||||
/**
|
||||
* Creates a new document if no documents match the filter query or updates the matching ones.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
UpdateResult upsert();
|
||||
}
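For orientation, a minimal sketch of chaining the update contracts above from the fluent update(Class) entry point. Illustrative only, not part of these commits; Person and the field values are assumptions.

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.client.result.UpdateResult;

class UpdateUsageSketch {

    // Person is an assumed mapped domain type.
    static class Person {
        String firstname;
    }

    UpdateResult renameFirstMatch(MongoTemplate template) {

        return template.update(Person.class)                          // fluent update entry point
                .matching(query(where("firstname").is("anakin")))     // UpdateWithQuery#matching(Query)
                .apply(new Update().set("firstname", "darth"))        // UpdateWithUpdate#apply(Update)
                .first();                                              // TerminatingUpdate#first()
    }
}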
|
||||
/**
|
||||
* Declare the {@link Update} to apply.
|
||||
*
|
||||
@@ -158,16 +73,6 @@ public interface ExecutableUpdateOperation {
|
||||
* @throws IllegalArgumentException if update is {@literal null}.
|
||||
*/
|
||||
TerminatingUpdate<T> apply(Update update);
|
||||
|
||||
/**
|
||||
* Specify {@code replacement} object.
|
||||
*
|
||||
* @param replacement must not be {@literal null}.
|
||||
* @return new instance of {@link FindAndReplaceOptions}.
|
||||
* @throws IllegalArgumentException if options is {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
FindAndReplaceWithProjection<T> replaceWith(T replacement);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -226,43 +131,56 @@ public interface ExecutableUpdateOperation {
|
||||
}
|
||||
|
||||
/**
|
||||
* Define {@link FindAndReplaceOptions}.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
* Trigger findAndModify execution by calling one of the terminating methods.
|
||||
*/
|
||||
interface FindAndReplaceWithOptions<T> extends TerminatingFindAndReplace<T> {
|
||||
interface TerminatingFindAndModify<T> {
|
||||
|
||||
/**
|
||||
* Explicitly define {@link FindAndReplaceOptions} for the {@link Update}.
|
||||
* Find, modify and return the first matching document.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return new instance of {@link FindAndReplaceOptions}.
|
||||
* @throws IllegalArgumentException if options is {@literal null}.
|
||||
* @return {@link Optional#empty()} if nothing found.
|
||||
*/
|
||||
FindAndReplaceWithProjection<T> withOptions(FindAndReplaceOptions options);
|
||||
default Optional<T> findAndModify() {
|
||||
return Optional.ofNullable(findAndModifyValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* Find, modify and return the first matching document.
|
||||
*
|
||||
* @return {@literal null} if nothing found.
|
||||
*/
|
||||
@Nullable
|
||||
T findAndModifyValue();
|
||||
}
|
||||
|
||||
/**
|
||||
* Result type override (Optional).
|
||||
* Trigger update execution by calling one of the terminating methods.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
* @since 2.0
|
||||
*/
|
||||
interface FindAndReplaceWithProjection<T> extends FindAndReplaceWithOptions<T> {
|
||||
interface TerminatingUpdate<T> extends TerminatingFindAndModify<T>, FindAndModifyWithOptions<T> {
|
||||
|
||||
/**
|
||||
* Define the target type the fields should be mapped to. <br />
* Skip this step if you are only interested in the original domain type anyway.
|
||||
* Update all matching documents in the collection.
|
||||
*
|
||||
* @param resultType must not be {@literal null}.
|
||||
* @param <R> result type.
|
||||
* @return new instance of {@link FindAndReplaceWithProjection}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
<R> FindAndReplaceWithOptions<R> as(Class<R> resultType);
|
||||
UpdateResult all();
|
||||
|
||||
/**
|
||||
* Update the first document in the collection.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
UpdateResult first();
|
||||
|
||||
/**
|
||||
* Creates a new document if no documents match the filter query or updates the matching ones.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
UpdateResult upsert();
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -35,12 +35,23 @@ import com.mongodb.client.result.UpdateResult;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final @NonNull MongoTemplate template;
|
||||
private final MongoTemplate template;
|
||||
|
||||
/**
|
||||
* Creates new {@link ExecutableUpdateOperationSupport}.
|
||||
*
|
||||
* @param template must not be {@literal null}.
|
||||
*/
|
||||
ExecutableUpdateOperationSupport(MongoTemplate template) {
|
||||
|
||||
Assert.notNull(template, "Template must not be null!");
|
||||
|
||||
this.template = template;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -51,7 +62,7 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
|
||||
Assert.notNull(domainType, "DomainType must not be null!");
|
||||
|
||||
return new ExecutableUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null, null, null, domainType);
|
||||
return new ExecutableUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -61,18 +72,14 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
@RequiredArgsConstructor
|
||||
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
|
||||
static class ExecutableUpdateSupport<T>
|
||||
implements ExecutableUpdate<T>, UpdateWithCollection<T>, UpdateWithQuery<T>, TerminatingUpdate<T>,
|
||||
FindAndReplaceWithOptions<T>, TerminatingFindAndReplace<T>, FindAndReplaceWithProjection<T> {
|
||||
implements ExecutableUpdate<T>, UpdateWithCollection<T>, UpdateWithQuery<T>, TerminatingUpdate<T> {
|
||||
|
||||
@NonNull MongoTemplate template;
|
||||
@NonNull Class domainType;
|
||||
@NonNull Class<T> domainType;
|
||||
Query query;
|
||||
@Nullable Update update;
|
||||
@Nullable String collection;
|
||||
@Nullable FindAndModifyOptions findAndModifyOptions;
|
||||
@Nullable FindAndReplaceOptions findAndReplaceOptions;
|
||||
@Nullable Object replacement;
|
||||
@NonNull Class<T> targetType;
|
||||
@Nullable FindAndModifyOptions options;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -83,8 +90,7 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions,
|
||||
findAndReplaceOptions, replacement, targetType);
|
||||
return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, options);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -96,8 +102,7 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
|
||||
Assert.hasText(collection, "Collection must not be null nor empty!");
|
||||
|
||||
return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions,
|
||||
findAndReplaceOptions, replacement, targetType);
|
||||
return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, options);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -109,34 +114,7 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
|
||||
Assert.notNull(options, "Options must not be null!");
|
||||
|
||||
return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, options,
|
||||
findAndReplaceOptions, replacement, targetType);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#replaceWith(Object)
|
||||
*/
|
||||
@Override
|
||||
public FindAndReplaceWithProjection<T> replaceWith(T replacement) {
|
||||
|
||||
Assert.notNull(replacement, "Replacement must not be null!");
|
||||
|
||||
return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions,
|
||||
findAndReplaceOptions, replacement, targetType);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.FindAndReplaceWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndReplaceOptions)
|
||||
*/
|
||||
@Override
|
||||
public FindAndReplaceWithProjection<T> withOptions(FindAndReplaceOptions options) {
|
||||
|
||||
Assert.notNull(options, "Options must not be null!");
|
||||
|
||||
return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions,
|
||||
options, replacement, targetType);
|
||||
return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, options);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -148,21 +126,7 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
|
||||
return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions,
|
||||
findAndReplaceOptions, replacement, targetType);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.FindAndReplaceWithProjection#as(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <R> FindAndReplaceWithOptions<R> as(Class<R> resultType) {
|
||||
|
||||
Assert.notNull(resultType, "ResultType must not be null!");
|
||||
|
||||
return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions,
|
||||
findAndReplaceOptions, replacement, resultType);
|
||||
return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, options);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -198,22 +162,7 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
*/
|
||||
@Override
|
||||
public @Nullable T findAndModifyValue() {
|
||||
|
||||
return template.findAndModify(query, update,
|
||||
findAndModifyOptions != null ? findAndModifyOptions : new FindAndModifyOptions(), targetType,
|
||||
getCollectionName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingFindAndReplace#findAndReplaceValue()
|
||||
*/
|
||||
@Override
|
||||
public @Nullable T findAndReplaceValue() {
|
||||
|
||||
return (T) template.findAndReplace(query, replacement,
|
||||
findAndReplaceOptions != null ? findAndReplaceOptions : FindAndReplaceOptions.empty(), domainType,
|
||||
getCollectionName(), targetType);
|
||||
return template.findAndModify(query, update, options != null ? options : new FindAndModifyOptions(), domainType, getCollectionName());
|
||||
}
|
||||
|
||||
private UpdateResult doUpdate(boolean multi, boolean upsert) {
|
||||
@@ -221,7 +170,7 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
|
||||
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -33,55 +33,18 @@ public class FindAndModifyOptions {
|
||||
|
||||
private @Nullable Collation collation;
|
||||
|
||||
private static final FindAndModifyOptions NONE = new FindAndModifyOptions() {
|
||||
|
||||
private static final String ERROR_MSG = "FindAndModifyOptions.none() cannot be changed. Please use FindAndModifyOptions.options() instead.";
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions returnNew(boolean returnNew) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions upsert(boolean upsert) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions remove(boolean remove) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions collation(@Nullable Collation collation) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Static factory method to create a FindAndModifyOptions instance
|
||||
*
|
||||
* @return new instance of {@link FindAndModifyOptions}.
|
||||
* @return a new instance
|
||||
*/
|
||||
public static FindAndModifyOptions options() {
|
||||
return new FindAndModifyOptions();
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method returning an unmodifiable {@link FindAndModifyOptions} instance.
|
||||
*
|
||||
* @return unmodifiable {@link FindAndModifyOptions} instance.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static FindAndModifyOptions none() {
|
||||
return NONE;
|
||||
}
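A brief illustrative contrast (not part of these commits) of the mutable options() factory and the shared, unmodifiable none() instance defined above:

import org.springframework.data.mongodb.core.FindAndModifyOptions;

class FindAndModifyOptionsSketch {

    void configure() {

        // Mutable options configured for a single call:
        FindAndModifyOptions perCall = FindAndModifyOptions.options().returnNew(true).upsert(true);

        // Shared, unmodifiable defaults; any mutator call throws UnsupportedOperationException:
        FindAndModifyOptions shared = FindAndModifyOptions.none();
    }
}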
|
||||
/**
|
||||
* Create new {@link FindAndModifyOptions} based on the options of the given {@literal source}.
|
||||
*
|
||||
* @param source can be {@literal null}.
|
||||
* @return new instance of {@link FindAndModifyOptions}.
|
||||
* @param options
|
||||
* @return
|
||||
* @since 2.0
|
||||
*/
|
||||
public static FindAndModifyOptions of(@Nullable FindAndModifyOptions source) {
|
||||
|
||||
@@ -1,134 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
/**
|
||||
* Options for
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>.
|
||||
* <br />
|
||||
* Defaults to
|
||||
* <dl>
|
||||
* <dt>returnNew</dt>
|
||||
* <dd>false</dd>
|
||||
* <dt>upsert</dt>
|
||||
* <dd>false</dd>
|
||||
* </dl>
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
public class FindAndReplaceOptions {
|
||||
|
||||
private boolean returnNew;
|
||||
private boolean upsert;
|
||||
|
||||
private static final FindAndReplaceOptions NONE = new FindAndReplaceOptions() {
|
||||
|
||||
private static final String ERROR_MSG = "FindAndReplaceOptions.none() cannot be changed. Please use FindAndReplaceOptions.options() instead.";
|
||||
|
||||
@Override
|
||||
public FindAndReplaceOptions returnNew() {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndReplaceOptions upsert() {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link FindAndReplaceOptions} instance.
|
||||
* <dl>
|
||||
* <dt>returnNew</dt>
|
||||
* <dd>false</dd>
|
||||
* <dt>upsert</dt>
|
||||
* <dd>false</dd>
|
||||
* </dl>
|
||||
*
|
||||
* @return new instance of {@link FindAndReplaceOptions}.
|
||||
*/
|
||||
public static FindAndReplaceOptions options() {
|
||||
return new FindAndReplaceOptions();
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method returning an unmodifiable {@link FindAndReplaceOptions} instance.
|
||||
*
|
||||
* @return unmodifiable {@link FindAndReplaceOptions} instance.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static FindAndReplaceOptions none() {
|
||||
return NONE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link FindAndReplaceOptions} instance with
|
||||
* <dl>
|
||||
* <dt>returnNew</dt>
|
||||
* <dd>false</dd>
|
||||
* <dt>upsert</dt>
|
||||
* <dd>false</dd>
|
||||
* </dl>
|
||||
*
|
||||
* @return new instance of {@link FindAndReplaceOptions}.
|
||||
*/
|
||||
public static FindAndReplaceOptions empty() {
|
||||
return new FindAndReplaceOptions();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the replacement document.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public FindAndReplaceOptions returnNew() {
|
||||
|
||||
this.returnNew = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Insert a new document if none exists.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public FindAndReplaceOptions upsert() {
|
||||
|
||||
this.upsert = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the bit indicating whether to return the replacement document.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean isReturnNew() {
|
||||
return returnNew;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the bit indicating whether to create a new document if none exists.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean isUpsert() {
|
||||
return upsert;
|
||||
}
|
||||
|
||||
}
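For orientation, a minimal sketch of how these options plug into the fluent findOneAndReplace flow declared earlier in this comparison (replaceWith, withOptions, findAndReplace). Illustrative only, not part of these commits; Person and the query values are assumptions.

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import java.util.Optional;

import org.springframework.data.mongodb.core.FindAndReplaceOptions;
import org.springframework.data.mongodb.core.MongoTemplate;

class FindAndReplaceUsageSketch {

    // Person is an assumed mapped domain type; the replacement instance is illustrative.
    static class Person {
        String firstname;
    }

    Optional<Person> replace(MongoTemplate template, Person replacement) {

        return template.update(Person.class)                               // fluent update entry point
                .matching(query(where("firstname").is("anakin")))          // filter for the document to replace
                .replaceWith(replacement)                                   // UpdateWithUpdate#replaceWith(T)
                .withOptions(FindAndReplaceOptions.options().returnNew())   // FindAndReplaceWithOptions#withOptions
                .findAndReplace();                                           // TerminatingFindAndReplace#findAndReplace()
    }
}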
@@ -22,4 +22,4 @@ package org.springframework.data.mongodb.core;
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface FluentMongoOperations extends ExecutableFindOperation, ExecutableInsertOperation,
|
||||
ExecutableUpdateOperation, ExecutableRemoveOperation, ExecutableAggregationOperation, ExecutableMapReduceOperation {}
|
||||
ExecutableUpdateOperation, ExecutableRemoveOperation, ExecutableAggregationOperation {}
|
||||
|
||||
@@ -1,150 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.util.StreamUtils;
|
||||
|
||||
import com.mongodb.client.model.Filters;
|
||||
|
||||
/**
|
||||
* A MongoDB document in its mapped state, i.e. after a source document has been mapped using the mapping information of
* the entity it was supposed to represent.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @since 2.1
|
||||
*/
|
||||
@RequiredArgsConstructor(staticName = "of")
|
||||
public class MappedDocument {
|
||||
|
||||
private static final String ID_FIELD = "_id";
|
||||
private static final Document ID_ONLY_PROJECTION = new Document(ID_FIELD, 1);
|
||||
|
||||
private final @Getter Document document;
|
||||
|
||||
public static Document getIdOnlyProjection() {
|
||||
return ID_ONLY_PROJECTION;
|
||||
}
|
||||
|
||||
public static Document getIdIn(Collection<?> ids) {
|
||||
return new Document(ID_FIELD, new Document("$in", ids));
|
||||
}
|
||||
|
||||
public static List<Object> toIds(Collection<Document> documents) {
|
||||
|
||||
return documents.stream()//
|
||||
.map(it -> it.get(ID_FIELD))//
|
||||
.collect(StreamUtils.toUnmodifiableList());
|
||||
}
|
||||
|
||||
public boolean hasId() {
|
||||
return document.containsKey(ID_FIELD);
|
||||
}
|
||||
|
||||
public boolean hasNonNullId() {
|
||||
return hasId() && document.get(ID_FIELD) != null;
|
||||
}
|
||||
|
||||
public Object getId() {
|
||||
return document.get(ID_FIELD);
|
||||
}
|
||||
|
||||
public <T> T getId(Class<T> type) {
|
||||
return document.get(ID_FIELD, type);
|
||||
}
|
||||
|
||||
public boolean isIdPresent(Class<?> type) {
|
||||
return type.isInstance(getId());
|
||||
}
|
||||
|
||||
public Bson getIdFilter() {
|
||||
return Filters.eq(ID_FIELD, document.get(ID_FIELD));
|
||||
}
|
||||
|
||||
public UpdateDefinition updateWithoutId() {
|
||||
return new MappedUpdate(Update.fromDocument(document, ID_FIELD));
|
||||
}
|
||||
|
||||
/**
|
||||
* An {@link UpdateDefinition} that indicates that the {@link #getUpdateObject() update object} has already been
|
||||
* mapped to the specific domain type.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
class MappedUpdate implements UpdateDefinition {
|
||||
|
||||
private final Update delegate;
|
||||
|
||||
MappedUpdate(Update delegate) {
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#getUpdateObject()
|
||||
*/
|
||||
@Override
|
||||
public Document getUpdateObject() {
|
||||
return delegate.getUpdateObject();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#modifies(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public boolean modifies(String key) {
|
||||
return delegate.modifies(key);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#inc(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void inc(String version) {
|
||||
delegate.inc(version);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#isIsolated()
|
||||
*/
|
||||
@Override
|
||||
public Boolean isIsolated() {
|
||||
return delegate.isIsolated();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#getArrayFilters()
|
||||
*/
|
||||
@Override
|
||||
public List<ArrayFilter> getArrayFilters() {
|
||||
return delegate.getArrayFilters();
|
||||
}
|
||||
}
|
||||
}
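For orientation, a minimal sketch of typical use of the MappedDocument helpers above. Illustrative only, not part of these commits; the identifier and field values are assumptions.

import org.bson.Document;
import org.bson.conversions.Bson;

import org.springframework.data.mongodb.core.MappedDocument;
import org.springframework.data.mongodb.core.query.UpdateDefinition;

class MappedDocumentSketch {

    void example() {

        // A mapped document carrying an identifier and one already-mapped field (values are illustrative).
        MappedDocument mapped = MappedDocument.of(new Document("_id", 42L).append("firstname", "luke"));

        Bson idFilter = mapped.getIdFilter();                // filter on the document's _id
        UpdateDefinition update = mapped.updateWithoutId();  // update derived from the document, excluding _id
        boolean hasId = mapped.hasNonNullId();               // true, since _id is present and non-null
    }
}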
@@ -1,210 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder;
|
||||
import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain
|
||||
* domain type meta information which considers {@link org.springframework.data.mongodb.core.mapping.Field field names}
|
||||
* and {@link org.springframework.data.mongodb.core.convert.MongoCustomConversions custom conversions}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
*/
|
||||
class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
|
||||
private final MongoConverter converter;
|
||||
private final MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link MappingMongoJsonSchemaCreator}.
|
||||
*
|
||||
* @param converter must not be {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
MappingMongoJsonSchemaCreator(MongoConverter converter) {
|
||||
|
||||
Assert.notNull(converter, "Converter must not be null!");
|
||||
this.converter = converter;
|
||||
this.mappingContext = (MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty>) converter
|
||||
.getMappingContext();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.data.mongodb.core.MongoJsonSchemaCreator#createSchemaFor(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public MongoJsonSchema createSchemaFor(Class<?> type) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(type);
|
||||
MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder();
|
||||
|
||||
List<JsonSchemaProperty> schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity);
|
||||
schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0]));
|
||||
|
||||
return schemaBuilder.build();
|
||||
|
||||
}
|
||||
|
||||
private List<JsonSchemaProperty> computePropertiesForEntity(List<MongoPersistentProperty> path,
|
||||
MongoPersistentEntity<?> entity) {
|
||||
|
||||
List<JsonSchemaProperty> schemaProperties = new ArrayList<>();
|
||||
|
||||
for (MongoPersistentProperty nested : entity) {
|
||||
|
||||
List<MongoPersistentProperty> currentPath = new ArrayList<>(path);
|
||||
|
||||
			if (path.contains(nested)) { // cycle guard
				schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)),
						Object.class, false));
				break;
			}

			currentPath.add(nested);
			schemaProperties.add(computeSchemaForProperty(currentPath));
		}

		return schemaProperties;
	}

	private JsonSchemaProperty computeSchemaForProperty(List<MongoPersistentProperty> path) {

		MongoPersistentProperty property = CollectionUtils.lastElement(path);

		boolean required = isRequiredProperty(property);
		Class<?> rawTargetType = computeTargetType(property); // target type before conversion
		Class<?> targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type

		if (property.isEntity() && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) {
			return createObjectSchemaPropertyForEntity(path, property, required);
		}

		String fieldName = computePropertyFieldName(property);

		if (property.isCollectionLike()) {
			return createSchemaProperty(fieldName, targetType, required);
		} else if (property.isMap()) {
			return createSchemaProperty(fieldName, Type.objectType(), required);
		} else if (ClassUtils.isAssignable(Enum.class, targetType)) {
			return createEnumSchemaProperty(fieldName, targetType, required);
		}

		return createSchemaProperty(fieldName, targetType, required);
	}

	private JsonSchemaProperty createObjectSchemaPropertyForEntity(List<MongoPersistentProperty> path,
			MongoPersistentProperty property, boolean required) {

		ObjectJsonSchemaProperty target = JsonSchemaProperty.object(property.getName());
		List<JsonSchemaProperty> nestedProperties = computePropertiesForEntity(path,
				mappingContext.getRequiredPersistentEntity(property));

		return createPotentiallyRequiredSchemaProperty(
				target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required);
	}

	private JsonSchemaProperty createEnumSchemaProperty(String fieldName, Class<?> targetType, boolean required) {

		List<Object> possibleValues = new ArrayList<>();

		for (Object enumValue : EnumSet.allOf((Class) targetType)) {
			possibleValues.add(converter.convertToMongoType(enumValue));
		}

		targetType = possibleValues.isEmpty() ? targetType : possibleValues.iterator().next().getClass();
		return createSchemaProperty(fieldName, targetType, required, possibleValues);
	}

	JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required) {
		return createSchemaProperty(fieldName, type, required, Collections.emptyList());
	}

	JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required,
			Collection<?> possibleValues) {

		TypedJsonSchemaObject schemaObject = type instanceof Type ? JsonSchemaObject.of(Type.class.cast(type))
				: JsonSchemaObject.of(Class.class.cast(type));

		if (!CollectionUtils.isEmpty(possibleValues)) {
			schemaObject = schemaObject.possibleValues(possibleValues);
		}

		return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required);
	}

	private String computePropertyFieldName(PersistentProperty property) {

		return property instanceof MongoPersistentProperty ? ((MongoPersistentProperty) property).getFieldName()
				: property.getName();
	}

	private boolean isRequiredProperty(PersistentProperty property) {
		return property.getType().isPrimitive();
	}

	private Class<?> computeTargetType(PersistentProperty<?> property) {

		if (!(property instanceof MongoPersistentProperty)) {
			return property.getType();
		}

		MongoPersistentProperty mongoProperty = (MongoPersistentProperty) property;
		if (!mongoProperty.isIdProperty()) {
			return mongoProperty.getFieldType();
		}

		if (mongoProperty.hasExplicitWriteTarget()) {
			return mongoProperty.getRequiredAnnotation(Field.class).targetType().getJavaClass();
		}

		return mongoProperty.getFieldType() != mongoProperty.getActualType() ? Object.class : mongoProperty.getFieldType();
	}

	static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) {

		if (!required) {
			return property;
		}

		return JsonSchemaProperty.required(property);
	}
}
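The traversal above bottoms out in createSchemaProperty, so nested entities become object properties, enums carry their converted possible values, and primitive properties end up required. A minimal usage sketch, assuming a hypothetical Person/Address domain model and an already configured MappingMongoConverter (mappingMongoConverter); the factory method used here is the MongoJsonSchemaCreator.create(..) shown further down in this changeset, and MongoJsonSchema#toDocument() is assumed from the schema API.

// Hypothetical domain model, for illustration only.
class Address {
	String city;
	String street;
}

class Person {

	enum Gender { MALE, FEMALE }

	String name;     // simple type   -> { type: 'string' }
	int age;         // primitive     -> listed as required
	Gender gender;   // enum          -> possible values run through convertToMongoType(..)
	Address address; // nested entity -> { type: 'object', properties: { ... } }
}

// mappingMongoConverter is assumed to be the MappingMongoConverter backing the template.
MongoJsonSchemaCreator schemaCreator = MongoJsonSchemaCreator.create(mappingMongoConverter);
MongoJsonSchema schema = schemaCreator.createSchemaFor(Person.class);

// Renders the $jsonSchema document that can be handed to the server.
org.bson.Document schemaDocument = schema.toDocument();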
@@ -41,8 +41,7 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie

	private static final MongoClientOptions DEFAULT_MONGO_OPTIONS = MongoClientOptions.builder().build();

	// TODO: Mongo Driver 4 - use application name instead of description if not available
	private @Nullable String description = DEFAULT_MONGO_OPTIONS.getApplicationName();
	private @Nullable String description = DEFAULT_MONGO_OPTIONS.getDescription();
	private int minConnectionsPerHost = DEFAULT_MONGO_OPTIONS.getMinConnectionsPerHost();
	private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.getConnectionsPerHost();
	private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS
@@ -52,8 +51,6 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
	private int maxConnectionLifeTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionLifeTime();
	private int connectTimeout = DEFAULT_MONGO_OPTIONS.getConnectTimeout();
	private int socketTimeout = DEFAULT_MONGO_OPTIONS.getSocketTimeout();

	// TODO: Mongo Driver 4 - check if available
	private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.isSocketKeepAlive();
	private @Nullable ReadPreference readPreference = DEFAULT_MONGO_OPTIONS.getReadPreference();
	private DBDecoderFactory dbDecoderFactory = DEFAULT_MONGO_OPTIONS.getDbDecoderFactory();
@@ -61,8 +58,6 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
	private @Nullable WriteConcern writeConcern = DEFAULT_MONGO_OPTIONS.getWriteConcern();
	private @Nullable SocketFactory socketFactory = DEFAULT_MONGO_OPTIONS.getSocketFactory();
	private boolean cursorFinalizerEnabled = DEFAULT_MONGO_OPTIONS.isCursorFinalizerEnabled();

	// TODO: Mongo Driver 4 - remove this option
	private boolean alwaysUseMBeans = DEFAULT_MONGO_OPTIONS.isAlwaysUseMBeans();
	private int heartbeatFrequency = DEFAULT_MONGO_OPTIONS.getHeartbeatFrequency();
	private int minHeartbeatFrequency = DEFAULT_MONGO_OPTIONS.getMinHeartbeatFrequency();
@@ -79,7 +74,6 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
	 *
	 * @param description
	 */
	// TODO: Mongo Driver 4 - deprecate that one and add application name
	public void setDescription(@Nullable String description) {
		this.description = description;
	}
@@ -241,7 +235,7 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
	}

	/**
	 * This controls if the driver should use an SSL connection. Defaults to {@literal false}.
	 * This controls if the driver should use an SSL connection. Defaults to |@literal false}.
	 *
	 * @param ssl
	 */
@@ -291,7 +285,7 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
			.cursorFinalizerEnabled(cursorFinalizerEnabled) //
			.dbDecoderFactory(dbDecoderFactory) //
			.dbEncoderFactory(dbEncoderFactory) //
			.applicationName(description) // TODO: Mongo Driver 4 - use application name if description not available
			.description(description) //
			.heartbeatConnectTimeout(heartbeatConnectTimeout) //
			.heartbeatFrequency(heartbeatFrequency) //
			.heartbeatSocketTimeout(heartbeatSocketTimeout) //
@@ -303,9 +297,8 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
			.readPreference(readPreference) //
			.requiredReplicaSetName(requiredReplicaSetName) //
			.serverSelectionTimeout(serverSelectionTimeout) //
			.sslEnabled(ssl) //
			.socketFactory(socketFactoryToUse) // TODO: Mongo Driver 4 - remove if not available
			.socketKeepAlive(socketKeepAlive) // TODO: Mongo Driver 4 - remove if not available
			.socketFactory(socketFactoryToUse) //
			.socketKeepAlive(socketKeepAlive) //
			.socketTimeout(socketTimeout) //
			.threadsAllowedToBlockForConnectionMultiplier(threadsAllowedToBlockForConnectionMultiplier) //
			.writeConcern(writeConcern).build();
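Since the hunks above only show a subset of the options, here is a hedged configuration sketch; setDescription(..) and the ssl property appear in this diff, while the remaining setter names are assumed to mirror the listed fields rather than taken from it.

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.MongoClientOptionsFactoryBean;

@Configuration
class MongoOptionsConfig {

	@Bean
	public MongoClientOptionsFactoryBean mongoClientOptions() {

		MongoClientOptionsFactoryBean factoryBean = new MongoClientOptionsFactoryBean();
		factoryBean.setDescription("sample-app"); // fed into description/applicationName by the builder above
		factoryBean.setConnectionsPerHost(50);    // assumed setter mirroring connectionsPerHost
		factoryBean.setSocketTimeout(2000);       // assumed setter mirroring socketTimeout
		factoryBean.setSsl(true);                 // ends up in the sslEnabled(ssl) builder call
		return factoryBean;
	}
}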
@@ -1,272 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.Value;
|
||||
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.SessionAwareMethodInterceptor;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
 * Common base class for usage with both {@link com.mongodb.client.MongoClients} and {@link com.mongodb.MongoClient}
 * defining common properties such as database name and exception translator.
 * <p/>
 * Not intended to be used directly.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @param <C> Client type.
 * @since 2.1
 * @see SimpleMongoDbFactory
 * @see SimpleMongoClientDbFactory
 */
public abstract class MongoDbFactorySupport<C> implements MongoDbFactory {
|
||||
|
||||
private final C mongoClient;
|
||||
private final String databaseName;
|
||||
private final boolean mongoInstanceCreated;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
private @Nullable WriteConcern writeConcern;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoDbFactorySupport} object given {@code mongoClient}, {@code databaseName},
|
||||
* {@code mongoInstanceCreated} and {@link PersistenceExceptionTranslator}.
|
||||
*
|
||||
* @param mongoClient must not be {@literal null}.
|
||||
* @param databaseName must not be {@literal null} or empty.
|
||||
* @param mongoInstanceCreated {@literal true} if the client instance was created by a subclass of
|
||||
* {@link MongoDbFactorySupport} to close the client on {@link #destroy()}.
|
||||
* @param exceptionTranslator must not be {@literal null}.
|
||||
*/
|
||||
protected MongoDbFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated,
|
||||
PersistenceExceptionTranslator exceptionTranslator) {
|
||||
|
||||
Assert.notNull(mongoClient, "MongoClient must not be null!");
|
||||
Assert.hasText(databaseName, "Database name must not be empty!");
|
||||
Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"),
|
||||
"Database name must not contain slashes, dots, spaces, quotes, or dollar signs!");
|
||||
|
||||
this.mongoClient = mongoClient;
|
||||
this.databaseName = databaseName;
|
||||
this.mongoInstanceCreated = mongoInstanceCreated;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created.
|
||||
*
|
||||
* @param writeConcern the writeConcern to set
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb()
|
||||
*/
|
||||
public MongoDatabase getDb() throws DataAccessException {
|
||||
return getDb(databaseName);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
|
||||
Assert.hasText(dbName, "Database name must not be empty!");
|
||||
|
||||
MongoDatabase db = doGetMongoDatabase(dbName);
|
||||
|
||||
if (writeConcern == null) {
|
||||
return db;
|
||||
}
|
||||
|
||||
return db.withWriteConcern(writeConcern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the actual {@link MongoDatabase} from the client.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
protected abstract MongoDatabase doGetMongoDatabase(String dbName);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.DisposableBean#destroy()
|
||||
*/
|
||||
public void destroy() throws Exception {
|
||||
if (mongoInstanceCreated) {
|
||||
closeClient();
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return this.exceptionTranslator;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.Session)
|
||||
*/
|
||||
public MongoDbFactory withSession(ClientSession session) {
|
||||
return new MongoDbFactorySupport.ClientSessionBoundMongoDbFactory(session, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the client instance.
|
||||
*/
|
||||
protected abstract void closeClient();
|
||||
|
||||
/**
|
||||
* @return the Mongo client object.
|
||||
*/
|
||||
protected C getMongoClient() {
|
||||
return mongoClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the database name.
|
||||
*/
|
||||
protected String getDefaultDatabaseName() {
|
||||
return databaseName;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ClientSession} bound {@link MongoDbFactory} decorating the database with a
|
||||
* {@link SessionAwareMethodInterceptor}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@Value
|
||||
static class ClientSessionBoundMongoDbFactory implements MongoDbFactory {
|
||||
|
||||
ClientSession session;
|
||||
MongoDbFactory delegate;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb()
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb() throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getDb());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getDb(dbName));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
@Override
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return delegate.getExceptionTranslator();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getLegacyDb()
|
||||
*/
|
||||
@Override
|
||||
public DB getLegacyDb() {
|
||||
return delegate.getLegacyDb();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
*/
|
||||
@Override
|
||||
public ClientSession getSession(ClientSessionOptions options) {
|
||||
return delegate.getSession(options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.ClientSession)
|
||||
*/
|
||||
@Override
|
||||
public MongoDbFactory withSession(ClientSession session) {
|
||||
return delegate.withSession(session);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#isTransactionActive()
|
||||
*/
|
||||
@Override
|
||||
public boolean isTransactionActive() {
|
||||
return session != null && session.hasActiveTransaction();
|
||||
}
|
||||
|
||||
private MongoDatabase proxyMongoDatabase(MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoCollection<?> proxyCollection(com.mongodb.session.ClientSession session,
|
||||
MongoCollection<?> collection) {
|
||||
return createProxyInstance(session, collection, MongoCollection.class);
|
||||
}
|
||||
|
||||
private <T> T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class<T> targetType) {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
factory.setTarget(target);
|
||||
factory.setInterfaces(targetType);
|
||||
factory.setOpaque(true);
|
||||
|
||||
factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class,
|
||||
this::proxyDatabase, MongoCollection.class, this::proxyCollection));
|
||||
|
||||
return targetType.cast(factory.getProxy());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
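The class above leaves doGetMongoDatabase(..) and closeClient() to subclasses, and the MongoDbFactory contract still demands getLegacyDb() and getSession(..). A minimal sketch of a concrete factory, loosely modelled after SimpleMongoDbFactory; the com.mongodb.MongoClient calls (getDatabase, getDB, startSession, close) are assumptions about the legacy driver API, not taken from the file shown here.

import com.mongodb.ClientSessionOptions;
import com.mongodb.DB;
import com.mongodb.MongoClient;
import com.mongodb.client.ClientSession;
import com.mongodb.client.MongoDatabase;

public class LegacyClientMongoDbFactory extends MongoDbFactorySupport<MongoClient> {

	public LegacyClientMongoDbFactory(MongoClient mongoClient, String databaseName) {
		// false: the client is externally managed, so destroy() will not close it
		super(mongoClient, databaseName, false, new MongoExceptionTranslator());
	}

	@Override
	protected MongoDatabase doGetMongoDatabase(String dbName) {
		return getMongoClient().getDatabase(dbName);
	}

	@Override
	protected void closeClient() {
		getMongoClient().close();
	}

	@Override
	public DB getLegacyDb() {
		return getMongoClient().getDB(getDefaultDatabaseName());
	}

	@Override
	public ClientSession getSession(ClientSessionOptions options) {
		return getMongoClient().startSession(options);
	}
}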
@@ -30,8 +30,6 @@ import org.springframework.dao.InvalidDataAccessResourceUsageException;
import org.springframework.dao.PermissionDeniedDataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.data.mongodb.ClientSessionException;
import org.springframework.data.mongodb.MongoTransactionException;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
import org.springframework.data.mongodb.util.MongoDbErrorCodes;
import org.springframework.lang.Nullable;
@@ -122,32 +120,18 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
			int code = ((MongoException) ex).getCode();

			if (MongoDbErrorCodes.isDuplicateKeyCode(code)) {
				return new DuplicateKeyException(ex.getMessage(), ex);
				throw new DuplicateKeyException(ex.getMessage(), ex);
			} else if (MongoDbErrorCodes.isDataAccessResourceFailureCode(code)) {
				return new DataAccessResourceFailureException(ex.getMessage(), ex);
				throw new DataAccessResourceFailureException(ex.getMessage(), ex);
			} else if (MongoDbErrorCodes.isInvalidDataAccessApiUsageCode(code) || code == 10003 || code == 12001
					|| code == 12010 || code == 12011 || code == 12012) {
				return new InvalidDataAccessApiUsageException(ex.getMessage(), ex);
				throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex);
			} else if (MongoDbErrorCodes.isPermissionDeniedCode(code)) {
				return new PermissionDeniedDataAccessException(ex.getMessage(), ex);
			} else if (MongoDbErrorCodes.isClientSessionFailureCode(code)) {
				return new ClientSessionException(ex.getMessage(), ex);
			} else if (MongoDbErrorCodes.isTransactionFailureCode(code)) {
				return new MongoTransactionException(ex.getMessage(), ex);
				throw new PermissionDeniedDataAccessException(ex.getMessage(), ex);
			}
			return new UncategorizedMongoDbException(ex.getMessage(), ex);
		}

		// may interfere with OmitStackTraceInFastThrow (enabled by default).
		// see https://jira.spring.io/browse/DATAMONGO-1905
		if (ex instanceof IllegalStateException) {
			for (StackTraceElement elm : ex.getStackTrace()) {
				if (elm.getClassName().contains("ClientSession")) {
					return new ClientSessionException(ex.getMessage(), ex);
				}
			}
		}

		// If we get here, we have an exception that resulted from user code,
		// rather than the persistence provider, so we return null to indicate
		// that translation should not occur.
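For orientation, this is how the code-mapping branch above is typically exercised; translateExceptionIfPossible(..) is the PersistenceExceptionTranslator contract method, and the sketch assumes the returning variant of this hunk (the alternative lines throw instead of returning).

MongoExceptionTranslator translator = new MongoExceptionTranslator();

// 11000 is the classic duplicate key error code picked up by isDuplicateKeyCode(..).
MongoException cause = new MongoException(11000, "E11000 duplicate key error collection: db.person");

DataAccessException translated = translator.translateExceptionIfPossible(cause);
// translated is an org.springframework.dao.DuplicateKeyException wrapping the MongoException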
@@ -1,75 +0,0 @@
/*
 * Copyright 2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
import org.springframework.util.Assert;

/**
 * {@link MongoJsonSchemaCreator} extracts the {@link MongoJsonSchema} for a given {@link Class} by applying the
 * following mapping rules.
 * <p>
 * <strong>Required Properties</strong>
 * <ul>
 * <li>Properties of primitive type</li>
 * </ul>
 * <strong>Ignored Properties</strong>
 * <ul>
 * <li>All properties annotated with {@link org.springframework.data.annotation.Transient}</li>
 * </ul>
 * <strong>Property Type Mapping</strong>
 * <ul>
 * <li>{@link java.lang.Object} -> {@code type : 'object'}</li>
 * <li>{@link java.util.Arrays} -> {@code type : 'array'}</li>
 * <li>{@link java.util.Collection} -> {@code type : 'array'}</li>
 * <li>{@link java.util.Map} -> {@code type : 'object'}</li>
 * <li>{@link java.lang.Enum} -> {@code type : 'string', enum : [the enum values]}</li>
 * <li>Simple Types -> {@code type : 'the corresponding bson type' }</li>
 * <li>Domain Types -> {@code type : 'object', properties : {the types properties} }</li>
 * </ul>
 * <br />
 * {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into
 * {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more
 * specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation.
 * </p>
 *
 * @author Christoph Strobl
 * @since 2.2
 */
public interface MongoJsonSchemaCreator {

	/**
	 * Create the {@link MongoJsonSchema} for the given {@link Class type}.
	 *
	 * @param type must not be {@literal null}.
	 * @return never {@literal null}.
	 */
	MongoJsonSchema createSchemaFor(Class<?> type);

	/**
	 * Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given
	 * {@link MongoConverter}.
	 *
	 * @param mongoConverter must not be {@literal null}.
	 * @return new instance of {@link MongoJsonSchemaCreator}.
	 */
	static MongoJsonSchemaCreator create(MongoConverter mongoConverter) {

		Assert.notNull(mongoConverter, "MongoConverter must not be null!");
		return new MappingMongoJsonSchemaCreator(mongoConverter);
	}
}
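A derived schema is typically fed back into collection creation as a server-side validator. Sketch only: CollectionOptions.schema(..) and createCollection(Class, CollectionOptions) are assumed from the surrounding Spring Data MongoDB 2.1+ API and do not appear in this interface; Person and mongoConverter are the hypothetical placeholders from the earlier sketch.

MongoJsonSchema schema = MongoJsonSchemaCreator.create(mongoConverter).createSchemaFor(Person.class);

// Creates the collection with $jsonSchema validation derived from the domain type.
template.createCollection(Person.class, CollectionOptions.empty().schema(schema));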
@@ -18,8 +18,6 @@ package org.springframework.data.mongodb.core;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
@@ -28,7 +26,6 @@ import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.index.IndexOperations;
|
||||
import org.springframework.data.mongodb.core.mapreduce.GroupBy;
|
||||
@@ -42,13 +39,9 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.Cursor;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
import com.mongodb.client.result.UpdateResult;
|
||||
@@ -57,10 +50,6 @@ import com.mongodb.client.result.UpdateResult;
|
||||
* Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but
|
||||
* a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK
|
||||
* proxy).
|
||||
* <p />
|
||||
* <strong>NOTE:</strong> Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
|
||||
* specific documentation to learn more about <a href="https://docs.mongodb.com/manual/core/transactions/">Multi
|
||||
* Document Transactions</a>.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @author Mark Pollack
|
||||
@@ -162,64 +151,6 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
@Nullable
|
||||
<T> T execute(String collectionName, CollectionCallback<T> action);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding a new {@link ClientSession}
|
||||
* with given {@literal sessionOptions} to each and every command issued against MongoDB.
|
||||
*
|
||||
* @param sessionOptions must not be {@literal null}.
|
||||
* @return new instance of {@link SessionScoped}. Never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
SessionScoped withSession(ClientSessionOptions sessionOptions);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession}
|
||||
* provided by the given {@link Supplier} to each and every command issued against MongoDB.
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use the
|
||||
* {@link SessionScoped#execute(SessionCallback, Consumer)} hook to potentially close the {@link ClientSession}.
|
||||
*
|
||||
* @param sessionProvider must not be {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
	default SessionScoped withSession(Supplier<ClientSession> sessionProvider) {

		Assert.notNull(sessionProvider, "SessionProvider must not be null!");

		return new SessionScoped() {

			private final Object lock = new Object();
			private @Nullable ClientSession session = null;

			@Override
			public <T> T execute(SessionCallback<T> action, Consumer<ClientSession> onComplete) {

				synchronized (lock) {
					if (session == null) {
						session = sessionProvider.get();
					}
				}

				try {
					return action.doInSession(MongoOperations.this.withSession(session));
				} finally {
					onComplete.accept(session);
				}
			}
		};
	}
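A usage sketch for the Supplier-based variant above; template is assumed to be a MongoTemplate, client a com.mongodb.MongoClient able to start sessions, and Person a hypothetical mapped type.

SessionScoped scoped = template.withSession(
		() -> client.startSession(ClientSessionOptions.builder().causallyConsistent(true).build()));

List<Person> people = scoped.execute(operations -> {

	// every call on 'operations' is bound to the lazily created ClientSession
	return operations.find(new Query(Criteria.where("lastName").is("Blint")), Person.class);
}, ClientSession::close); // the Consumer hook closes the session once the callback returns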
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoOperations}.
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
* @return {@link ClientSession} bound instance of {@link MongoOperations}.
|
||||
* @since 2.1
|
||||
*/
|
||||
MongoOperations withSession(ClientSession session);
|
||||
|
||||
/**
|
||||
* Executes the given {@link Query} on the entity collection of the specified {@code entityType} backed by a Mongo DB
|
||||
* {@link Cursor}.
|
||||
@@ -293,15 +224,12 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
Set<String> getCollectionNames();
|
||||
|
||||
/**
|
||||
* Get a {@link MongoCollection} by its name. The returned collection may not exists yet (except in local memory) and
|
||||
* is created on first interaction with the server. Collections can be explicitly created via
|
||||
* {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class)
|
||||
* exists} first.
|
||||
* Get a collection by name, creating it if it doesn't exist.
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection. Must not be {@literal null}.
|
||||
* @return an existing collection or one created on first server interaction.
|
||||
* @return an existing collection or a newly created one.
|
||||
*/
|
||||
MongoCollection<Document> getCollection(String collectionName);
|
||||
|
||||
@@ -362,9 +290,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @return
|
||||
* @since 1.7
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0.
|
||||
*/
|
||||
@Deprecated
|
||||
ScriptOperations scriptOps();
|
||||
|
||||
/**
|
||||
@@ -392,7 +318,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* Returns a new {@link BulkOperations} for the given entity type and collection name.
|
||||
*
|
||||
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
|
||||
* @param entityType the name of the entity class. Can be {@literal null}.
|
||||
* @param entityClass the name of the entity class. Can be {@literal null}.
|
||||
* @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
|
||||
* @return {@link BulkOperations} on the named collection associated with the given entity class.
|
||||
*/
|
||||
@@ -431,16 +357,14 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
 * Execute a group operation over the entire collection. The group operation entity class should match the 'shape' of
 * the returned object that takes into account the initial document structure as well as any finalize functions.
 *
 * @param criteria The criteria that restricts the rows that are considered for grouping. If not specified all rows are
 *          considered.
|
||||
* @param inputCollectionName the collection where the group operation will read from
|
||||
* @param groupBy the conditions under which the group operation will be performed, e.g. keys, initial document,
|
||||
* reduce function.
|
||||
* @param entityClass The parametrized type of the returned list
|
||||
* @return The results of the group operation
|
||||
* @deprecated since 2.2. The {@code group} command has been removed in MongoDB Server 4.2.0. <br />
|
||||
* Please use {@link #aggregate(TypedAggregation, String, Class) } with a
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.GroupOperation} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GroupByResults<T> group(String inputCollectionName, GroupBy groupBy, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
@@ -455,12 +379,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* reduce function.
|
||||
* @param entityClass The parametrized type of the returned list
|
||||
* @return The results of the group operation
|
||||
* @deprecated since 2.2. The {@code group} command has been removed in MongoDB Server 4.2.0. <br />
|
||||
* Please use {@link #aggregate(TypedAggregation, String, Class) } with a
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.GroupOperation} and
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.MatchOperation} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GroupByResults<T> group(@Nullable Criteria criteria, String inputCollectionName, GroupBy groupBy,
|
||||
Class<T> entityClass);
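As the deprecation notes above suggest, the same result is obtained through the aggregation framework; Person and the person collection name are hypothetical placeholders.

TypedAggregation<Person> aggregation = Aggregation.newAggregation(Person.class,
		Aggregation.match(Criteria.where("age").gte(18)),   // replaces the group criteria
		Aggregation.group("lastName").count().as("count")); // replaces the GroupBy definition

AggregationResults<org.bson.Document> results = template.aggregate(aggregation, "person", org.bson.Document.class);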
|
||||
|
||||
@@ -648,52 +567,24 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
 * information to determine the collection the query is run against. Note that MongoDB limits the number of results
|
||||
* by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of
|
||||
* results.
|
||||
* <p>
|
||||
 * MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2 this method uses aggregations and the
|
||||
* {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* AggregationResults<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return
|
||||
 * @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the
|
||||
* number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
|
||||
* particular number of results.
|
||||
* <p>
|
||||
 * MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2 this method uses aggregations and the
|
||||
* {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* AggregationResults<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @param collectionName the collection to trigger the query against. If no collection name is given the entity class
|
||||
* will be inspected. Must not be {@literal null} nor empty.
|
||||
* @return
|
||||
 * @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
@@ -819,67 +710,8 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> T findById(Object id, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and
|
||||
* returns the results in a {@link List}.
|
||||
*
|
||||
* @param field the name of the field to inspect for distinct values. Must not be {@literal null}.
|
||||
* @param entityClass the domain type used for determining the actual {@link MongoCollection}. Must not be
|
||||
* {@literal null}.
|
||||
* @param resultClass the result type. Must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
default <T> List<T> findDistinct(String field, Class<?> entityClass, Class<T> resultClass) {
|
||||
return findDistinct(new Query(), field, entityClass, resultClass);
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and
|
||||
* returns the results in a {@link List}.
|
||||
*
|
||||
* @param query filter {@link Query} to restrict search. Must not be {@literal null}.
|
||||
* @param field the name of the field to inspect for distinct values. Must not be {@literal null}.
|
||||
* @param entityClass the domain type used for determining the actual {@link MongoCollection} and mapping the
|
||||
* {@link Query} to the domain type fields. Must not be {@literal null}.
|
||||
* @param resultClass the result type. Must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
<T> List<T> findDistinct(Query query, String field, Class<?> entityClass, Class<T> resultClass);
|
||||
|
||||
/**
|
||||
* Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and
|
||||
* returns the results in a {@link List}.
|
||||
*
|
||||
* @param query filter {@link Query} to restrict search. Must not be {@literal null}.
|
||||
* @param field the name of the field to inspect for distinct values. Must not be {@literal null}.
|
||||
* @param collectionName the explicit name of the actual {@link MongoCollection}. Must not be {@literal null}.
|
||||
* @param entityClass the domain type used for mapping the {@link Query} to the domain type fields.
|
||||
* @param resultClass the result type. Must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
<T> List<T> findDistinct(Query query, String field, String collectionName, Class<?> entityClass,
|
||||
Class<T> resultClass);
|
||||
|
||||
/**
|
||||
* Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and
|
||||
* returns the results in a {@link List}.
|
||||
*
|
||||
* @param query filter {@link Query} to restrict search. Must not be {@literal null}.
|
||||
* @param field the name of the field to inspect for distinct values. Must not be {@literal null}.
|
||||
* @param collection the explicit name of the actual {@link MongoCollection}. Must not be {@literal null}.
|
||||
* @param resultClass the result type. Must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
default <T> List<T> findDistinct(Query query, String field, String collection, Class<T> resultClass) {
|
||||
return findDistinct(query, field, collection, Object.class, resultClass);
|
||||
}
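Putting the distinct variants above together; Person, its lastName and active properties, and template are hypothetical placeholders.

// Distinct last names across the whole collection backing Person.
List<String> lastNames = template.findDistinct(new Query(), "lastName", Person.class, String.class);

// The Query variant narrows down which documents are inspected first.
List<String> activeLastNames = template.findDistinct(
		new Query(Criteria.where("active").is(true)), "lastName", Person.class, String.class);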
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
@@ -891,8 +723,8 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
@@ -905,8 +737,8 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
@@ -922,8 +754,8 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
@@ -940,167 +772,6 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
|
||||
* document. <br />
|
||||
* The collection name is derived from the {@literal replacement} type. <br />
|
||||
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @return the converted object that was updated or {@literal null}, if not found.
|
||||
* @since 2.1
|
||||
*/
|
||||
@Nullable
|
||||
default <T> T findAndReplace(Query query, T replacement) {
|
||||
return findAndReplace(query, replacement, FindAndReplaceOptions.empty());
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
|
||||
* document.<br />
|
||||
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object that was updated or {@literal null}, if not found.
|
||||
* @since 2.1
|
||||
*/
|
||||
@Nullable
|
||||
default <T> T findAndReplace(Query query, T replacement, String collectionName) {
|
||||
return findAndReplace(query, replacement, FindAndReplaceOptions.empty(), collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
|
||||
* {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
|
||||
* as it is after the update.
|
||||
* @since 2.1
|
||||
*/
|
||||
@Nullable
|
||||
default <T> T findAndReplace(Query query, T replacement, FindAndReplaceOptions options) {
|
||||
return findAndReplace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
|
||||
* {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
|
||||
* as it is after the update.
|
||||
* @since 2.1
|
||||
*/
|
||||
@Nullable
|
||||
default <T> T findAndReplace(Query query, T replacement, FindAndReplaceOptions options, String collectionName) {
|
||||
|
||||
Assert.notNull(replacement, "Replacement must not be null!");
|
||||
return findAndReplace(query, replacement, options, (Class<T>) ClassUtils.getUserClass(replacement), collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @param entityType the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
|
||||
* {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
|
||||
* as it is after the update.
|
||||
* @since 2.1
|
||||
*/
|
||||
@Nullable
|
||||
default <T> T findAndReplace(Query query, T replacement, FindAndReplaceOptions options, Class<T> entityType,
|
||||
String collectionName) {
|
||||
|
||||
return findAndReplace(query, replacement, options, entityType, collectionName, entityType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection
|
||||
* from. Must not be {@literal null}.
|
||||
* @param resultType the parametrized type projection return type. Must not be {@literal null}, use the domain type of
|
||||
* {@code Object.class} instead.
|
||||
* @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
|
||||
* {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
|
||||
* as it is after the update.
|
||||
* @since 2.1
|
||||
*/
|
||||
@Nullable
|
||||
default <S, T> T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class<S> entityType,
|
||||
Class<T> resultType) {
|
||||
|
||||
return findAndReplace(query, replacement, options, entityType,
|
||||
getCollectionName(ClassUtils.getUserClass(entityType)), resultType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @param entityType the type used for mapping the {@link Query} to domain type fields. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @param resultType the parametrized type projection return type. Must not be {@literal null}, use the domain type of
|
||||
* {@code Object.class} instead.
|
||||
* @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
|
||||
* {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
|
||||
* as it is after the update.
|
||||
* @since 2.1
|
||||
*/
|
||||
@Nullable
|
||||
<S, T> T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class<S> entityType,
|
||||
String collectionName, Class<T> resultType);
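A short usage sketch for the overloads above; Person is hypothetical and FindAndReplaceOptions.options().returnNew() is assumed from the 2.1 options API (only empty() is referenced in this file).

Person replacement = new Person("Tom", "Sawyer"); // must not carry an id

// Defaults: hands back the previously stored document, or null if nothing matched.
Person previous = template.findAndReplace(
		new Query(Criteria.where("lastName").is("Sawyer")), replacement);

// Returning the document in its state after the replacement instead.
Person current = template.findAndReplace(
		new Query(Criteria.where("lastName").is("Sawyer")), replacement,
		FindAndReplaceOptions.options().returnNew());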
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the
|
||||
* specified type. The first document that matches the query is returned and also removed from the collection in the
|
||||
@@ -1140,11 +811,6 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1156,11 +822,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
|
||||
* must solely consist of document field references as we lack type information to map potential property references
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
@@ -1171,11 +833,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* class to map the given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1193,16 +851,15 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
 * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the inserted object.
|
||||
*/
|
||||
<T> T insert(T objectToSave);
|
||||
void insert(Object objectToSave);
|
||||
|
||||
/**
|
||||
* Insert the object into the specified collection.
|
||||
@@ -1214,36 +871,32 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
* @return the inserted object.
|
||||
*/
|
||||
<T> T insert(T objectToSave, String collectionName);
|
||||
void insert(Object objectToSave, String collectionName);
|
||||
|
||||
/**
|
||||
* Insert a Collection of objects into a collection in a single batch write to the database.
|
||||
*
|
||||
* @param batchToSave the batch of objects to save. Must not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
 * @return the inserted objects.
|
||||
*/
|
||||
<T> Collection<T> insert(Collection<? extends T> batchToSave, Class<?> entityClass);
|
||||
void insert(Collection<? extends Object> batchToSave, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Insert a batch of objects into the specified collection in a single batch write to the database.
|
||||
*
|
||||
* @param batchToSave the list of objects to save. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
 * @return the inserted objects.
|
||||
*/
|
||||
<T> Collection<T> insert(Collection<? extends T> batchToSave, String collectionName);
|
||||
void insert(Collection<? extends Object> batchToSave, String collectionName);
|
||||
|
||||
/**
|
||||
* Insert a mixed Collection of objects into a database collection determining the collection name to use based on the
|
||||
* class.
|
||||
*
|
||||
* @param objectsToSave the list of objects to save. Must not be {@literal null}.
|
||||
* @return the inserted objects.
|
||||
*/
|
||||
<T> Collection<T> insertAll(Collection<? extends T> objectsToSave);
|
||||
void insertAll(Collection<? extends Object> objectsToSave);
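With the signatures above returning the stored objects, generated ids are available immediately after the call; Person and its accessors are hypothetical.

Person saved = template.insert(new Person("Ada", "Lovelace"));
String id = saved.getId(); // populated by the mapping layer after the write

Collection<Person> savedBatch = template.insert(
		Arrays.asList(new Person("Alan", "Turing"), new Person("Grace", "Hopper")), Person.class);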
|
||||
|
||||
/**
|
||||
* Save the object to the collection for the entity type of the object to save. This will perform an insert if the
|
||||
@@ -1255,13 +908,12 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
*/
|
||||
<T> T save(T objectToSave);
|
||||
void save(Object objectToSave);
|
||||
|
||||
/**
|
||||
* Save the object to the specified collection. This will perform an insert if the object is not already present, that
|
||||
@@ -1273,14 +925,13 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
* http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
*/
|
||||
<T> T save(T objectToSave, String collectionName);
|
||||
void save(Object objectToSave, String collectionName);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
@@ -1409,10 +1060,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
UpdateResult updateMulti(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Remove the given object from the collection by {@literal id} and (if applicable) its
|
||||
* {@link org.springframework.data.annotation.Version}. <br />
|
||||
* Use {@link DeleteResult#getDeletedCount()} for insight into whether an {@link DeleteResult#wasAcknowledged()
|
||||
* acknowledged} remove operation was successful or not.
|
||||
* Remove the given object from the collection by id.
|
||||
*
|
||||
* @param object must not be {@literal null}.
|
||||
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
|
||||
@@ -1420,10 +1068,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
DeleteResult remove(Object object);
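A hedged sketch of the acknowledgement check mentioned in the javadoc above (same hypothetical template and person as before; DeleteResult is com.mongodb.client.result.DeleteResult):

// remove by id (and version, if present) and inspect the acknowledged result
DeleteResult result = template.remove(person);
if (result.wasAcknowledged() && result.getDeletedCount() == 1) {
    // exactly one document was removed
}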
|
||||
|
||||
/**
|
||||
* Removes the given object from the given collection by {@literal id} and (if applicable) its
|
||||
* {@link org.springframework.data.annotation.Version}. <br />
|
||||
* Use {@link DeleteResult#getDeletedCount()} for insight into whether an {@link DeleteResult#wasAcknowledged()
|
||||
* acknowledged} remove operation was successful or not.
|
||||
* Removes the given object from the given collection.
|
||||
*
|
||||
* @param object must not be {@literal null}.
|
||||
* @param collectionName name of the collection where the objects will be removed, must not be {@literal null} or empty.
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,75 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mapping.SimplePropertyHandler;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.projection.ProjectionInformation;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
* Common operations performed on properties of an entity like extracting fields information for projection creation.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@RequiredArgsConstructor(access = AccessLevel.PACKAGE)
|
||||
class PropertyOperations {
|
||||
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
|
||||
/**
|
||||
* For cases where {@code fields} is {@link Document#isEmpty() empty} include only fields that are required for
|
||||
* creating the projection (target) type if the {@code targetType} is a {@literal DTO projection} or a
|
||||
* {@literal closed interface projection}.
|
||||
*
|
||||
* @param projectionFactory must not be {@literal null}.
|
||||
* @param fields must not be {@literal null}.
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @param targetType must not be {@literal null}.
|
||||
* @return {@link Document} with fields to be included.
|
||||
*/
|
||||
Document computeFieldsForProjection(ProjectionFactory projectionFactory, Document fields, Class<?> domainType,
|
||||
Class<?> targetType) {
|
||||
|
||||
if (!fields.isEmpty() || ClassUtils.isAssignable(domainType, targetType)) {
|
||||
return fields;
|
||||
}
|
||||
|
||||
Document projectedFields = new Document();
|
||||
|
||||
if (targetType.isInterface()) {
|
||||
|
||||
ProjectionInformation projectionInformation = projectionFactory.getProjectionInformation(targetType);
|
||||
|
||||
if (projectionInformation.isClosed()) {
|
||||
projectionInformation.getInputProperties().forEach(it -> projectedFields.append(it.getName(), 1));
|
||||
}
|
||||
} else {
|
||||
mappingContext.getRequiredPersistentEntity(targetType).doWithProperties(
|
||||
(SimplePropertyHandler) persistentProperty -> projectedFields.append(persistentProperty.getName(), 1));
|
||||
}
|
||||
|
||||
return projectedFields;
|
||||
}
|
||||
}
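To make the projection-field computation above concrete, a sketch with illustrative names only (Person and PersonNames are not part of this change):

// given a domain type Person with firstname, lastname and age properties
// and a closed interface projection exposing only two of them:
interface PersonNames {
    String getFirstname();
    String getLastname();
}

// computeFieldsForProjection(projectionFactory, new Document(), Person.class, PersonNames.class)
// then yields a fields document equivalent to { "firstname" : 1, "lastname" : 1 },
// while an open projection or Person.class itself returns the incoming (empty) fields unchanged.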
|
||||
@@ -30,4 +30,5 @@ import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
public interface ReactiveCollectionCallback<T> {
|
||||
|
||||
Publisher<T> doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException;
|
||||
|
||||
}
|
||||
|
||||
@@ -85,23 +85,6 @@ public interface ReactiveFindOperation {
|
||||
*/
|
||||
Flux<T> all();
|
||||
|
||||
/**
|
||||
* Get all matching elements using a {@link com.mongodb.CursorType#TailableAwait tailable cursor}. The stream will
|
||||
* not be completed unless the {@link org.reactivestreams.Subscription} is
|
||||
* {@link org.reactivestreams.Subscription#cancel() canceled}.
|
||||
* <p />
|
||||
* However, the stream may become dead, or invalid, if either the query returns no match or the cursor returns the
|
||||
* document at the "end" of the collection and then the application deletes that document.
|
||||
* <p />
|
||||
* A stream that is no longer in use must be {@link reactor.core.Disposable#dispose() disposed}, otherwise the
|
||||
* streams will linger and exhaust resources. <br/>
|
||||
* <strong>NOTE:</strong> Requires a capped collection.
|
||||
*
|
||||
* @return the {@link Flux} emitting converted objects.
|
||||
* @since 2.1
|
||||
*/
|
||||
Flux<T> tail();
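A usage sketch for the tailable cursor API documented above (capped collection required; template is a ReactiveMongoTemplate and Person a hypothetical document class):

// the Flux stays open until the Subscription is canceled
Flux<Person> stream = template.query(Person.class).tail();

Disposable subscription = stream.subscribe(System.out::println);

// dispose the subscription once the stream is no longer needed to free server resources
subscription.dispose();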
|
||||
|
||||
/**
|
||||
* Get the number of matching elements.
|
||||
*
|
||||
@@ -173,7 +156,7 @@ public interface ReactiveFindOperation {
|
||||
/**
|
||||
* Result type override (optional).
|
||||
*/
|
||||
interface FindWithProjection<T> extends FindWithQuery<T>, FindDistinct {
|
||||
interface FindWithProjection<T> extends FindWithQuery<T> {
|
||||
|
||||
/**
|
||||
* Define the target type fields should be mapped to. <br />
|
||||
@@ -187,101 +170,8 @@ public interface ReactiveFindOperation {
|
||||
<R> FindWithQuery<R> as(Class<R> resultType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Distinct Find support.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface FindDistinct {
|
||||
|
||||
/**
|
||||
* Finds the distinct values for a specified {@literal field} across a single
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection} or view.
|
||||
*
|
||||
* @param field name of the field. Must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if field is {@literal null}.
|
||||
*/
|
||||
TerminatingDistinct<Object> distinct(String field);
|
||||
}
|
||||
|
||||
/**
|
||||
* Result type override. Optional.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface DistinctWithProjection {
|
||||
|
||||
/**
|
||||
* Define the target type the result should be mapped to. <br />
|
||||
* Skip this step if you are anyway fine with the default conversion.
|
||||
* <dl>
|
||||
* <dt>{@link Object} (the default)</dt>
|
||||
* <dd>Result is mapped according to the {@link org.bson.BsonType} converting e.g. {@link org.bson.BsonString} into
|
||||
* plain {@link String}, {@link org.bson.BsonInt64} to {@link Long}, etc. always picking the most concrete type with
|
||||
* respect to the domain types property.<br />
|
||||
* Any {@link org.bson.BsonType#DOCUMENT} is run through the {@link org.springframework.data.convert.EntityReader}
|
||||
* to obtain the domain type. <br />
|
||||
* Using {@link Object} also works for non-strictly typed fields, e.g. a mixture of different types like fields using
|
||||
* {@link String} in one {@link org.bson.Document} while {@link Long} in another.</dd>
|
||||
* <dt>Any Simple type like {@link String}, {@link Long}, ...</dt>
|
||||
* <dd>The result is mapped directly by the MongoDB Java driver and the {@link org.bson.codecs.CodeCodec Codecs} in
|
||||
* place. This works only for results where all documents considered for the operation use the very same type for
|
||||
* the field.</dd>
|
||||
* <dt>Any Domain type</dt>
|
||||
* <dd>Domain types can only be mapped if the result of the actual {@code distinct()} operation returns
|
||||
* {@link org.bson.BsonType#DOCUMENT}.</dd>
|
||||
* <dt>{@link org.bson.BsonValue}</dt>
|
||||
* <dd>Using {@link org.bson.BsonValue} allows retrieval of the raw driver specific format, which returns e.g.
|
||||
* {@link org.bson.BsonString}.</dd>
|
||||
* </dl>
|
||||
*
|
||||
* @param resultType must not be {@literal null}.
|
||||
* @param <R> result type.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
*/
|
||||
<R> TerminatingDistinct<R> as(Class<R> resultType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Result restrictions. Optional.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface DistinctWithQuery<T> extends DistinctWithProjection {
|
||||
|
||||
/**
|
||||
* Set the filter query to be used.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingDistinct<T> matching(Query query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Terminating distinct find operations.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface TerminatingDistinct<T> extends DistinctWithQuery<T> {
|
||||
|
||||
/**
|
||||
* Get all matching distinct field values.
|
||||
*
|
||||
* @return empty {@link Flux} if no match found. Never {@literal null}.
|
||||
*/
|
||||
Flux<T> all();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ReactiveFind} provides methods for constructing lookup operations in a fluent way.
|
||||
*/
|
||||
interface ReactiveFind<T> extends FindWithCollection<T>, FindWithProjection<T>, FindDistinct {}
|
||||
interface ReactiveFind<T> extends FindWithCollection<T>, FindWithProjection<T> {}
|
||||
}
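A sketch of the fluent distinct API removed above (assumes static imports of Query.query and Criteria.where, a ReactiveMongoTemplate named template and a hypothetical Person type):

Flux<String> lastnames = template.query(Person.class)
    .distinct("lastname")
    .as(String.class)
    .matching(query(where("age").gte(21)))
    .all();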
|
||||
|
||||
@@ -19,6 +19,7 @@ import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
import org.springframework.lang.Nullable;
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
@@ -27,7 +28,6 @@ import org.springframework.dao.IncorrectResultSizeDataAccessException;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
@@ -169,15 +169,6 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
|
||||
return doFind(null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#tail()
|
||||
*/
|
||||
@Override
|
||||
public Flux<T> tail() {
|
||||
return doFind(template.new TailingQueryFindPublisherPreparer(query, domainType));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithQuery#near(org.springframework.data.mongodb.core.query.NearQuery)
|
||||
@@ -205,18 +196,6 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
|
||||
return template.exists(query, domainType, getCollectionName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindDistinct#distinct(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingDistinct<Object> distinct(String field) {
|
||||
|
||||
Assert.notNull(field, "Field must not be null!");
|
||||
|
||||
return new DistinctOperationSupport<>(this, field);
|
||||
}
|
||||
|
||||
private Flux<T> doFind(@Nullable FindPublisherPreparer preparer) {
|
||||
|
||||
Document queryObject = query.getQueryObject();
|
||||
@@ -226,13 +205,6 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
|
||||
preparer != null ? preparer : getCursorPreparer(query));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private Flux<T> doFindDistinct(String field) {
|
||||
|
||||
return template.findDistinct(query, field, getCollectionName(), domainType,
|
||||
returnType == domainType ? (Class<T>) Object.class : returnType);
|
||||
}
|
||||
|
||||
private FindPublisherPreparer getCursorPreparer(Query query) {
|
||||
return template.new QueryFindPublisherPreparer(query, domainType);
|
||||
}
|
||||
@@ -244,55 +216,5 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
|
||||
private String asString() {
|
||||
return SerializationUtils.serializeToJsonSafely(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
static class DistinctOperationSupport<T> implements TerminatingDistinct<T> {
|
||||
|
||||
private final String field;
|
||||
private final ReactiveFindSupport delegate;
|
||||
|
||||
public DistinctOperationSupport(ReactiveFindSupport delegate, String field) {
|
||||
|
||||
this.delegate = delegate;
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveFindOperation.DistinctWithProjection#as(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <R> TerminatingDistinct<R> as(Class<R> resultType) {
|
||||
|
||||
Assert.notNull(resultType, "ResultType must not be null!");
|
||||
|
||||
return new DistinctOperationSupport<>((ReactiveFindSupport) delegate.as(resultType), field);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveFindOperation.DistinctWithQuery#matching(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public TerminatingDistinct<T> matching(Query query) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
|
||||
return new DistinctOperationSupport<>((ReactiveFindSupport<T>) delegate.matching(query), field);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingDistinct#all()
|
||||
*/
|
||||
@Override
|
||||
public Flux<T> all() {
|
||||
return delegate.doFindDistinct(field);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,8 +19,7 @@ package org.springframework.data.mongodb.core;
|
||||
* Stripped down interface providing access to a fluent API that specifies a basic set of reactive MongoDB operations.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface ReactiveFluentMongoOperations extends ReactiveFindOperation, ReactiveInsertOperation,
|
||||
ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation, ReactiveMapReduceOperation {}
|
||||
ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation {}
|
||||
|
||||
@@ -1,199 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
/**
|
||||
* {@link ReactiveMapReduceOperation} allows creation and execution of MongoDB mapReduce operations in a fluent API
|
||||
* style. The starting {@literal domainType} is used for mapping an optional {@link Query} provided via {@code matching}
|
||||
* into the MongoDB specific representation. By default, the originating {@literal domainType} is also used for mapping
|
||||
* back the results from the {@link org.bson.Document}. However, it is possible to define a different
* {@literal returnType} via {@code as} for mapping the result.<br />
|
||||
* The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
|
||||
* via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the
|
||||
* collection name for the execution.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* mapReduce(Human.class)
|
||||
* .map("function() { emit(this.id, this.firstname) }")
|
||||
* .reduce("function(id, name) { return sum(id, name); }")
|
||||
* .inCollection("star-wars")
|
||||
* .as(Jedi.class)
|
||||
* .matching(query(where("lastname").is("skywalker")))
|
||||
* .all();
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
public interface ReactiveMapReduceOperation {
|
||||
|
||||
/**
|
||||
* Start creating a mapReduce operation for the given {@literal domainType}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return new instance of {@link MapReduceWithMapFunction}.
|
||||
* @throws IllegalArgumentException if domainType is {@literal null}.
|
||||
*/
|
||||
<T> MapReduceWithMapFunction<T> mapReduce(Class<T> domainType);
|
||||
|
||||
/**
|
||||
* Trigger mapReduce execution by calling one of the terminating methods.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface TerminatingMapReduce<T> {
|
||||
|
||||
/**
|
||||
* Get the {@link Flux} emitting mapReduce results.
|
||||
*
|
||||
* @return a {@link Flux} emitting the already mapped operation results.
|
||||
*/
|
||||
Flux<T> all();
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide the Javascript {@code function()} used to map matching documents.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface MapReduceWithMapFunction<T> {
|
||||
|
||||
/**
|
||||
* Set the Javascript map {@code function()}.
|
||||
*
|
||||
* @param mapFunction must not be {@literal null} nor empty.
|
||||
* @return new instance of {@link MapReduceWithReduceFunction}.
|
||||
* @throws IllegalArgumentException if {@literal mapFunction} is {@literal null} or empty.
|
||||
*/
|
||||
MapReduceWithReduceFunction<T> map(String mapFunction);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide the Javascript {@code function()} used to reduce matching documents.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface MapReduceWithReduceFunction<T> {
|
||||
|
||||
/**
|
||||
* Set the Javascript reduce {@code function()}.
|
||||
*
|
||||
* @param reduceFunction must not be {@literal null} nor empty.
|
||||
* @return new instance of {@link ReactiveMapReduce}.
|
||||
* @throws IllegalArgumentException if {@literal reduceFunction} is {@literal null} or empty.
|
||||
*/
|
||||
ReactiveMapReduce<T> reduce(String reduceFunction);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Collection override (Optional).
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface MapReduceWithCollection<T> extends MapReduceWithQuery<T> {
|
||||
|
||||
/**
|
||||
* Explicitly set the name of the collection to perform the mapReduce operation on. <br />
|
||||
* Skip this step to use the default collection derived from the domain type.
|
||||
*
|
||||
* @param collection must not be {@literal null} nor {@literal empty}.
|
||||
* @return new instance of {@link MapReduceWithProjection}.
|
||||
* @throws IllegalArgumentException if collection is {@literal null}.
|
||||
*/
|
||||
MapReduceWithProjection<T> inCollection(String collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Input document filter query (Optional).
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface MapReduceWithQuery<T> extends TerminatingMapReduce<T> {
|
||||
|
||||
/**
|
||||
* Set the filter query to be used.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingMapReduce}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingMapReduce<T> matching(Query query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Result type override (Optional).
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface MapReduceWithProjection<T> extends MapReduceWithQuery<T> {
|
||||
|
||||
/**
|
||||
* Define the target type fields should be mapped to. <br />
|
||||
* Skip this step if you are anyway only interested in the original domain type.
|
||||
*
|
||||
* @param resultType must not be {@literal null}.
|
||||
* @param <R> result type.
|
||||
* @return new instance of {@link TerminatingMapReduce}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
*/
|
||||
<R> MapReduceWithQuery<R> as(Class<R> resultType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Additional mapReduce options (Optional).
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface MapReduceWithOptions<T> {
|
||||
|
||||
/**
|
||||
* Set additional options to apply to the mapReduce operation.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return new instance of {@link ReactiveMapReduce}.
|
||||
* @throws IllegalArgumentException if options is {@literal null}.
|
||||
*/
|
||||
ReactiveMapReduce<T> with(MapReduceOptions options);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ReactiveMapReduce} provides methods for constructing reactive mapReduce operations in a fluent way.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
interface ReactiveMapReduce<T> extends MapReduceWithMapFunction<T>, MapReduceWithReduceFunction<T>,
|
||||
MapReduceWithCollection<T>, MapReduceWithProjection<T>, MapReduceWithOptions<T> {
|
||||
|
||||
}
|
||||
}
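A sketch combining the optional steps described above; MapReduceOptions.options() and outputTypeInline() are assumed from org.springframework.data.mongodb.core.mapreduce, and the Human/Jedi types are illustrative (static imports of Query.query and Criteria.where assumed):

Flux<Jedi> results = template.mapReduce(Human.class)
    .map("function() { emit(this.id, this.firstname) }")
    .reduce("function(id, name) { return sum(id, name); }")
    .with(MapReduceOptions.options().outputTypeInline())
    .inCollection("star-wars")
    .as(Jedi.class)
    .matching(query(where("lastname").is("skywalker")))
    .all();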
|
||||
@@ -1,177 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Implementation of {@link ReactiveMapReduceOperation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ReactiveMapReduceOperationSupport implements ReactiveMapReduceOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final @NonNull ReactiveMongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation#mapReduce(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T> ReactiveMapReduceSupport<T> mapReduce(Class<T> domainType) {
|
||||
|
||||
Assert.notNull(domainType, "DomainType must not be null!");
|
||||
|
||||
return new ReactiveMapReduceSupport<>(template, domainType, domainType, null, ALL_QUERY, null, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
static class ReactiveMapReduceSupport<T>
|
||||
implements ReactiveMapReduce<T>, MapReduceWithOptions<T>, MapReduceWithCollection<T>, MapReduceWithProjection<T>,
|
||||
MapReduceWithQuery<T>, MapReduceWithReduceFunction<T>, MapReduceWithMapFunction<T> {
|
||||
|
||||
private final ReactiveMongoTemplate template;
|
||||
private final Class<?> domainType;
|
||||
private final Class<T> returnType;
|
||||
private final @Nullable String collection;
|
||||
private final Query query;
|
||||
private final @Nullable String mapFunction;
|
||||
private final @Nullable String reduceFunction;
|
||||
private final @Nullable MapReduceOptions options;
|
||||
|
||||
ReactiveMapReduceSupport(ReactiveMongoTemplate template, Class<?> domainType, Class<T> returnType,
|
||||
@Nullable String collection, Query query, @Nullable String mapFunction, @Nullable String reduceFunction,
|
||||
@Nullable MapReduceOptions options) {
|
||||
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.returnType = returnType;
|
||||
this.collection = collection;
|
||||
this.query = query;
|
||||
this.mapFunction = mapFunction;
|
||||
this.reduceFunction = reduceFunction;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.TerminatingMapReduce#all()
|
||||
*/
|
||||
@Override
|
||||
public Flux<T> all() {
|
||||
|
||||
return template.mapReduce(query, domainType, getCollectionName(), returnType, mapFunction, reduceFunction,
|
||||
options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithCollection#inCollection(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MapReduceWithProjection<T> inCollection(String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection name must not be null nor empty!");
|
||||
|
||||
return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
|
||||
reduceFunction, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithQuery#query(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingMapReduce<T> matching(Query query) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
|
||||
return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
|
||||
reduceFunction, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithProjection#as(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <R> MapReduceWithQuery<R> as(Class<R> resultType) {
|
||||
|
||||
Assert.notNull(resultType, "ResultType must not be null!");
|
||||
|
||||
return new ReactiveMapReduceSupport<>(template, domainType, resultType, collection, query, mapFunction,
|
||||
reduceFunction, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithOptions#with(org.springframework.data.mongodb.core.mapreduce.MapReduceOptions)
|
||||
*/
|
||||
@Override
|
||||
public ReactiveMapReduce<T> with(MapReduceOptions options) {
|
||||
|
||||
Assert.notNull(options, "Options must not be null! Please consider empty MapReduceOptions#options() instead.");
|
||||
|
||||
return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
|
||||
reduceFunction, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithMapFunction#map(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MapReduceWithReduceFunction<T> map(String mapFunction) {
|
||||
|
||||
Assert.hasText(mapFunction, "MapFunction name must not be null nor empty!");
|
||||
|
||||
return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
|
||||
reduceFunction, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithReduceFunction#reduce(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public ReactiveMapReduce<T> reduce(String reduceFunction) {
|
||||
|
||||
Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty!");
|
||||
|
||||
return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
|
||||
reduceFunction, options);
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,68 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.reactivestreams.Publisher;
|
||||
import org.springframework.util.Assert;
|
||||
import reactor.core.publisher.Mono;
|
||||
import reactor.util.context.Context;
|
||||
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
|
||||
/**
|
||||
* {@link ReactiveMongoContext} utilizes and enriches the Reactor {@link Context} with information potentially required
|
||||
* for e.g. {@link ClientSession} handling and transactions.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
* @see Mono#subscriberContext()
|
||||
* @see Context
|
||||
*/
|
||||
public class ReactiveMongoContext {
|
||||
|
||||
private static final Class<?> SESSION_KEY = ClientSession.class;
|
||||
|
||||
/**
|
||||
* Gets the {@code Mono<ClientSession>} from Reactor {@link reactor.util.context.Context}. The resulting {@link Mono}
|
||||
* emits the {@link ClientSession} if a session is associated with the current {@link reactor.util.context.Context
|
||||
* subscriber context}. If the context does not contain a session, the resulting {@link Mono} terminates empty (i.e.
|
||||
* without emitting a value).
|
||||
*
|
||||
* @return the {@link Mono} emitting the client session if present; otherwise the {@link Mono} terminates empty.
|
||||
*/
|
||||
public static Mono<ClientSession> getSession() {
|
||||
|
||||
return Mono.subscriberContext().filter(ctx -> ctx.hasKey(SESSION_KEY))
|
||||
.flatMap(ctx -> ctx.<Mono<ClientSession>> get(SESSION_KEY));
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the {@link ClientSession} into the Reactor {@link reactor.util.context.Context}.
|
||||
*
|
||||
* @param context must not be {@literal null}.
|
||||
* @param session must not be {@literal null}.
|
||||
* @return a new {@link Context}.
|
||||
* @see Context#put(Object, Object)
|
||||
*/
|
||||
public static Context setSession(Context context, Publisher<ClientSession> session) {
|
||||
|
||||
Assert.notNull(context, "Context must not be null!");
|
||||
Assert.notNull(session, "Session publisher must not be null!");
|
||||
|
||||
return context.put(SESSION_KEY, Mono.from(session));
|
||||
}
|
||||
}
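A minimal sketch of binding and reading a session via the Reactor Context (names are illustrative; Mono#subscriberContext matches the API referenced above):

import com.mongodb.reactivestreams.client.ClientSession;
import reactor.core.publisher.Mono;

class SessionContextSketch {

    // write the session Publisher into the subscriber Context and look it up downstream
    Mono<String> describe(Mono<ClientSession> sessionProvider) {

        return ReactiveMongoContext.getSession()
            .map(session -> "session bound")
            .defaultIfEmpty("no session bound")
            .subscriberContext(ctx -> ReactiveMongoContext.setSession(ctx, sessionProvider));
    }
}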
|
||||
@@ -19,36 +19,27 @@ import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.reactivestreams.Publisher;
|
||||
import org.reactivestreams.Subscription;
|
||||
import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.index.ReactiveIndexOperations;
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.reactive.TransactionalOperator;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
import com.mongodb.client.result.UpdateResult;
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
|
||||
/**
|
||||
@@ -57,17 +48,13 @@ import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
* Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability
|
||||
* (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using
|
||||
* {@link ReactiveMongoOperations} is deferred until a subscriber subscribes to the {@link Publisher}.
|
||||
* <p />
|
||||
* <strong>NOTE:</strong> Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
|
||||
* specific documentation to learn more about <a href="https://docs.mongodb.com/manual/core/transactions/">Multi
|
||||
* Document Transactions</a>.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
* @see Flux
|
||||
* @see Mono
|
||||
* @see <a href="https://projectreactor.io/docs/">Project Reactor</a>
|
||||
* @see <a href="http://projectreactor.io/docs/">Project Reactor</a>
|
||||
*/
|
||||
public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
@@ -153,96 +140,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*/
|
||||
<T> Flux<T> execute(String collectionName, ReactiveCollectionCallback<T> action);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession}
|
||||
* provided by the given {@link Supplier} to each and every command issued against MongoDB.
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use
|
||||
* {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the
|
||||
* {@link ClientSession} when done.
|
||||
*
|
||||
* @param sessionProvider must not be {@literal null}.
|
||||
* @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
default ReactiveSessionScoped withSession(Supplier<ClientSession> sessionProvider) {
|
||||
|
||||
Assert.notNull(sessionProvider, "SessionProvider must not be null!");
|
||||
|
||||
return withSession(Mono.fromSupplier(sessionProvider));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding a new {@link ClientSession}
|
||||
* with given {@literal sessionOptions} to each and every command issued against MongoDB.
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use
|
||||
* {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the
|
||||
* {@link ClientSession} when done.
|
||||
*
|
||||
* @param sessionOptions must not be {@literal null}.
|
||||
* @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
ReactiveSessionScoped withSession(ClientSessionOptions sessionOptions);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped} binding the
|
||||
* {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against MongoDB.
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use
|
||||
* {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the
|
||||
* {@link ClientSession} when done.
|
||||
*
|
||||
* @param sessionProvider must not be {@literal null}.
|
||||
* @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
ReactiveSessionScoped withSession(Publisher<ClientSession> sessionProvider);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link ReactiveMongoOperations}.
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
* @return {@link ClientSession} bound instance of {@link ReactiveMongoOperations}.
|
||||
* @since 2.1
|
||||
*/
|
||||
ReactiveMongoOperations withSession(ClientSession session);
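A sketch of the session-bound template usage (template is a ReactiveMongoTemplate, session an already started ClientSession; both assumed):

// every command issued through sessionBound carries the given ClientSession
ReactiveMongoOperations sessionBound = template.withSession(session);

Flux<Document> firstPerson = sessionBound.execute("people",
    collection -> collection.find().limit(1));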
|
||||
|
||||
/**
|
||||
* Initiate a new {@link ClientSession} and obtain a {@link ClientSession session} bound instance of
|
||||
* {@link ReactiveSessionScoped}. Starts the transaction and adds the {@link ClientSession} to each and every command
|
||||
* issued against MongoDB.
|
||||
* <p/>
|
||||
* Each {@link ReactiveSessionScoped#execute(ReactiveSessionCallback) execution} initiates a new managed transaction
|
||||
* that is {@link ClientSession#commitTransaction() committed} on success. Transactions are
|
||||
* {@link ClientSession#abortTransaction() rolled back} upon errors.
|
||||
*
|
||||
* @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}.
|
||||
* @deprecated since 2.2. Use {@code @Transactional} or {@link TransactionalOperator}.
|
||||
*/
|
||||
@Deprecated
|
||||
ReactiveSessionScoped inTransaction();
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped}, start the transaction and
|
||||
* bind the {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against
|
||||
* MongoDB.
|
||||
* <p/>
|
||||
* Each {@link ReactiveSessionScoped#execute(ReactiveSessionCallback) execution} initiates a new managed transaction
|
||||
* that is {@link ClientSession#commitTransaction() committed} on success. Transactions are
|
||||
* {@link ClientSession#abortTransaction() rolled back} upon errors.
|
||||
*
|
||||
* @param sessionProvider must not be {@literal null}.
|
||||
* @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}.
|
||||
* @since 2.1
|
||||
* @deprecated since 2.2. Use {@code @Transactional} or {@link TransactionalOperator}.
|
||||
*/
|
||||
@Deprecated
|
||||
ReactiveSessionScoped inTransaction(Publisher<ClientSession> sessionProvider);
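Since both inTransaction variants are deprecated, a hedged sketch of the recommended TransactionalOperator style (ReactiveMongoTransactionManager and the databaseFactory/Person names are assumptions, not part of this diff):

ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory);
TransactionalOperator rxtx = TransactionalOperator.create(txManager);

// commits on success, rolls back when the pipeline signals an error
Mono<Person> saved = template.save(new Person("Tom")).as(rxtx::transactional);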
|
||||
|
||||
/**
|
||||
* Create an uncapped collection with a name based on the provided entity class.
|
||||
*
|
||||
@@ -286,15 +183,12 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
Flux<String> getCollectionNames();
|
||||
|
||||
/**
|
||||
* Get a {@link MongoCollection} by name. The returned collection may not exist yet (except in local memory) and is
|
||||
* created on first interaction with the server. Collections can be explicitly created via
|
||||
* {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class)
|
||||
* exists} first.
|
||||
* Get a collection by name, creating it if it doesn't exist.
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection.
|
||||
* @return an existing collection or one created on first server interaction.
|
||||
* @return an existing collection or a newly created one.
|
||||
*/
|
||||
MongoCollection<Document> getCollection(String collectionName);
|
||||
|
||||
@@ -483,65 +377,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*/
|
||||
<T> Mono<T> findById(Object id, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and
|
||||
* returns the results in a {@link Flux}.
|
||||
*
|
||||
* @param field the name of the field to inspect for distinct values. Must not be {@literal null}.
|
||||
* @param entityClass the domain type used for determining the actual {@link MongoCollection}. Must not be
|
||||
* {@literal null}.
|
||||
* @param resultClass the result type. Must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
default <T> Flux<T> findDistinct(String field, Class<?> entityClass, Class<T> resultClass) {
|
||||
return findDistinct(new Query(), field, entityClass, resultClass);
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and
|
||||
* returns the results in a {@link Flux}.
|
||||
*
|
||||
* @param query filter {@link Query} to restrict search. Must not be {@literal null}.
|
||||
* @param field the name of the field to inspect for distinct values. Must not be {@literal null}.
|
||||
* @param entityClass the domain type used for determining the actual {@link MongoCollection} and mapping the
|
||||
* {@link Query} to the domain type fields. Must not be {@literal null}.
|
||||
* @param resultClass the result type. Must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
<T> Flux<T> findDistinct(Query query, String field, Class<?> entityClass, Class<T> resultClass);
|
||||
|
||||
/**
|
||||
* Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and
|
||||
* returns the results in a {@link Flux}.
|
||||
*
|
||||
* @param query filter {@link Query} to restrict search. Must not be {@literal null}.
|
||||
* @param field the name of the field to inspect for distinct values. Must not be {@literal null}.
|
||||
* @param collectionName the explicit name of the actual {@link MongoCollection}. Must not be {@literal null}.
|
||||
* @param entityClass the domain type used for mapping the {@link Query} to the domain type fields.
|
||||
* @param resultClass the result type. Must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
<T> Flux<T> findDistinct(Query query, String field, String collectionName, Class<?> entityClass,
|
||||
Class<T> resultClass);
|
||||
|
||||
/**
|
||||
* Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and
|
||||
* returns the results in a {@link Flux}.
|
||||
*
|
||||
* @param query filter {@link Query} to restrict search. Must not be {@literal null}.
|
||||
* @param field the name of the field to inspect for distinct values. Must not be {@literal null}.
|
||||
* @param collection the explicit name of the actual {@link MongoCollection}. Must not be {@literal null}.
|
||||
* @param resultClass the result type. Must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
default <T> Flux<T> findDistinct(Query query, String field, String collection, Class<T> resultClass) {
|
||||
return findDistinct(query, field, collection, Object.class, resultClass);
|
||||
}
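A sketch of the distinct lookup described above (hypothetical Person type; template is a ReactiveMongoTemplate):

// distinct lastname values across the Person collection, mapped to String
Flux<String> lastnames = template.findDistinct(new Query(), "lastname", Person.class, String.class);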
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation.
|
||||
* <p>
|
||||
@@ -624,52 +459,24 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* entity mapping information to determine the collection the query is run against. Note that MongoDB limits the
|
||||
* number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
|
||||
* particular number of results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2 this method uses the {@code $geoNear}
* aggregation to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* Flux<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return the converted {@link GeoResult}s.
|
||||
* @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Note, that MongoDB
|
||||
* limits the number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect
|
||||
* a particular number of results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2 this method uses the {@code $geoNear}
* aggregation to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* Flux<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @param collectionName the collection to trigger the query against. If no collection name is given the entity class
|
||||
* will be inspected.
|
||||
* @return the converted {@link GeoResult}s.
|
||||
* @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
@@ -729,160 +536,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
<T> Mono<T> findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
|
||||
* document. <br />
|
||||
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @return the converted object that was updated or {@link Mono#empty()}, if not found.
|
||||
* @since 2.1
|
||||
*/
|
||||
default <T> Mono<T> findAndReplace(Query query, T replacement) {
|
||||
return findAndReplace(query, replacement, FindAndReplaceOptions.empty());
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
|
||||
* document. <br />
|
||||
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object that was updated or {@link Mono#empty()}, if not found.
|
||||
* @since 2.1
|
||||
*/
|
||||
default <T> Mono<T> findAndReplace(Query query, T replacement, String collectionName) {
|
||||
return findAndReplace(query, replacement, FindAndReplaceOptions.empty(), collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account. <br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of
|
||||
* {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
|
||||
* as it is after the update.
|
||||
* @since 2.1
|
||||
*/
|
||||
default <T> Mono<T> findAndReplace(Query query, T replacement, FindAndReplaceOptions options) {
|
||||
return findAndReplace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement)));
|
||||
}
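A sketch of the findAndReplace variants above (assumes static imports of Query.query and Criteria.where plus a hypothetical Person type):

// replace the first document matching the query and emit the new document state
Mono<Person> replaced = template.findAndReplace(
    query(where("firstname").is("Tom")),
    new Person("Dick"),
    FindAndReplaceOptions.options().returnNew());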
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account. <br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of
|
||||
* {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
|
||||
* as it is after the update.
|
||||
* @since 2.1
|
||||
*/
|
||||
default <T> Mono<T> findAndReplace(Query query, T replacement, FindAndReplaceOptions options, String collectionName) {
|
||||
|
||||
Assert.notNull(replacement, "Replacement must not be null!");
|
||||
return findAndReplace(query, replacement, options, (Class<T>) ClassUtils.getUserClass(replacement), collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account. <br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}.
|
||||
* @param entityType the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of
|
||||
* {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
|
||||
* as it is after the update.
|
||||
* @since 2.1
|
||||
*/
|
||||
default <T> Mono<T> findAndReplace(Query query, T replacement, FindAndReplaceOptions options, Class<T> entityType,
|
||||
String collectionName) {
|
||||
|
||||
return findAndReplace(query, replacement, options, entityType, collectionName, entityType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account. <br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}.
|
||||
* @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection
|
||||
* from. Must not be {@literal null}.
|
||||
* @param resultType the parametrized type projection return type. Must not be {@literal null}, use the domain type or
|
||||
* {@code Object.class} instead.
|
||||
* @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of
|
||||
* {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
|
||||
* as it is after the update.
|
||||
* @since 2.1
|
||||
*/
|
||||
default <S, T> Mono<T> findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class<S> entityType,
|
||||
Class<T> resultType) {
|
||||
|
||||
return findAndReplace(query, replacement, options, entityType,
|
||||
getCollectionName(ClassUtils.getUserClass(entityType)), resultType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account. <br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}.
|
||||
* @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection
|
||||
* from. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @param resultType the parametrized type projection return type. Must not be {@literal null}, use the
|
||||
*          domain type or {@code Object.class} instead.
|
||||
* @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of
|
||||
* {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
|
||||
* as it is after the update.
|
||||
* @since 2.1
|
||||
*/
|
||||
<S, T> Mono<T> findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class<S> entityType,
|
||||
String collectionName, Class<T> resultType);
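(Editorial note: another hedged sketch, not part of the diff; `Person`, `PersonName` and `people` are placeholders.) The terminal overload above is the one that lets you combine `FindAndReplaceOptions` with a different projection result type:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.FindAndReplaceOptions;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import reactor.core.publisher.Mono;

class FindAndReplaceProjectionSketch {

	static class Person { String id, firstname, lastname; } // placeholder domain type
	static class PersonName { String firstname; }           // placeholder projection type

	Mono<PersonName> replaceReturningNewName(ReactiveMongoOperations operations, Person replacement) {

		// returnNew() switches the emitted value to the document as it is after the replacement.
		return operations.findAndReplace(query(where("firstname").is("Jane")), replacement,
				FindAndReplaceOptions.options().returnNew(), Person.class, "people", PersonName.class);
	}
}
```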
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the
|
||||
* specified type. The first document that matches the query is returned and also removed from the collection in the
|
||||
@@ -920,11 +573,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
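(Editorial note: a small sketch of the skip/limit remark above, not part of the diff; `Person` is a placeholder.) Skip and limit are forwarded to the server, so a paged query counts at most one page worth of matches while an unpaged query counts them all:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import reactor.core.publisher.Mono;

class CountSketch {

	static class Person { String id, lastname; } // placeholder domain type

	Mono<Long> countAllSmiths(ReactiveMongoOperations operations) {
		// Unpaged query: counts every matching document.
		return operations.count(query(where("lastname").is("Smith")), Person.class);
	}

	Mono<Long> countFirstPageOfSmiths(ReactiveMongoOperations operations) {
		// limit(10) is passed on to the server, so the result is capped at 10.
		return operations.count(query(where("lastname").is("Smith")).limit(10), Person.class);
	}
}
```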
|
||||
@@ -936,11 +584,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
|
||||
* must solely consist of document field references as we lack type information to map potential property references
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
@@ -951,11 +595,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* class to map the given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -973,14 +613,14 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" >
|
||||
* Spring's Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the inserted object.
|
||||
* @return the saved object.
|
||||
*/
|
||||
<T> Mono<T> insert(T objectToSave);
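(Editorial note: a minimal sketch of the insert-versus-save distinction described above; `Person` is a placeholder.) `insert` is for the initial store, `save` for subsequent updates:

```java
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import reactor.core.publisher.Mono;

class InsertVersusSaveSketch {

	static class Person { String id, firstname; } // placeholder domain type

	Mono<Person> storeThenRename(ReactiveMongoOperations operations) {

		Person person = new Person();
		person.firstname = "Jane";

		return operations.insert(person)                                    // initial store; the generated id is populated
				.map(saved -> { saved.firstname = "Janet"; return saved; }) // mutate the now-persisted object
				.flatMap(operations::save);                                 // save updates the existing document
	}
}
```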
|
||||
|
||||
@@ -994,7 +634,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
* @return the inserted object.
|
||||
* @return the saved object.
|
||||
*/
|
||||
<T> Mono<T> insert(T objectToSave, String collectionName);
|
||||
|
||||
@@ -1003,7 +643,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param batchToSave the batch of objects to save. Must not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
* @return the inserted objects.
|
||||
* @return the saved objects.
|
||||
*/
|
||||
<T> Flux<T> insert(Collection<? extends T> batchToSave, Class<?> entityClass);
|
||||
|
||||
@@ -1012,7 +652,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param batchToSave the list of objects to save. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
* @return the inserted objects.
|
||||
* @return the saved objects.
|
||||
*/
|
||||
<T> Flux<T> insert(Collection<? extends T> batchToSave, String collectionName);
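(Editorial note: a batch-insert sketch, not part of the diff; `Person` and the `people` collection are placeholders.)

```java
import java.util.Arrays;

import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import reactor.core.publisher.Flux;

class BatchInsertSketch {

	static class Person { String id, firstname; } // placeholder domain type

	Flux<Person> insertBatch(ReactiveMongoOperations operations) {

		Person jane = new Person();
		jane.firstname = "Jane";
		Person john = new Person();
		john.firstname = "John";

		// Inserts the whole batch into the "people" collection and emits the saved objects.
		return operations.insert(Arrays.asList(jane, john), "people");
	}
}
```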
|
||||
|
||||
@@ -1033,14 +673,14 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" >
|
||||
* Spring's Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the inserted objects.
|
||||
* @return the saved object.
|
||||
*/
|
||||
<T> Mono<T> insert(Mono<? extends T> objectToSave);
|
||||
|
||||
@@ -1049,7 +689,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param batchToSave the publisher which provides objects to save. Must not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
* @return the inserted objects.
|
||||
* @return the saved objects.
|
||||
*/
|
||||
<T> Flux<T> insertAll(Mono<? extends Collection<? extends T>> batchToSave, Class<?> entityClass);
|
||||
|
||||
@@ -1058,7 +698,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param batchToSave the publisher which provides objects to save. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
* @return the inserted objects.
|
||||
* @return the saved objects.
|
||||
*/
|
||||
<T> Flux<T> insertAll(Mono<? extends Collection<? extends T>> batchToSave, String collectionName);
|
||||
|
||||
@@ -1067,7 +707,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* class.
|
||||
*
|
||||
* @param objectsToSave the publisher which provides objects to save. Must not be {@literal null}.
|
||||
* @return the inserted objects.
|
||||
* @return the saved objects.
|
||||
*/
|
||||
<T> Flux<T> insertAll(Mono<? extends Collection<? extends T>> objectsToSave);
|
||||
|
||||
@@ -1081,8 +721,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" >
|
||||
* Spring's Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1099,8 +739,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
@@ -1118,8 +758,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" >
|
||||
* Spring's Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1136,8 +776,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
@@ -1409,115 +1049,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*/
|
||||
<T> Flux<T> tail(Query query, Class<T> entityClass, String collectionName);
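(Editorial note: a usage sketch for the tailable-cursor variant above; it assumes `logs` is a capped collection, which tailable cursors require, and `LogEntry` is a placeholder type.)

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import reactor.core.publisher.Flux;

class TailSketch {

	static class LogEntry { String id, level, message; } // placeholder type

	Flux<LogEntry> tailErrors(ReactiveMongoOperations operations) {
		// Keeps emitting documents as they are appended to the capped "logs" collection until the subscription is cancelled.
		return operations.tail(query(where("level").is("ERROR")), LogEntry.class, "logs");
	}
}
```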
|
||||
|
||||
/**
|
||||
* Subscribe to a MongoDB <a href="https://docs.mongodb.com/manual/changeStreams/">Change Stream</a> for all events in
|
||||
* the configured default database via the reactive infrastructure. Use the optional provided {@link Aggregation} to
|
||||
* filter events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is
|
||||
* {@link Subscription#cancel() canceled}.
|
||||
* <p />
|
||||
* The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the
|
||||
* {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
|
||||
* <p />
|
||||
* Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumeToken}
|
||||
* for resuming change streams.
|
||||
*
|
||||
* @param options must not be {@literal null}. Use {@link ChangeStreamOptions#empty()}.
|
||||
* @param targetType the result type to use.
|
||||
* @param <T>
|
||||
* @return the {@link Flux} emitting {@link ChangeStreamEvent events} as they arrive.
|
||||
* @since 2.1
|
||||
* @see ReactiveMongoDatabaseFactory#getMongoDatabase()
|
||||
* @see ChangeStreamOptions#getFilter()
|
||||
*/
|
||||
default <T> Flux<ChangeStreamEvent<T>> changeStream(ChangeStreamOptions options, Class<T> targetType) {
|
||||
return changeStream(null, options, targetType);
|
||||
}
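(Editorial note: a minimal sketch of the defaulted change-stream entry point above; `Person` is a placeholder.)

```java
import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ChangeStreamOptions;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import reactor.core.publisher.Flux;

class ChangeStreamSketch {

	static class Person { String id, firstname; } // placeholder type

	Flux<ChangeStreamEvent<Person>> watchDefaultDatabase(ReactiveMongoOperations operations) {
		// Watches all collections of the configured default database; the stream stays open until the Subscription is cancelled.
		return operations.changeStream(ChangeStreamOptions.empty(), Person.class);
	}
}
```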
|
||||
|
||||
/**
|
||||
* Subscribe to a MongoDB <a href="https://docs.mongodb.com/manual/changeStreams/">Change Stream</a> for all events in
|
||||
* the given collection via the reactive infrastructure. Use the optional provided {@link Aggregation} to filter
|
||||
* events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is
|
||||
* {@link Subscription#cancel() canceled}.
|
||||
* <p />
|
||||
* The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the
|
||||
* {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
|
||||
* <p />
|
||||
* Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumeToken}
|
||||
* for resuming change streams.
|
||||
*
|
||||
* @param collectionName the collection to watch. Can be {@literal null} to watch all collections.
|
||||
* @param options must not be {@literal null}. Use {@link ChangeStreamOptions#empty()}.
|
||||
* @param targetType the result type to use.
|
||||
* @param <T>
|
||||
* @return the {@link Flux} emitting {@link ChangeStreamEvent events} as they arrive.
|
||||
* @since 2.1
|
||||
* @see ChangeStreamOptions#getFilter()
|
||||
*/
|
||||
default <T> Flux<ChangeStreamEvent<T>> changeStream(@Nullable String collectionName, ChangeStreamOptions options,
|
||||
Class<T> targetType) {
|
||||
|
||||
return changeStream(null, collectionName, options, targetType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Subscribe to a MongoDB <a href="https://docs.mongodb.com/manual/changeStreams/">Change Stream</a> via the reactive
|
||||
* infrastructure. Use the optional provided {@link Aggregation} to filter events. The stream will not be completed
|
||||
* unless the {@link org.reactivestreams.Subscription} is {@link Subscription#cancel() canceled}.
|
||||
* <p />
|
||||
* The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the
|
||||
* {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
|
||||
* <p />
|
||||
* Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumeToken}
|
||||
* for resuming change streams.
|
||||
*
|
||||
* @param database the database to watch. Can be {@literal null}, uses configured default if so.
|
||||
* @param collectionName the collection to watch. Can be {@literal null}, watches all collections if so.
|
||||
* @param options must not be {@literal null}. Use {@link ChangeStreamOptions#empty()}.
|
||||
* @param targetType the result type to use.
|
||||
* @param <T>
|
||||
* @return the {@link Flux} emitting {@link ChangeStreamEvent events} as they arrive.
|
||||
* @since 2.1
|
||||
* @see ChangeStreamOptions#getFilter()
|
||||
*/
|
||||
<T> Flux<ChangeStreamEvent<T>> changeStream(@Nullable String database, @Nullable String collectionName,
|
||||
ChangeStreamOptions options, Class<T> targetType);
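(Editorial note: a filtered change-stream sketch against an explicit database and collection; the builder-style `ChangeStreamOptions` construction and the `my-database`/`people` names are assumptions for illustration, not taken from this diff.)

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.match;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ChangeStreamOptions;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import reactor.core.publisher.Flux;

class FilteredChangeStreamSketch {

	static class Person { String id, firstname; } // placeholder type

	Flux<ChangeStreamEvent<Person>> watchInserts(ReactiveMongoOperations operations) {

		ChangeStreamOptions options = ChangeStreamOptions.builder()
				.filter(newAggregation(match(where("operationType").is("insert")))) // server-side filter on the event stream
				.build();

		// Pass null for database and/or collection to fall back to the defaults described above.
		return operations.changeStream("my-database", "people", options, Person.class);
	}
}
```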
|
||||
|
||||
/**
|
||||
* Execute a map-reduce operation. Use {@link MapReduceOptions} to optionally specify an output collection and other
|
||||
* args.
|
||||
*
|
||||
* @param filterQuery the selection criteria for the documents going into the map function. Must not be
|
||||
* {@literal null}.
|
||||
* @param domainType source type used to determine the input collection name and map the filter {@link Query} against.
|
||||
* Must not be {@literal null}.
|
||||
* @param resultType the mapping target of the operations result documents. Must not be {@literal null}.
|
||||
* @param mapFunction the JavaScript map function. Must not be {@literal null}.
|
||||
* @param reduceFunction the JavaScript reduce function. Must not be {@literal null}.
|
||||
* @param options additional options like output collection. Must not be {@literal null}.
|
||||
* @return a {@link Flux} emitting the result document sequence. Never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
<T> Flux<T> mapReduce(Query filterQuery, Class<?> domainType, Class<T> resultType, String mapFunction,
|
||||
String reduceFunction, MapReduceOptions options);
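(Editorial note: a map-reduce usage sketch; `Order`, `OrdersPerCustomer` and the JavaScript functions are placeholders.)

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
import reactor.core.publisher.Flux;

class MapReduceSketch {

	static class Order { String id, customerId; }              // placeholder input type
	static class OrdersPerCustomer { String id; long value; }  // placeholder result type (_id/value of the map-reduce output)

	Flux<OrdersPerCustomer> countOrdersPerCustomer(ReactiveMongoOperations operations) {

		String map = "function() { emit(this.customerId, 1); }";
		String reduce = "function(key, values) { return Array.sum(values); }";

		return operations.mapReduce(query(where("customerId").exists(true)), Order.class, OrdersPerCustomer.class,
				map, reduce, MapReduceOptions.options());
	}
}
```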
|
||||
|
||||
/**
|
||||
* Execute a map-reduce operation. Use {@link MapReduceOptions} to optionally specify an output collection and other
|
||||
* args.
|
||||
*
|
||||
* @param filterQuery the selection criteria for the documents going into the map function. Must not be
|
||||
* {@literal null}.
|
||||
* @param domainType source type used to map the filter {@link Query} against. Must not be {@literal null}.
|
||||
* @param inputCollectionName the input collection.
|
||||
* @param resultType the mapping target of the operations result documents. Must not be {@literal null}.
|
||||
* @param mapFunction the JavaScript map function. Must not be {@literal null}.
|
||||
* @param reduceFunction the JavaScript reduce function. Must not be {@literal null}.
|
||||
* @param options additional options like output collection. Must not be {@literal null}.
|
||||
* @return a {@link Flux} emitting the result document sequence. Never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
<T> Flux<T> mapReduce(Query filterQuery, Class<?> domainType, String inputCollectionName, Class<T> resultType,
|
||||
String mapFunction, String reduceFunction, MapReduceOptions options);
|
||||
|
||||
/**
|
||||
* Returns the underlying {@link MongoConverter}.
|
||||
*
|
||||
@@ -1525,13 +1056,4 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*/
|
||||
MongoConverter getConverter();
|
||||
|
||||
/**
|
||||
* The collection name used for the specified class by this template.
|
||||
*
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return the name of the collection the given entity class is mapped to.
|
||||
* @since 2.1
|
||||
*/
|
||||
String getCollectionName(Class<?> entityClass);
|
||||
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,47 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.reactivestreams.Publisher;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
/**
|
||||
* Callback interface for executing operations within a {@link com.mongodb.reactivestreams.client.ClientSession} using
|
||||
* reactive infrastructure.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
* @see com.mongodb.reactivestreams.client.ClientSession
|
||||
*/
|
||||
@FunctionalInterface
|
||||
public interface ReactiveSessionCallback<T> {
|
||||
|
||||
/**
|
||||
* Execute operations against a MongoDB instance via session bound {@link ReactiveMongoOperations}. The session is
|
||||
* inferred directly into the operation so that no further interaction is necessary.
|
||||
* <p />
|
||||
* Please note that only Spring Data-specific abstractions like {@link ReactiveMongoOperations#find(Query, Class)} and
|
||||
* others are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway
|
||||
* objects like {@link com.mongodb.reactivestreams.client.MongoCollection} or
|
||||
* {@link com.mongodb.reactivestreams.client.MongoDatabase} via e.g.
|
||||
* {@link ReactiveMongoOperations#getCollection(String)} we leave responsibility for
|
||||
* {@link com.mongodb.session.ClientSession} again up to the caller.
|
||||
*
|
||||
* @param operations will never be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Publisher<T> doInSession(ReactiveMongoOperations operations);
|
||||
}
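(Editorial note: since `ReactiveSessionCallback` is a functional interface, a lambda is enough; how the callback ends up being invoked, e.g. through a `withSession`-style entry point on the template, is outside this file and is assumed here for illustration. `Person` is a placeholder.)

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.ReactiveSessionCallback;

class SessionCallbackSketch {

	static class Person { String id, firstname; } // placeholder type

	// All template calls made inside the callback run against the session-bound ReactiveMongoOperations.
	ReactiveSessionCallback<Person> findJane() {
		return operations -> operations.find(query(where("firstname").is("Jane")), Person.class);
	}
}
```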
|
||||
@@ -1,62 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
|
||||
/**
|
||||
* Gateway interface to execute {@link ClientSession} bound operations against MongoDB via a
|
||||
* {@link ReactiveSessionCallback}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
public interface ReactiveSessionScoped {
|
||||
|
||||
/**
|
||||
* Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}.
|
||||
* <p/>
|
||||
* It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close()
|
||||
* closed} when done.
|
||||
*
|
||||
* @param action callback object that specifies the MongoDB action to execute. Must not be {@literal null}.
|
||||
* @param <T> return type.
|
||||
* @return a result object returned by the action, can be {@link Flux#empty()}.
|
||||
*/
|
||||
default <T> Flux<T> execute(ReactiveSessionCallback<T> action) {
|
||||
return execute(action, (session) -> {});
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}.
|
||||
* <p/>
|
||||
* It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close()
|
||||
* closed} when done.
|
||||
*
|
||||
* @param action callback object that specifies the MongoDB action to execute. Must not be {@literal null}.
|
||||
* @param doFinally callback object that accepts {@link ClientSession} after invoking {@link ReactiveSessionCallback}.
|
||||
* This {@link Consumer} is guaranteed to be notified in any case (successful and exceptional outcome of
|
||||
* {@link ReactiveSessionCallback}).
|
||||
* @param <T> return type.
|
||||
* @return a result object returned by the action, can be {@link Flux#empty()}.
|
||||
*/
|
||||
<T> Flux<T> execute(ReactiveSessionCallback<T> action, Consumer<ClientSession> doFinally);
|
||||
}
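(Editorial note: a usage sketch for `execute` with a `doFinally` consumer; obtaining the `ReactiveSessionScoped` instance, typically from the template, is assumed. Closing the session in `doFinally` follows the contract spelled out above. `Person` is a placeholder.)

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.ReactiveSessionScoped;
import reactor.core.publisher.Flux;

import com.mongodb.reactivestreams.client.ClientSession;

class SessionScopedSketch {

	static class Person { String id, firstname; } // placeholder type

	Flux<Person> findJaneInSession(ReactiveSessionScoped sessionScoped) {
		// doFinally is notified for success and error alike, so the session is closed either way.
		return sessionScoped.execute(
				operations -> operations.find(query(where("firstname").is("Jane")), Person.class),
				ClientSession::close);
	}
}
```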
|
||||
Some files were not shown because too many files have changed in this diff