Compare commits
152 Commits
| SHA1 |
|---|
| 63dfb59a3f |
| d33ee2ffac |
| 59388d99cc |
| ee6048e289 |
| 9a062d53f3 |
| a3c5b07eb7 |
| 40d30a230d |
| 10116f7c93 |
| 705203c898 |
| 8fb9d9e5f4 |
| c23c5ae6c6 |
| e67bacf66c |
| 617dbdac3f |
| 8ad4f4b71b |
| 9048ec83af |
| b2e3e3fb8e |
| e9a2b84af5 |
| 5b8be281fb |
| 66b318dabe |
| b78569374a |
| 79921a7260 |
| bcd8f242e5 |
| 73fab14b21 |
| d4505880c7 |
| a54b91392e |
| 5dc7e7c65f |
| 8802e2c36f |
| d8189620d2 |
| fd6411f0ed |
| 445b9c83de |
| 4e13bda302 |
| e634f2f7c0 |
| 0bb2b8785d |
| 9505112670 |
| 8167f9d199 |
| e6bd8b3ee3 |
| e33c8a3b76 |
| 34425c54db |
| 7d9c08409b |
| 69a4217c4b |
| 0a0ee417ac |
| 2365dba8d9 |
| 4b22558fe7 |
| 75847981c3 |
| 0e32b7356c |
| c2436bcdfc |
| 7c25675cbb |
| 0145b68f61 |
| c7ea0782df |
| c3ef62a7be |
| 0eebf64ec3 |
| bdd2b236e8 |
| 804106ec9a |
| b3e70475eb |
| 1f146c3b5c |
| d49b1c33b7 |
| da0cb6d766 |
| 56ac8397aa |
| 945d3b0085 |
| fad18341fa |
| 394efa8b82 |
| 880b3c6ee0 |
| 4307c46619 |
| 86bcfde568 |
| 5b3a9434f1 |
| f2ac413c61 |
| 4e44d07402 |
| 1dc6a04d74 |
| 513e6a0111 |
| 8ca16e1cb4 |
| 296e97903d |
| 617828533e |
| c0a22667b9 |
| ada6eb814e |
| 4a17048ec6 |
| 06018fa3de |
| 8b406b23ff |
| a3ef9b5856 |
| bb280bd59b |
| e24c5e0846 |
| 839aa1b1e6 |
| ff7a189c98 |
| de144a6b0d |
| bfe514acd7 |
| 87343d2ae5 |
| 5e452e1be0 |
| 8ce8a8307a |
| df94214527 |
| 31f8a63a17 |
| 888054bb2a |
| 29bf74c24c |
| 1798678e4d |
| bd78ccede9 |
| 349b87a2c4 |
| 63ed62b988 |
| 8c1a7cc163 |
| 597354ea7e |
| c09188561c |
| c2ea595b8c |
| f7731c7cf1 |
| a7e6b26796 |
| 45bd6d544d |
| d819028e46 |
| b353bb6165 |
| 843ed64b87 |
| f5c00b6978 |
| 05d585896c |
| d2999b0918 |
| 2a6107fcb6 |
| d937460351 |
| 89843a1488 |
| e06f326de3 |
| 684e24aff5 |
| 7a22d697cf |
| 505ca4d2c4 |
| b01b25ef93 |
| 60cb8c7de7 |
| b4bc95ce5f |
| b48ff3c38b |
| 4ab61bd4d4 |
| 24f23c5365 |
| 8d2a68118b |
| 8ce3e45d60 |
| f456851791 |
| 221ffb1947 |
| c3a5454d31 |
| 207c82a50b |
| 7596881a06 |
| 3ea670fb3b |
| 8df1c88db1 |
| a34e8591de |
| ebc9e3ba3a |
| 74368dd0e8 |
| fff971e469 |
| 352f9ad372 |
| 89624518c9 |
| 8e02fd0329 |
| 0b49f47230 |
| 5c10a5821b |
| c15ab4c946 |
| c3769df07f |
| 33d69abd53 |
| e683c8f08f |
| ac1873a163 |
| 8b7071e6ae |
| b8368a677d |
| 5c333d6159 |
| 7f4d3f27e6 |
| a30ee07b4a |
| cc45b4e081 |
| 0d0d17ebf6 |
| b53caaf69d |
110 .mvn/wrapper/MavenWrapperDownloader.java vendored
@@ -1,110 +0,0 @@
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

https://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/

import java.net.*;
import java.io.*;
import java.nio.channels.*;
import java.util.Properties;

public class MavenWrapperDownloader {

	/**
	 * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
	 */
	private static final String DEFAULT_DOWNLOAD_URL =
			"https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar";

	/**
	 * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
	 * use instead of the default one.
	 */
	private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
			".mvn/wrapper/maven-wrapper.properties";

	/**
	 * Path where the maven-wrapper.jar will be saved to.
	 */
	private static final String MAVEN_WRAPPER_JAR_PATH =
			".mvn/wrapper/maven-wrapper.jar";

	/**
	 * Name of the property which should be used to override the default download url for the wrapper.
	 */
	private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";

	public static void main(String args[]) {
		System.out.println("- Downloader started");
		File baseDirectory = new File(args[0]);
		System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());

		// If the maven-wrapper.properties exists, read it and check if it contains a custom
		// wrapperUrl parameter.
		File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
		String url = DEFAULT_DOWNLOAD_URL;
		if(mavenWrapperPropertyFile.exists()) {
			FileInputStream mavenWrapperPropertyFileInputStream = null;
			try {
				mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
				Properties mavenWrapperProperties = new Properties();
				mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
				url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
			} catch (IOException e) {
				System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
			} finally {
				try {
					if(mavenWrapperPropertyFileInputStream != null) {
						mavenWrapperPropertyFileInputStream.close();
					}
				} catch (IOException e) {
					// Ignore ...
				}
			}
		}
		System.out.println("- Downloading from: : " + url);

		File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
		if(!outputFile.getParentFile().exists()) {
			if(!outputFile.getParentFile().mkdirs()) {
				System.out.println(
						"- ERROR creating output direcrory '" + outputFile.getParentFile().getAbsolutePath() + "'");
			}
		}
		System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
		try {
			downloadFileFromURL(url, outputFile);
			System.out.println("Done");
			System.exit(0);
		} catch (Throwable e) {
			System.out.println("- Error downloading");
			e.printStackTrace();
			System.exit(1);
		}
	}

	private static void downloadFileFromURL(String urlString, File destination) throws Exception {
		URL website = new URL(urlString);
		ReadableByteChannel rbc;
		rbc = Channels.newChannel(website.openStream());
		FileOutputStream fos = new FileOutputStream(destination);
		fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
		fos.close();
		rbc.close();
	}

}
@@ -16,9 +16,12 @@ before_install:

env:
  matrix:
    - PROFILE=ci
    - MONGO_VERSION=4.1.10
    - MONGO_VERSION=4.0.4
    - MONGO_VERSION=3.6.12
    - MONGO_VERSION=3.4.20
  global:
    - MONGO_VERSION=4.0.0
    - PROFILE=ci

addons:
  apt:

43 CI.adoc Normal file
@@ -0,0 +1,43 @@
= Continuous Integration

image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Moore%20(master)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F2.1.x&subject=Lovelace%20(2.1.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F1.10.x&subject=Ingalls%20(1.10.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]

== Running CI tasks locally

Since this pipeline is purely Docker-based, it's easy to:

* Debug what went wrong on your local machine.
* Test out a tweak to your test routine before sending it out.
* Experiment against a new image before submitting your pull request.

All of these use cases are great reasons to essentially run what the CI server does on your local machine.

IMPORTANT: To do this you must have Docker installed on your machine.

1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk8-with-mongodb-4.0:latest /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+
2. `cd spring-data-mongodb-github`
+
Next, run the tests from inside the container:
+
3. `./mvnw clean dependency:list test -Dsort -Dbundlor.enabled=false -B` (or with whatever profile you need to test out)

Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs.

If you need to package things up, do this:

1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk8-with-mongodb-4.0:latest /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+
2. `cd spring-data-mongodb-github`
+
Next, package things from inside the container doing this:
+
3. `./mvnw clean dependency:list package -Dsort -Dbundlor.enabled=false -B`

NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.

219 Jenkinsfile vendored Normal file
@@ -0,0 +1,219 @@
pipeline {
	agent none

	triggers {
		pollSCM 'H/10 * * * *'
		upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS)
	}

	options {
		disableConcurrentBuilds()
		buildDiscarder(logRotator(numToKeepStr: '14'))
	}

	stages {
		stage("Docker images") {
			parallel {
				stage('Publish JDK 8 + MongoDB 4.0') {
					when {
						changeset "ci/openjdk8-mongodb-4.0/**"
					}
					agent { label 'data' }
					options { timeout(time: 30, unit: 'MINUTES') }

					steps {
						script {
							def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.0", "ci/openjdk8-mongodb-4.0/")
							docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
								image.push()
							}
						}
					}
				}
				stage('Publish JDK 8 + MongoDB 4.1') {
					when {
						changeset "ci/openjdk8-mongodb-4.1/**"
					}
					agent { label 'data' }
					options { timeout(time: 30, unit: 'MINUTES') }

					steps {
						script {
							def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.1", "ci/openjdk8-mongodb-4.1/")
							docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
								image.push()
							}
						}
					}
				}
				stage('Publish JDK 8 + MongoDB 4.2') {
					when {
						changeset "ci/openjdk8-mongodb-4.2/**"
					}
					agent { label 'data' }
					options { timeout(time: 30, unit: 'MINUTES') }

					steps {
						script {
							def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.2", "ci/openjdk8-mongodb-4.2/")
							docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
								image.push()
							}
						}
					}
				}
			}
		}

		stage("test: baseline") {
			when {
				anyOf {
					branch 'master'
					not { triggeredBy 'UpstreamCause' }
				}
			}
			agent {
				docker {
					image 'springci/spring-data-openjdk8-with-mongodb-4.2:latest'
					label 'data'
					args '-v $HOME:/tmp/jenkins-home'
				}
			}
			options { timeout(time: 30, unit: 'MINUTES') }
			steps {
				sh 'rm -rf ?'
				sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
				sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
				sh 'sleep 10'
				sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
				sh 'sleep 15'
				sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -U -B'
			}
		}

		stage("Test other configurations") {
			when {
				anyOf {
					branch 'master'
					not { triggeredBy 'UpstreamCause' }
				}
			}
			parallel {
				stage("test: mongodb 4.0") {
					agent {
						docker {
							image 'springci/spring-data-openjdk8-with-mongodb-4.0:latest'
							label 'data'
							args '-v $HOME:/tmp/jenkins-home'
						}
					}
					options { timeout(time: 30, unit: 'MINUTES') }
					steps {
						sh 'rm -rf ?'
						sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
						sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
						sh 'sleep 10'
						sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
						sh 'sleep 15'
						sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -U -B'
					}
				}
				stage("test: mongodb 4.1") {
					agent {
						docker {
							image 'springci/spring-data-openjdk8-with-mongodb-4.1:latest'
							label 'data'
							args '-v $HOME:/tmp/jenkins-home'
						}
					}
					options { timeout(time: 30, unit: 'MINUTES') }
					steps {
						sh 'rm -rf ?'
						sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
						sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
						sh 'sleep 10'
						sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
						sh 'sleep 15'
						sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -U -B'
					}
				}
			}
		}

		stage('Release to artifactory') {
			when {
				anyOf {
					branch 'master'
					not { triggeredBy 'UpstreamCause' }
				}
			}
			agent {
				docker {
					image 'adoptopenjdk/openjdk8:latest'
					label 'data'
					args '-v $HOME:/tmp/jenkins-home'
				}
			}
			options { timeout(time: 20, unit: 'MINUTES') }

			environment {
				ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
			}

			steps {
				sh 'rm -rf ?'
				sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
						'-Dartifactory.server=https://repo.spring.io ' +
						"-Dartifactory.username=${ARTIFACTORY_USR} " +
						"-Dartifactory.password=${ARTIFACTORY_PSW} " +
						"-Dartifactory.staging-repository=libs-snapshot-local " +
						"-Dartifactory.build-name=spring-data-mongodb " +
						"-Dartifactory.build-number=${BUILD_NUMBER} " +
						'-Dmaven.test.skip=true clean deploy -U -B'
			}
		}

		stage('Publish documentation') {
			when {
				branch 'master'
			}
			agent {
				docker {
					image 'adoptopenjdk/openjdk8:latest'
					label 'data'
					args '-v $HOME:/tmp/jenkins-home'
				}
			}
			options { timeout(time: 20, unit: 'MINUTES') }

			environment {
				ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
			}

			steps {
				sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,distribute ' +
						'-Dartifactory.server=https://repo.spring.io ' +
						"-Dartifactory.username=${ARTIFACTORY_USR} " +
						"-Dartifactory.password=${ARTIFACTORY_PSW} " +
						"-Dartifactory.distribution-repository=temp-private-local " +
						'-Dmaven.test.skip=true clean deploy -U -B'
			}
		}
	}

	post {
		changed {
			script {
				slackSend(
						color: (currentBuild.currentResult == 'SUCCESS') ? 'good' : 'danger',
						channel: '#spring-data-dev',
						message: "${currentBuild.fullDisplayName} - `${currentBuild.currentResult}`\n${env.BUILD_URL}")
				emailext(
						subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}",
						mimeType: 'text/html',
						recipientProviders: [[$class: 'CulpritsRecipientProvider'], [$class: 'RequesterRecipientProvider']],
						body: "<a href=\"${env.BUILD_URL}\">${currentBuild.fullDisplayName} is reported as ${currentBuild.currentResult}</a>")
			}
		}
	}
}

159 README.adoc Normal file
@@ -0,0 +1,159 @@
image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start]

= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]

The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.

The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities.
The Spring Data MongoDB project provides integration with the MongoDB document database.
Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB `+Document+` and easily writing a repository style data access layer.

== Code of Conduct

This project is governed by the link:CODE_OF_CONDUCT.adoc[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io.

== Getting Started

Here is a quick teaser of an application using Spring Data Repositories in Java:

[source,java]
----
public interface PersonRepository extends CrudRepository<Person, Long> {

	List<Person> findByLastname(String lastname);

	List<Person> findByFirstnameLike(String firstname);
}

@Service
public class MyService {

	private final PersonRepository repository;

	public MyService(PersonRepository repository) {
		this.repository = repository;
	}

	public void doWork() {

		repository.deleteAll();

		Person person = new Person();
		person.setFirstname("Oliver");
		person.setLastname("Gierke");
		repository.save(person);

		List<Person> lastNameResults = repository.findByLastname("Gierke");
		List<Person> firstNameResults = repository.findByFirstnameLike("Oli*");
	}
}

@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoConfiguration {

	@Override
	public MongoClient mongoClient() {
		return new MongoClient();
	}

	@Override
	protected String getDatabaseName() {
		return "springdata";
	}
}
----

=== Maven configuration

Add the Maven dependency:

[source,xml]
----
<dependency>
	<groupId>org.springframework.data</groupId>
	<artifactId>spring-data-mongodb</artifactId>
	<version>${version}.RELEASE</version>
</dependency>
----

If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version.

[source,xml]
----
<dependency>
	<groupId>org.springframework.data</groupId>
	<artifactId>spring-data-mongodb</artifactId>
	<version>${version}.BUILD-SNAPSHOT</version>
</dependency>

<repository>
	<id>spring-libs-snapshot</id>
	<name>Spring Snapshot Repository</name>
	<url>https://repo.spring.io/libs-snapshot</url>
</repository>
----

== Getting Help

Having trouble with Spring Data? We’d love to help!

* Check the
https://docs.spring.io/spring-data/mongodb/docs/current/reference/html/[reference documentation], and https://docs.spring.io/spring-data/mongodb/docs/current/api/[Javadocs].
* Learn the Spring basics – Spring Data builds on Spring Framework, check the https://spring.io[spring.io] web-site for a wealth of reference documentation.
If you are just starting out with Spring, try one of the https://spring.io/guides[guides].
* If you are upgrading, check out the https://docs.spring.io/spring-data/mongodb/docs/current/changelog.txt[changelog] for "`new and noteworthy`" features.
* Ask a question - we monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-data[`spring-data-mongodb`].
You can also chat with the community on https://gitter.im/spring-projects/spring-data[Gitter].
* Report bugs with Spring Data MongoDB at https://jira.spring.io/browse/DATAMONGO[jira.spring.io/browse/DATAMONGO].

== Reporting Issues

Spring Data uses JIRA as issue tracking system to record bugs and feature requests. If you want to raise an issue, please follow the recommendations below:

* Before you log a bug, please search the
https://jira.spring.io/browse/DATAMONGO[issue tracker] to see if someone has already reported the problem.
* If the issue doesn’t already exist, https://jira.spring.io/browse/DATAMONGO[create a new issue].
* Please provide as much information as possible with the issue report, we like to know the version of Spring Data that you are using and JVM version.
* If you need to paste code, or include a stack trace use JIRA `{code}…{code}` escapes before and after your text.
* If possible try to create a test-case or project that replicates the issue. Attach a link to your code or a compressed file containing your code.

== Building from Source

You don’t need to build from source to use Spring Data (binaries in https://repo.spring.io[repo.spring.io]), but if you want to try out the latest and greatest, Spring Data can be easily built with the https://github.com/takari/maven-wrapper[maven wrapper].
You also need JDK 1.8.

[source,bash]
----
$ ./mvnw clean install
----

If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.5.0 or above].

_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular please sign the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._

=== Building reference documentation

Building the documentation builds also the project without running tests.

[source,bash]
----
$ ./mvnw clean install -Pdistribute
----

The generated documentation is available from `target/site/reference/html/index.html`.

== Guides

The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step:

* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories.
* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories.

== Examples

* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail.

== License

Spring Data MongoDB is Open Source software released under the https://www.apache.org/licenses/LICENSE-2.0.html[Apache 2.0 license].

186 README.md
@@ -1,186 +0,0 @@
[![Spring Data MongoDB](https://spring.io/badges/spring-data-mongodb/ga.svg)](https://projects.spring.io/spring-data-mongodb#quick-start)
[![Spring Data MongoDB](https://spring.io/badges/spring-data-mongodb/snapshot.svg)](https://projects.spring.io/spring-data-mongodb#quick-start)

# Spring Data MongoDB

The primary goal of the [Spring Data](https://projects.spring.io/spring-data) project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.

The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities. The Spring Data MongoDB project provides integration with the MongoDB document database. Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB DBCollection and easily writing a repository style data access layer.

## Getting Help

For a comprehensive treatment of all the Spring Data MongoDB features, please refer to:

* the [User Guide](https://docs.spring.io/spring-data/mongodb/docs/current/reference/html/)
* the [JavaDocs](https://docs.spring.io/spring-data/mongodb/docs/current/api/) have extensive comments in them as well.
* the home page of [Spring Data MongoDB](https://projects.spring.io/spring-data-mongodb) contains links to articles and other resources.
* for more detailed questions, use [Spring Data Mongodb on Stackoverflow](https://stackoverflow.com/questions/tagged/spring-data-mongodb).

If you are new to Spring as well as to Spring Data, look for information about [Spring projects](https://projects.spring.io/).

## Quick Start

### Maven configuration

Add the Maven dependency:

```xml
<dependency>
	<groupId>org.springframework.data</groupId>
	<artifactId>spring-data-mongodb</artifactId>
	<version>${version}.RELEASE</version>
</dependency>
```

If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version.

```xml
<dependency>
	<groupId>org.springframework.data</groupId>
	<artifactId>spring-data-mongodb</artifactId>
	<version>${version}.BUILD-SNAPSHOT</version>
</dependency>

<repository>
	<id>spring-libs-snapshot</id>
	<name>Spring Snapshot Repository</name>
	<url>https://repo.spring.io/libs-snapshot</url>
</repository>
```

### MongoTemplate

MongoTemplate is the central support class for Mongo database operations. It provides:

* Basic POJO mapping support to and from BSON
* Convenience methods to interact with the store (insert object, update objects) and MongoDB specific ones (geo-spatial operations, upserts, map-reduce etc.)
* Connection affinity callback
* Exception translation into Spring's [technology agnostic DAO exception hierarchy](https://docs.spring.io/spring/docs/current/spring-framework-reference/html/dao.html#dao-exceptions).

### Spring Data repositories

To simplify the creation of data repositories Spring Data MongoDB provides a generic repository programming model. It will automatically create a repository proxy for you that adds implementations of finder methods you specify on an interface.

For example, given a `Person` class with first and last name properties, a `PersonRepository` interface that can query for `Person` by last name and when the first name matches a like expression is shown below:

```java
public interface PersonRepository extends CrudRepository<Person, Long> {

	List<Person> findByLastname(String lastname);

	List<Person> findByFirstnameLike(String firstname);
}
```

The queries issued on execution will be derived from the method name. Extending `CrudRepository` causes CRUD methods being pulled into the interface so that you can easily save and find single entities and collections of them.

You can have Spring automatically create a proxy for the interface by using the following JavaConfig:

```java
@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoConfiguration {

	@Override
	public MongoClient mongoClient() throws Exception {
		return new MongoClient();
	}

	@Override
	protected String getDatabaseName() {
		return "springdata";
	}
}
```

This sets up a connection to a local MongoDB instance and enables the detection of Spring Data repositories (through `@EnableMongoRepositories`). The same configuration would look like this in XML:

```xml
<bean id="template" class="org.springframework.data.mongodb.core.MongoTemplate">
	<constructor-arg>
		<bean class="com.mongodb.MongoClient">
			<constructor-arg value="localhost" />
			<constructor-arg value="27017" />
		</bean>
	</constructor-arg>
	<constructor-arg value="database" />
</bean>

<mongo:repositories base-package="com.acme.repository" />
```

This will find the repository interface and register a proxy object in the container. You can use it as shown below:

```java
@Service
public class MyService {

	private final PersonRepository repository;

	@Autowired
	public MyService(PersonRepository repository) {
		this.repository = repository;
	}

	public void doWork() {

		repository.deleteAll();

		Person person = new Person();
		person.setFirstname("Oliver");
		person.setLastname("Gierke");
		person = repository.save(person);

		List<Person> lastNameResults = repository.findByLastname("Gierke");
		List<Person> firstNameResults = repository.findByFirstnameLike("Oli*");
	}
}
```

### MongoDB 4.0 Transactions

As of version 4 MongoDB supports [Transactions](https://www.mongodb.com/transactions). Transactions are built on top of
`ClientSessions` and therefore require an active session.

`MongoTransactionManager` is the gateway to the well known Spring transaction support. It allows applications to use
[managed transaction features of Spring](https://docs.spring.io/spring/docs/current/spring-framework-reference/html/transaction.html).
The `MongoTransactionManager` binds a `ClientSession` to the thread. `MongoTemplate` automatically detects those and operates on them accordingly.

```java
@Configuration
static class Config extends AbstractMongoConfiguration {

	@Bean
	MongoTransactionManager transactionManager(MongoDbFactory dbFactory) {
		return new MongoTransactionManager(dbFactory);
	}

	// ...
}

@Component
public class StateService {

	@Transactional
	void someBusinessFunction(Step step) {

		template.insert(step);

		process(step);

		template.update(Step.class).apply(Update.set("state", // ...
	};
});
```

## Contributing to Spring Data

Here are some ways for you to get involved in the community:

* Get involved with the Spring community on Stackoverflow and help out on the [spring-data-mongodb](https://stackoverflow.com/questions/tagged/spring-data-mongodb) tag by responding to questions and joining the debate.
* Create [JIRA](https://jira.spring.io/browse/DATAMONGO) tickets for bugs and new features and comment and vote on the ones that you are interested in.
* Github is for social coding: if you want to write code, we encourage contributions through pull requests from [forks of this repository](https://help.github.com/forking/). If you want to contribute code this way, please reference a JIRA ticket as well covering the specific issue you are addressing.
* Watch for upcoming articles on Spring by [subscribing](https://spring.io/blog) to spring.io.

Before we accept a non-trivial patch or pull request we will need you to [sign the Contributor License Agreement](https://cla.pivotal.io/sign/spring). Signing the contributor’s agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. If you forget to do so, you'll be reminded when you submit a pull request. Active contributors might be asked to join the core team, and given the ability to merge pull requests.
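
The deleted README.md above describes `MongoTemplate` only in prose. For orientation, here is a rough, hypothetical sketch of the kind of direct `MongoTemplate` usage that description refers to; it is not part of this change, and the `Person` class stands in for the domain type assumed by the README examples.

```java
import java.util.List;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.MongoClient;

public class MongoTemplateSketch {

	// Minimal stand-in for the Person domain type used throughout the README.
	static class Person {
		String firstname;
		String lastname;
	}

	public static void main(String[] args) {

		// Template connected to a local MongoDB instance and the "springdata" database,
		// mirroring the configuration shown in the README examples.
		MongoTemplate template = new MongoTemplate(new MongoClient(), "springdata");

		// POJO mapping: the object is converted to BSON and stored as a document.
		Person person = new Person();
		person.firstname = "Oliver";
		person.lastname = "Gierke";
		template.insert(person);

		// Query derived from Criteria; data access exceptions are translated into
		// Spring's technology-agnostic DAO exception hierarchy.
		List<Person> gierkes = template.find(
				Query.query(Criteria.where("lastname").is("Gierke")), Person.class);
		System.out.println(gierkes.size());
	}
}
```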

9 SECURITY.adoc Normal file
@@ -0,0 +1,9 @@
# Security Policy

## Supported Versions

Please see the https://spring.io/projects/spring-data-mongodb[Spring Data MongoDB] project page for supported versions.

## Reporting a Vulnerability

Please don't raise security vulnerabilities here. Head over to https://pivotal.io/security to learn how to disclose them responsibly.

@@ -1,14 +0,0 @@
FROM openjdk:11-jdk

RUN apt-get update && apt-get install -y apt-transport-https

RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4

RUN echo "deb https://repo.mongodb.org/apt/debian stretch/mongodb-org/4.0 main" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list

RUN apt-get update

RUN apt-get install -y mongodb-org=4.0.3 mongodb-org-server=4.0.3 mongodb-org-shell=4.0.3 mongodb-org-mongos=4.0.3 mongodb-org-tools=4.0.3

RUN apt-get clean \
&& rm -rf /var/lib/apt/lists/*

@@ -1,14 +0,0 @@
FROM openjdk:8-jdk

RUN apt-get update && apt-get install -y apt-transport-https

RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4

RUN echo "deb https://repo.mongodb.org/apt/debian stretch/mongodb-org/4.0 main" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list

RUN apt-get update

RUN apt-get install -y mongodb-org=4.0.3 mongodb-org-server=4.0.3 mongodb-org-shell=4.0.3 mongodb-org-mongos=4.0.3 mongodb-org-tools=4.0.3

RUN apt-get clean \
&& rm -rf /var/lib/apt/lists/*

15 ci/build.sh
@@ -1,15 +0,0 @@
#!/bin/bash

set -euo pipefail

[[ -d $PWD/maven && ! -d $HOME/.m2 ]] && ln -s $PWD/maven $HOME/.m2

spring_data_mongodb_artifactory=$(pwd)/spring-data-mongodb-artifactory

rm -rf $HOME/.m2/repository/org/springframework/data 2> /dev/null || :

cd spring-data-mongodb-github

./mvnw deploy \
-Dmaven.test.skip=true \
-DaltDeploymentRepository=distribution::default::file://${spring_data_mongodb_artifactory} \

19 ci/build.yml
@@ -1,19 +0,0 @@
---
platform: linux

image_resource:
  type: docker-image
  source:
    repository: springci/spring-data-8-jdk-with-mongodb

inputs:
- name: spring-data-mongodb-github

outputs:
- name: spring-data-mongodb-artifactory

caches:
- path: maven

run:
  path: spring-data-mongodb-github/ci/build.sh

14 ci/openjdk8-mongodb-4.0/Dockerfile Normal file
@@ -0,0 +1,14 @@
FROM adoptopenjdk/openjdk8:latest

RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2

RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4

RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list

RUN apt-get update

RUN apt-get install -y mongodb-org=4.0.9 mongodb-org-server=4.0.9 mongodb-org-shell=4.0.9 mongodb-org-mongos=4.0.9 mongodb-org-tools=4.0.9

RUN apt-get clean \
&& rm -rf /var/lib/apt/lists/*

14 ci/openjdk8-mongodb-4.1/Dockerfile Normal file
@@ -0,0 +1,14 @@
FROM adoptopenjdk/openjdk8:latest

RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2

RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 4B7C549A058F8B6B

RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.1 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.1.list

RUN apt-get update

RUN apt-get install -y mongodb-org-unstable=4.1.13 mongodb-org-unstable-server=4.1.13 mongodb-org-unstable-shell=4.1.13 mongodb-org-unstable-mongos=4.1.13 mongodb-org-unstable-tools=4.1.13

RUN apt-get clean \
&& rm -rf /var/lib/apt/lists/*

14 ci/openjdk8-mongodb-4.2/Dockerfile Normal file
@@ -0,0 +1,14 @@
FROM adoptopenjdk/openjdk8:latest

RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2

RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b

RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list

RUN apt-get update

RUN apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0

RUN apt-get clean \
&& rm -rf /var/lib/apt/lists/*
||||
14
ci/test.sh
14
ci/test.sh
@@ -1,14 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
mkdir -p /data/db
|
||||
mongod &
|
||||
|
||||
[[ -d $PWD/maven && ! -d $HOME/.m2 ]] && ln -s $PWD/maven $HOME/.m2
|
||||
|
||||
rm -rf $HOME/.m2/repository/org/springframework/data/mongodb 2> /dev/null || :
|
||||
|
||||
cd spring-data-mongodb-github
|
||||
|
||||
./mvnw clean dependency:list test -P${PROFILE} -Dsort
|
||||

16 ci/test.yml
@@ -1,16 +0,0 @@
---
platform: linux

image_resource:
  type: docker-image
  source:
    repository: springci/spring-data-8-jdk-with-mongodb

inputs:
- name: spring-data-mongodb-github

caches:
- path: maven

run:
  path: spring-data-mongodb-github/ci/test.sh

24 pom.xml
@@ -5,7 +5,7 @@

<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.2.0.M3</version>
<version>2.2.0.RC3</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>

@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>2.2.0.M3</version>
<version>2.2.0.RC3</version>
</parent>

<modules>

@@ -26,9 +26,9 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>2.2.0.M3</springdata.commons>
<mongo>3.10.2</mongo>
<mongo.reactivestreams>1.11.0</mongo.reactivestreams>
<springdata.commons>2.2.0.RC3</springdata.commons>
<mongo>3.11.0</mongo>
<mongo.reactivestreams>1.12.0</mongo.reactivestreams>
<jmh.version>1.19</jmh.version>
</properties>

@@ -113,20 +113,6 @@
</developers>

<profiles>

<profile>
<id>release</id>
<build>
<plugins>
<plugin>
<groupId>org.jfrog.buildinfo</groupId>
<artifactId>artifactory-maven-plugin</artifactId>
<inherited>false</inherited>
</plugin>
</plugins>
</build>
</profile>

<profile>
<id>benchmarks</id>
<modules>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.2.0.M3</version>
<version>2.2.0.RC3</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -14,7 +14,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.2.0.M3</version>
<version>2.2.0.RC3</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -29,10 +29,6 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>wagon-maven-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.asciidoctor</groupId>
<artifactId>asciidoctor-maven-plugin</artifactId>

@@ -11,7 +11,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.2.0.M3</version>
<version>2.2.0.RC3</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -276,14 +276,12 @@
<dependency>
<groupId>org.jetbrains.kotlinx</groupId>
<artifactId>kotlinx-coroutines-core</artifactId>
<version>${kotlin-coroutines}</version>
<optional>true</optional>
</dependency>

<dependency>
<groupId>org.jetbrains.kotlinx</groupId>
<artifactId>kotlinx-coroutines-reactor</artifactId>
<version>${kotlin-coroutines}</version>
<optional>true</optional>
</dependency>

@@ -13,7 +13,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.data.mongodb;

import reactor.core.publisher.Mono;

@@ -88,4 +87,16 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
* @since 2.1
*/
ReactiveMongoDatabaseFactory withSession(ClientSession session);

/**
 * Returns if the given {@link ReactiveMongoDatabaseFactory} is bound to a
 * {@link com.mongodb.reactivestreams.client.ClientSession} that has an
 * {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}.
 *
 * @return {@literal true} if there's an active transaction, {@literal false} otherwise.
 * @since 2.2
 */
default boolean isTransactionActive() {
	return false;
}
}
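
The new `isTransactionActive()` method defaults to `false`. As a hypothetical illustration (not code from this change) of how a session-bound factory, such as the one returned from `withSession(ClientSession)`, could report its state:

```java
import com.mongodb.reactivestreams.client.ClientSession;

// Sketch only; the framework's actual session-bound factory may differ.
class SessionBoundDatabaseFactorySketch /* implements ReactiveMongoDatabaseFactory */ {

	private final ClientSession session;

	SessionBoundDatabaseFactorySketch(ClientSession session) {
		this.session = session;
	}

	// Replaces the interface default of 'false' with the state of the bound reactive ClientSession.
	public boolean isTransactionActive() {
		return session != null && session.hasActiveTransaction();
	}
}
```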
@@ -0,0 +1,278 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import reactor.core.publisher.Mono;
|
||||
import reactor.util.context.Context;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.NoTransactionException;
|
||||
import org.springframework.transaction.reactive.ReactiveResourceSynchronization;
|
||||
import org.springframework.transaction.reactive.TransactionSynchronization;
|
||||
import org.springframework.transaction.reactive.TransactionSynchronizationManager;
|
||||
import org.springframework.transaction.support.ResourceHolderSynchronization;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
import com.mongodb.reactivestreams.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for
|
||||
* obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection}
|
||||
* suitable for transactional usage.
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public class ReactiveMongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Check if the {@link ReactiveMongoDatabaseFactory} is actually bound to a
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} that has an active transaction, or if a
|
||||
* {@link org.springframework.transaction.reactive.TransactionSynchronization} has been registered for the
|
||||
* {@link ReactiveMongoDatabaseFactory resource} and if the associated
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} has an
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @param databaseFactory the resource to check transactions for. Must not be {@literal null}.
|
||||
* @return a {@link Mono} emitting {@literal true} if the factory has an ongoing transaction.
|
||||
*/
|
||||
public static Mono<Boolean> isTransactionActive(ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
if (databaseFactory.isTransactionActive()) {
|
||||
return Mono.just(true);
|
||||
}
|
||||
|
||||
return TransactionSynchronizationManager.forCurrentTransaction() //
|
||||
.map(it -> {
|
||||
|
||||
ReactiveMongoResourceHolder holder = (ReactiveMongoResourceHolder) it.getResource(databaseFactory);
|
||||
return holder != null && holder.hasActiveTransaction();
|
||||
}) //
|
||||
.onErrorResume(NoTransactionException.class, e -> Mono.just(false));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(ReactiveMongoDatabaseFactory factory) {
|
||||
return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(ReactiveMongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(null, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(String dbName, ReactiveMongoDatabaseFactory factory) {
|
||||
return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(String dbName, ReactiveMongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(dbName, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
private static Mono<MongoDatabase> doGetMongoDatabase(@Nullable String dbName, ReactiveMongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
Assert.notNull(factory, "DatabaseFactory must not be null!");
|
||||
|
||||
return TransactionSynchronizationManager.forCurrentTransaction()
|
||||
.filter(TransactionSynchronizationManager::isSynchronizationActive) //
|
||||
.flatMap(synchronizationManager -> {
|
||||
|
||||
return doGetSession(synchronizationManager, factory, sessionSynchronization) //
|
||||
.map(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it)));
|
||||
})
|
||||
.onErrorResume(NoTransactionException.class,
|
||||
e -> Mono.fromSupplier(() -> getMongoDatabaseOrDefault(dbName, factory)))
|
||||
.defaultIfEmpty(getMongoDatabaseOrDefault(dbName, factory));
|
||||
}
|
||||
|
||||
private static MongoDatabase getMongoDatabaseOrDefault(@Nullable String dbName,
|
||||
ReactiveMongoDatabaseFactory factory) {
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
}
|
||||
|
||||
private static Mono<ClientSession> doGetSession(TransactionSynchronizationManager synchronizationManager,
|
||||
ReactiveMongoDatabaseFactory dbFactory, SessionSynchronization sessionSynchronization) {
|
||||
|
||||
final ReactiveMongoResourceHolder registeredHolder = (ReactiveMongoResourceHolder) synchronizationManager
|
||||
.getResource(dbFactory);
|
||||
|
||||
// check for native MongoDB transaction
|
||||
if (registeredHolder != null
|
||||
&& (registeredHolder.hasSession() || registeredHolder.isSynchronizedWithTransaction())) {
|
||||
|
||||
return registeredHolder.hasSession() ? Mono.just(registeredHolder.getSession())
|
||||
: createClientSession(dbFactory).map(registeredHolder::setSessionIfAbsent);
|
||||
}
|
||||
|
||||
if (SessionSynchronization.ON_ACTUAL_TRANSACTION.equals(sessionSynchronization)) {
|
||||
return Mono.empty();
|
||||
}
|
||||
|
||||
// init a non native MongoDB transaction by registering a MongoSessionSynchronization
|
||||
return createClientSession(dbFactory).map(session -> {
|
||||
|
||||
ReactiveMongoResourceHolder newHolder = new ReactiveMongoResourceHolder(session, dbFactory);
|
||||
newHolder.getRequiredSession().startTransaction();
|
||||
|
||||
synchronizationManager
|
||||
.registerSynchronization(new MongoSessionSynchronization(synchronizationManager, newHolder, dbFactory));
|
||||
newHolder.setSynchronizedWithTransaction(true);
|
||||
synchronizationManager.bindResource(dbFactory, newHolder);
|
||||
|
||||
return newHolder.getSession();
|
||||
});
|
||||
}
|
||||
|
||||
private static Mono<ClientSession> createClientSession(ReactiveMongoDatabaseFactory dbFactory) {
|
||||
return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build());
|
||||
}
|
||||
|
||||
/**
|
||||
* MongoDB specific {@link ResourceHolderSynchronization} for resource cleanup at the end of a transaction when
|
||||
* participating in a non-native MongoDB transaction, such as a R2CBC transaction.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
*/
|
||||
private static class MongoSessionSynchronization
|
||||
extends ReactiveResourceSynchronization<ReactiveMongoResourceHolder, Object> {
|
||||
|
||||
private final ReactiveMongoResourceHolder resourceHolder;
|
||||
|
||||
MongoSessionSynchronization(TransactionSynchronizationManager synchronizationManager,
|
||||
ReactiveMongoResourceHolder resourceHolder, ReactiveMongoDatabaseFactory dbFactory) {
|
||||
|
||||
super(resourceHolder, dbFactory, synchronizationManager);
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#shouldReleaseBeforeCompletion()
|
||||
*/
|
||||
@Override
|
||||
protected boolean shouldReleaseBeforeCompletion() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#processResourceAfterCommit(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> processResourceAfterCommit(ReactiveMongoResourceHolder resourceHolder) {
|
||||
|
||||
if (isTransactionActive(resourceHolder)) {
|
||||
return Mono.from(resourceHolder.getRequiredSession().commitTransaction());
|
||||
}
|
||||
|
||||
return Mono.empty();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#afterCompletion(int)
|
||||
*/
|
||||
@Override
|
||||
public Mono<Void> afterCompletion(int status) {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
if (status == TransactionSynchronization.STATUS_ROLLED_BACK && isTransactionActive(this.resourceHolder)) {
|
||||
|
||||
return Mono.from(resourceHolder.getRequiredSession().abortTransaction()) //
|
||||
.then(super.afterCompletion(status));
|
||||
}
|
||||
|
||||
return super.afterCompletion(status);
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#releaseResource(java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> releaseResource(ReactiveMongoResourceHolder resourceHolder, Object resourceKey) {
|
||||
|
||||
return Mono.fromRunnable(() -> {
|
||||
if (resourceHolder.hasActiveSession()) {
|
||||
resourceHolder.getRequiredSession().close();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private boolean isTransactionActive(ReactiveMongoResourceHolder resourceHolder) {
|
||||
|
||||
if (!resourceHolder.hasSession()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return resourceHolder.getRequiredSession().hasActiveTransaction();
|
||||
}
|
||||
}
|
||||
}
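A minimal usage sketch of the transaction-aware database lookup provided by this helper; the injected factory and the "orders" collection are illustrative assumptions, not part of the change:

// assuming an injected ReactiveMongoDatabaseFactory named "factory"
Mono<Long> count = ReactiveMongoDatabaseUtils.getDatabase(factory)
		.flatMap(db -> Mono.from(db.getCollection("orders").countDocuments()));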
@@ -0,0 +1,155 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.support.ResourceHolderSupport;
|
||||
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
|
||||
/**
|
||||
* MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds
|
||||
* instances of this class to the subscriber context.
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
* @see ReactiveMongoTransactionManager
|
||||
* @see ReactiveMongoTemplate
|
||||
*/
|
||||
class ReactiveMongoResourceHolder extends ResourceHolderSupport {
|
||||
|
||||
private @Nullable ClientSession session;
|
||||
private ReactiveMongoDatabaseFactory databaseFactory;
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoResourceHolder} for a given {@link ClientSession session}.
|
||||
*
|
||||
* @param session the associated {@link ClientSession}. Can be {@literal null}.
|
||||
 * @param databaseFactory the associated {@link ReactiveMongoDatabaseFactory}; must not be {@literal null}.
|
||||
*/
|
||||
ReactiveMongoResourceHolder(@Nullable ClientSession session, ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
this.session = session;
|
||||
this.databaseFactory = databaseFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the associated {@link ClientSession}. Can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
ClientSession getSession() {
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the required associated {@link ClientSession}.
|
||||
* @throws IllegalStateException if no session is associated.
|
||||
*/
|
||||
ClientSession getRequiredSession() {
|
||||
|
||||
ClientSession session = getSession();
|
||||
|
||||
if (session == null) {
|
||||
throw new IllegalStateException("No ClientSession associated");
|
||||
}
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the associated {@link ReactiveMongoDatabaseFactory}.
|
||||
*/
|
||||
public ReactiveMongoDatabaseFactory getDatabaseFactory() {
|
||||
return databaseFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ClientSession} to guard.
|
||||
*
|
||||
* @param session can be {@literal null}.
|
||||
*/
|
||||
public void setSession(@Nullable ClientSession session) {
|
||||
this.session = session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if session is not {@literal null}.
|
||||
*/
|
||||
boolean hasSession() {
|
||||
return session != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* If the {@link ReactiveMongoResourceHolder} is {@link #hasSession() not already associated} with a
|
||||
* {@link ClientSession} the given value is {@link #setSession(ClientSession) set} and returned, otherwise the current
|
||||
* bound session is returned.
|
||||
*
|
||||
 * @param session the {@link ClientSession} to associate if none is bound yet; can be {@literal null}.
 * @return the given {@code session}.
|
||||
*/
|
||||
@Nullable
|
||||
public ClientSession setSessionIfAbsent(@Nullable ClientSession session) {
|
||||
|
||||
if (!hasSession()) {
|
||||
setSession(session);
|
||||
}
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the session is active and has not been closed.
|
||||
*/
|
||||
boolean hasActiveSession() {
|
||||
|
||||
if (!hasSession()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return hasServerSession() && !getRequiredSession().getServerSession().isClosed();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the session has an active transaction.
|
||||
* @see #hasActiveSession()
|
||||
*/
|
||||
boolean hasActiveTransaction() {
|
||||
|
||||
if (!hasActiveSession()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return getRequiredSession().hasActiveTransaction();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the {@link ClientSession} has a {@link com.mongodb.session.ServerSession} associated
|
||||
* that is accessible via {@link ClientSession#getServerSession()}.
|
||||
*/
|
||||
boolean hasServerSession() {
|
||||
|
||||
try {
|
||||
return getRequiredSession().getServerSession() != null;
|
||||
} catch (IllegalStateException serverSessionClosed) {
|
||||
// ignore
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
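The holder is internal API; the following is a simplified, illustrative sketch of how the surrounding transaction infrastructure is expected to use it (not part of the change):

// session previously obtained via databaseFactory.getSession(ClientSessionOptions...)
ReactiveMongoResourceHolder holder = new ReactiveMongoResourceHolder(session, databaseFactory);
if (!holder.hasActiveTransaction()) {
	holder.getRequiredSession().startTransaction();
}
holder.setSynchronizedWithTransaction(true); // inherited from ResourceHolderSupport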
@@ -0,0 +1,530 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.TransactionDefinition;
|
||||
import org.springframework.transaction.TransactionException;
|
||||
import org.springframework.transaction.TransactionSystemException;
|
||||
import org.springframework.transaction.reactive.AbstractReactiveTransactionManager;
|
||||
import org.springframework.transaction.reactive.GenericReactiveTransaction;
|
||||
import org.springframework.transaction.reactive.TransactionSynchronizationManager;
|
||||
import org.springframework.transaction.support.SmartTransactionObject;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.TransactionOptions;
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
|
||||
/**
|
||||
* A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}.
|
||||
* <p />
|
||||
* Binds a {@link ClientSession} from the specified
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber
|
||||
* {@link reactor.util.context.Context}.
|
||||
* <p />
|
||||
* {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a
|
||||
* {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start},
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or
|
||||
* {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <p />
|
||||
* Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead
|
||||
* of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring
|
||||
* classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly.
|
||||
* <p />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override
|
||||
* {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the
|
||||
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
|
||||
* behavior as outlined in the MongoDB reference manual.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
* @see <a href="https://www.mongodb.com/transactions">MongoDB Transaction Documentation</a>
|
||||
* @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization)
|
||||
*/
|
||||
public class ReactiveMongoTransactionManager extends AbstractReactiveTransactionManager implements InitializingBean {
|
||||
|
||||
private @Nullable ReactiveMongoDatabaseFactory databaseFactory;
|
||||
private @Nullable TransactionOptions options;
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoTransactionManager} for bean-style usage.
|
||||
* <p />
|
||||
 * <strong>Note:</strong> The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to
 * be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory) set} before using the instance. Use this constructor
|
||||
* to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <p />
|
||||
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
|
||||
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
|
||||
*
|
||||
* @see #setDatabaseFactory(ReactiveMongoDatabaseFactory)
|
||||
*/
|
||||
public ReactiveMongoTransactionManager() {}
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given
|
||||
* {@link ReactiveMongoDatabaseFactory}.
|
||||
*
|
||||
* @param databaseFactory must not be {@literal null}.
|
||||
*/
|
||||
public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
this(databaseFactory, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given
|
||||
* {@link ReactiveMongoDatabaseFactory} applying the given {@link TransactionOptions options}, if present, when
|
||||
* starting a new transaction.
|
||||
*
|
||||
* @param databaseFactory must not be {@literal null}.
|
||||
* @param options can be {@literal null}.
|
||||
*/
|
||||
public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
@Nullable TransactionOptions options) {
|
||||
|
||||
Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");
|
||||
|
||||
this.databaseFactory = databaseFactory;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doGetTransaction(org.springframework.transaction.reactive.TransactionSynchronizationManager)
|
||||
*/
|
||||
@Override
|
||||
protected Object doGetTransaction(TransactionSynchronizationManager synchronizationManager)
|
||||
throws TransactionException {
|
||||
|
||||
ReactiveMongoResourceHolder resourceHolder = (ReactiveMongoResourceHolder) synchronizationManager
|
||||
.getResource(getRequiredDatabaseFactory());
|
||||
return new ReactiveMongoTransactionObject(resourceHolder);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#isExistingTransaction(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected boolean isExistingTransaction(Object transaction) throws TransactionException {
|
||||
return extractMongoTransaction(transaction).hasResourceHolder();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doBegin(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, org.springframework.transaction.TransactionDefinition)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doBegin(TransactionSynchronizationManager synchronizationManager, Object transaction,
|
||||
TransactionDefinition definition) throws TransactionException {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction);
|
||||
|
||||
Mono<ReactiveMongoResourceHolder> holder = newResourceHolder(definition,
|
||||
ClientSessionOptions.builder().causallyConsistent(true).build());
|
||||
|
||||
return holder.doOnNext(resourceHolder -> {
|
||||
|
||||
mongoTransactionObject.setResourceHolder(resourceHolder);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(
|
||||
String.format("About to start transaction for session %s.", debugString(resourceHolder.getSession())));
|
||||
}
|
||||
|
||||
}).doOnNext(resourceHolder -> {
|
||||
|
||||
mongoTransactionObject.startTransaction(options);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("Started transaction for session %s.", debugString(resourceHolder.getSession())));
|
||||
}
|
||||
|
||||
})//
|
||||
.onErrorMap(
|
||||
ex -> new TransactionSystemException(String.format("Could not start Mongo transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())), ex))
|
||||
.doOnSuccess(resourceHolder -> {
|
||||
|
||||
synchronizationManager.bindResource(getRequiredDatabaseFactory(), resourceHolder);
|
||||
}).then();
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSuspend(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Object> doSuspend(TransactionSynchronizationManager synchronizationManager, Object transaction)
|
||||
throws TransactionException {
|
||||
|
||||
return Mono.fromSupplier(() -> {
|
||||
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction);
|
||||
mongoTransactionObject.setResourceHolder(null);
|
||||
|
||||
return synchronizationManager.unbindResource(getRequiredDatabaseFactory());
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doResume(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doResume(TransactionSynchronizationManager synchronizationManager, @Nullable Object transaction,
|
||||
Object suspendedResources) {
|
||||
return Mono
|
||||
.fromRunnable(() -> synchronizationManager.bindResource(getRequiredDatabaseFactory(), suspendedResources));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCommit(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected final Mono<Void> doCommit(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) throws TransactionException {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("About to commit transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())));
|
||||
}
|
||||
|
||||
return doCommit(synchronizationManager, mongoTransactionObject).onErrorMap(ex -> {
|
||||
return new TransactionSystemException(String.format("Could not commit Mongo transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())), ex);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Customization hook to perform an actual commit of the given transaction.<br />
|
||||
* If a commit operation encounters an error, the MongoDB driver throws a {@link MongoException} holding
|
||||
* {@literal error labels}. <br />
|
||||
* By default those labels are ignored, nevertheless one might check for
|
||||
 * {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit error labels} and retry the
|
||||
* commit.
|
||||
*
|
||||
* @param synchronizationManager reactive synchronization manager.
|
||||
* @param transactionObject never {@literal null}.
|
||||
*/
|
||||
protected Mono<Void> doCommit(TransactionSynchronizationManager synchronizationManager,
|
||||
ReactiveMongoTransactionObject transactionObject) {
|
||||
return transactionObject.commitTransaction();
|
||||
}
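/*
 * Illustrative subclass sketch (not part of this change): retry the commit once when the driver
 * reports the UnknownTransactionCommitResult error label, as suggested by the Javadoc above.
 *
 * @Override
 * protected Mono<Void> doCommit(TransactionSynchronizationManager synchronizationManager,
 *     ReactiveMongoTransactionObject transactionObject) {
 *
 *   return transactionObject.commitTransaction().onErrorResume(MongoException.class,
 *       ex -> ex.hasErrorLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)
 *           ? transactionObject.commitTransaction()
 *           : Mono.error(ex));
 * }
 */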
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doRollback(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doRollback(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("About to abort transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())));
|
||||
}
|
||||
|
||||
return mongoTransactionObject.abortTransaction().onErrorResume(MongoException.class, ex -> {
|
||||
return Mono
|
||||
.error(new TransactionSystemException(String.format("Could not abort Mongo transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())), ex));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSetRollbackOnly(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doSetRollbackOnly(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) throws TransactionException {
|
||||
|
||||
return Mono.fromRunnable(() -> {
|
||||
ReactiveMongoTransactionObject transactionObject = extractMongoTransaction(status);
|
||||
transactionObject.getRequiredResourceHolder().setRollbackOnly();
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCleanupAfterCompletion(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doCleanupAfterCompletion(TransactionSynchronizationManager synchronizationManager,
|
||||
Object transaction) {
|
||||
|
||||
Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction,
|
||||
() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
|
||||
transaction.getClass()));
|
||||
|
||||
return Mono.fromRunnable(() -> {
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = (ReactiveMongoTransactionObject) transaction;
|
||||
|
||||
// Remove the connection holder from the thread.
|
||||
synchronizationManager.unbindResource(getRequiredDatabaseFactory());
|
||||
mongoTransactionObject.getRequiredResourceHolder().clear();
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("About to release Session %s after transaction.",
|
||||
debugString(mongoTransactionObject.getSession())));
|
||||
}
|
||||
|
||||
mongoTransactionObject.closeSession();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ReactiveMongoDatabaseFactory} that this instance should manage transactions for.
|
||||
*
|
||||
* @param databaseFactory must not be {@literal null}.
|
||||
*/
|
||||
public void setDatabaseFactory(ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");
|
||||
this.databaseFactory = databaseFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link TransactionOptions} to be applied when starting transactions.
|
||||
*
|
||||
* @param options can be {@literal null}.
|
||||
*/
|
||||
public void setOptions(@Nullable TransactionOptions options) {
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ReactiveMongoDatabaseFactory} that this instance manages transactions for.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public ReactiveMongoDatabaseFactory getDatabaseFactory() {
|
||||
return databaseFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
|
||||
*/
|
||||
@Override
|
||||
public void afterPropertiesSet() {
|
||||
getRequiredDatabaseFactory();
|
||||
}
|
||||
|
||||
private Mono<ReactiveMongoResourceHolder> newResourceHolder(TransactionDefinition definition,
|
||||
ClientSessionOptions options) {
|
||||
|
||||
ReactiveMongoDatabaseFactory dbFactory = getRequiredDatabaseFactory();
|
||||
|
||||
return dbFactory.getSession(options).map(session -> new ReactiveMongoResourceHolder(session, dbFactory));
|
||||
}
|
||||
|
||||
/**
|
||||
* @throws IllegalStateException if {@link #databaseFactory} is {@literal null}.
|
||||
*/
|
||||
private ReactiveMongoDatabaseFactory getRequiredDatabaseFactory() {
|
||||
|
||||
Assert.state(databaseFactory != null,
|
||||
"ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory. Did you forget to provide one? It's required.");
|
||||
|
||||
return databaseFactory;
|
||||
}
|
||||
|
||||
private static ReactiveMongoTransactionObject extractMongoTransaction(Object transaction) {
|
||||
|
||||
Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction,
|
||||
() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
|
||||
transaction.getClass()));
|
||||
|
||||
return (ReactiveMongoTransactionObject) transaction;
|
||||
}
|
||||
|
||||
private static ReactiveMongoTransactionObject extractMongoTransaction(GenericReactiveTransaction status) {
|
||||
|
||||
Assert.isInstanceOf(ReactiveMongoTransactionObject.class, status.getTransaction(),
|
||||
() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
|
||||
status.getTransaction().getClass()));
|
||||
|
||||
return (ReactiveMongoTransactionObject) status.getTransaction();
|
||||
}
|
||||
|
||||
private static String debugString(@Nullable ClientSession session) {
|
||||
|
||||
if (session == null) {
|
||||
return "null";
|
||||
}
|
||||
|
||||
String debugString = String.format("[%s@%s ", ClassUtils.getShortName(session.getClass()),
|
||||
Integer.toHexString(session.hashCode()));
|
||||
|
||||
try {
|
||||
if (session.getServerSession() != null) {
|
||||
debugString += String.format("id = %s, ", session.getServerSession().getIdentifier());
|
||||
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
|
||||
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
|
||||
debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber());
|
||||
debugString += String.format("closed = %d, ", session.getServerSession().isClosed());
|
||||
debugString += String.format("clusterTime = %s", session.getClusterTime());
|
||||
} else {
|
||||
debugString += "id = n/a";
|
||||
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
|
||||
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
|
||||
debugString += String.format("clusterTime = %s", session.getClusterTime());
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
debugString += String.format("error = %s", e.getMessage());
|
||||
}
|
||||
|
||||
debugString += "]";
|
||||
|
||||
return debugString;
|
||||
}
|
||||
|
||||
/**
|
||||
 * MongoDB specific transaction object, representing a {@link ReactiveMongoResourceHolder}. Used as transaction object by
|
||||
* {@link ReactiveMongoTransactionManager}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
* @see ReactiveMongoResourceHolder
|
||||
*/
|
||||
protected static class ReactiveMongoTransactionObject implements SmartTransactionObject {
|
||||
|
||||
private @Nullable ReactiveMongoResourceHolder resourceHolder;
|
||||
|
||||
ReactiveMongoTransactionObject(@Nullable ReactiveMongoResourceHolder resourceHolder) {
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/**
|
||||
 * Set the {@link ReactiveMongoResourceHolder}.
|
||||
*
|
||||
* @param resourceHolder can be {@literal null}.
|
||||
*/
|
||||
void setResourceHolder(@Nullable ReactiveMongoResourceHolder resourceHolder) {
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/**
|
||||
 * @return {@literal true} if a {@link ReactiveMongoResourceHolder} is set.
|
||||
*/
|
||||
final boolean hasResourceHolder() {
|
||||
return resourceHolder != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a MongoDB transaction optionally given {@link TransactionOptions}.
|
||||
*
|
||||
* @param options can be {@literal null}
|
||||
*/
|
||||
void startTransaction(@Nullable TransactionOptions options) {
|
||||
|
||||
ClientSession session = getRequiredSession();
|
||||
if (options != null) {
|
||||
session.startTransaction(options);
|
||||
} else {
|
||||
session.startTransaction();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Commit the transaction.
|
||||
*/
|
||||
public Mono<Void> commitTransaction() {
|
||||
return Mono.from(getRequiredSession().commitTransaction());
|
||||
}
|
||||
|
||||
/**
|
||||
* Rollback (abort) the transaction.
|
||||
*/
|
||||
public Mono<Void> abortTransaction() {
|
||||
return Mono.from(getRequiredSession().abortTransaction());
|
||||
}
|
||||
|
||||
/**
|
||||
* Close a {@link ClientSession} without regard to its transactional state.
|
||||
*/
|
||||
void closeSession() {
|
||||
|
||||
ClientSession session = getRequiredSession();
|
||||
if (session.getServerSession() != null && !session.getServerSession().isClosed()) {
|
||||
session.close();
|
||||
}
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public ClientSession getSession() {
|
||||
return resourceHolder != null ? resourceHolder.getSession() : null;
|
||||
}
|
||||
|
||||
private ReactiveMongoResourceHolder getRequiredResourceHolder() {
|
||||
|
||||
Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present. o_O");
|
||||
return resourceHolder;
|
||||
}
|
||||
|
||||
private ClientSession getRequiredSession() {
|
||||
|
||||
ClientSession session = getSession();
|
||||
Assert.state(session != null, "A Session is required but it turned out to be null.");
|
||||
return session;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly()
|
||||
*/
|
||||
@Override
|
||||
public boolean isRollbackOnly() {
|
||||
return this.resourceHolder != null && this.resourceHolder.isRollbackOnly();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#flush()
|
||||
*/
|
||||
@Override
|
||||
public void flush() {
|
||||
throw new UnsupportedOperationException("flush() not supported");
|
||||
}
|
||||
}
|
||||
}
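A hedged configuration sketch showing how the new transaction manager could be wired up and then used with Spring's reactive transaction support; the bean names and the Person type are illustrative assumptions:

@Configuration
class TransactionConfig {

	@Bean
	ReactiveMongoTransactionManager transactionManager(ReactiveMongoDatabaseFactory factory) {
		return new ReactiveMongoTransactionManager(factory);
	}
}

// programmatic usage via Spring's TransactionalOperator
TransactionalOperator operator = TransactionalOperator.create(transactionManager);
Mono<Person> saved = operator.transactional(template.save(new Person("Alice")));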
@@ -105,6 +105,7 @@ public abstract class AbstractMongoClientConfiguration extends MongoConfiguratio
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
converter.setCodecRegistryProvider(mongoDbFactory());
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
@@ -111,6 +111,7 @@ public abstract class AbstractMongoConfiguration extends MongoConfigurationSuppo
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
converter.setCodecRegistryProvider(mongoDbFactory());
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
@@ -83,6 +83,7 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
converter.setCodecRegistryProvider(reactiveMongoDbFactory());
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.config;
|
||||
import static org.springframework.data.config.ParsingUtils.*;
|
||||
import static org.springframework.data.mongodb.config.BeanNames.*;
|
||||
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.beans.factory.support.RootBeanDefinition;
|
||||
@@ -26,25 +27,33 @@ import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.beans.factory.xml.ParserContext;
|
||||
import org.springframework.data.auditing.config.IsNewAwareAuditingHandlerBeanDefinitionParser;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
/**
|
||||
* {@link BeanDefinitionParser} to register a {@link AuditingEventListener} to transparently set auditing information on
|
||||
* an entity.
|
||||
* {@link BeanDefinitionParser} to register a {@link AuditingEntityCallback} to transparently set auditing information
|
||||
* on an entity.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinitionParser {
|
||||
|
||||
private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
|
||||
MongoAuditingRegistrar.class.getClassLoader());
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element)
|
||||
*/
|
||||
@Override
|
||||
protected Class<?> getBeanClass(Element element) {
|
||||
return AuditingEventListener.class;
|
||||
return AuditingEntityCallback.class;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -80,7 +89,24 @@ public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinit
|
||||
mappingContextRef);
|
||||
parser.parse(element, parserContext);
|
||||
|
||||
builder.addConstructorArgValue(getObjectFactoryBeanDefinition(parser.getResolvedBeanName(),
|
||||
parserContext.extractSource(element)));
|
||||
AbstractBeanDefinition isNewAwareAuditingHandler = getObjectFactoryBeanDefinition(parser.getResolvedBeanName(),
|
||||
parserContext.extractSource(element));
|
||||
builder.addConstructorArgValue(isNewAwareAuditingHandler);
|
||||
|
||||
if (PROJECT_REACTOR_AVAILABLE) {
|
||||
registerReactiveAuditingEntityCallback(parserContext.getRegistry(), isNewAwareAuditingHandler,
|
||||
parserContext.extractSource(element));
|
||||
}
|
||||
}
|
||||
|
||||
private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry,
|
||||
AbstractBeanDefinition isNewAwareAuditingHandler, @Nullable Object source) {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);
|
||||
|
||||
builder.addConstructorArgValue(isNewAwareAuditingHandler);
|
||||
builder.getRawBeanDefinition().setSource(source);
|
||||
|
||||
registry.registerBeanDefinition(ReactiveAuditingEntityCallback.class.getName(), builder.getBeanDefinition());
|
||||
}
|
||||
}
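For applications, enabling auditing stays unchanged; with Project Reactor on the classpath the reactive callback is now registered as well. A minimal sketch (the config class name is illustrative):

@Configuration
@EnableMongoAuditing
class AuditingConfig {
	// @CreatedDate / @LastModifiedDate are populated through AuditingEntityCallback and,
	// for the reactive save path, through the additionally registered ReactiveAuditingEntityCallback.
}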
@@ -32,17 +32,23 @@ import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
* {@link ImportBeanDefinitionRegistrar} to enable {@link EnableMongoAuditing} annotation.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
|
||||
private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
|
||||
MongoAuditingRegistrar.class.getClassLoader());
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
|
||||
@@ -104,12 +110,27 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
|
||||
|
||||
BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder
|
||||
.rootBeanDefinition(AuditingEventListener.class);
|
||||
.rootBeanDefinition(AuditingEntityCallback.class);
|
||||
listenerBeanDefinitionBuilder
|
||||
.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));
|
||||
|
||||
registerInfrastructureBeanWithId(listenerBeanDefinitionBuilder.getBeanDefinition(),
|
||||
AuditingEventListener.class.getName(), registry);
|
||||
AuditingEntityCallback.class.getName(), registry);
|
||||
|
||||
if (PROJECT_REACTOR_AVAILABLE) {
|
||||
registerReactiveAuditingEntityCallback(registry, auditingHandlerDefinition.getSource());
|
||||
}
|
||||
}
|
||||
|
||||
private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry, Object source) {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);
|
||||
|
||||
builder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));
|
||||
builder.getRawBeanDefinition().setSource(source);
|
||||
|
||||
registerInfrastructureBeanWithId(builder.getBeanDefinition(), ReactiveAuditingEntityCallback.class.getName(),
|
||||
registry);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -35,6 +35,7 @@ import org.w3c.dom.Element;
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
abstract class MongoParsingUtils {
|
||||
@@ -92,6 +93,7 @@ abstract class MongoParsingUtils {
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-socket-timeout", "heartbeatSocketTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "ssl", "ssl");
|
||||
setPropertyReference(clientOptionsDefBuilder, optionsElement, "ssl-socket-factory-ref", "sslSocketFactory");
|
||||
setPropertyReference(clientOptionsDefBuilder, optionsElement, "encryption-settings-ref", "autoEncryptionSettings");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "server-selection-timeout", "serverSelectionTimeout");
|
||||
|
||||
mongoClientBuilder.addPropertyValue("mongoClientOptions", clientOptionsDefBuilder.getBeanDefinition());
|
||||
|
||||
@@ -21,9 +21,11 @@ import java.time.Instant;
|
||||
import java.util.Arrays;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.BsonDocument;
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -51,6 +53,7 @@ public class ChangeStreamOptions {
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private Resume resume = Resume.UNDEFINED;
|
||||
|
||||
protected ChangeStreamOptions() {}
|
||||
|
||||
@@ -97,6 +100,22 @@ public class ChangeStreamOptions {
|
||||
return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, BsonTimestamp.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the change stream should be started after the {@link #getResumeToken() token}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public boolean isStartAfter() {
|
||||
return Resume.START_AFTER.equals(resume);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the change stream should be resumed after the {@link #getResumeToken() token}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public boolean isResumeAfter() {
|
||||
return Resume.RESUME_AFTER.equals(resume);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return empty {@link ChangeStreamOptions}.
|
||||
*/
|
||||
@@ -137,6 +156,25 @@ public class ChangeStreamOptions {
|
||||
+ ObjectUtils.nullSafeClassName(timestamp));
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
enum Resume {
|
||||
|
||||
UNDEFINED,
|
||||
|
||||
/**
|
||||
* @see com.mongodb.client.ChangeStreamIterable#startAfter(BsonDocument)
|
||||
*/
|
||||
START_AFTER,
|
||||
|
||||
/**
|
||||
* @see com.mongodb.client.ChangeStreamIterable#resumeAfter(BsonDocument)
|
||||
*/
|
||||
RESUME_AFTER
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for creating {@link ChangeStreamOptions}.
|
||||
*
|
||||
@@ -150,6 +188,7 @@ public class ChangeStreamOptions {
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private Resume resume = Resume.UNDEFINED;
|
||||
|
||||
private ChangeStreamOptionsBuilder() {}
|
||||
|
||||
@@ -217,6 +256,11 @@ public class ChangeStreamOptions {
|
||||
Assert.notNull(resumeToken, "ResumeToken must not be null!");
|
||||
|
||||
this.resumeToken = resumeToken;
|
||||
|
||||
if (this.resume == Resume.UNDEFINED) {
|
||||
this.resume = Resume.RESUME_AFTER;
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -273,6 +317,36 @@ public class ChangeStreamOptions {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the resume token after which to continue emitting notifications.
|
||||
*
|
||||
* @param resumeToken must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAfter(BsonValue resumeToken) {
|
||||
|
||||
resumeToken(resumeToken);
|
||||
this.resume = Resume.RESUME_AFTER;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the resume token after which to start emitting notifications.
|
||||
*
|
||||
* @param resumeToken must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder startAfter(BsonValue resumeToken) {
|
||||
|
||||
resumeToken(resumeToken);
|
||||
this.resume = Resume.START_AFTER;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the built {@link ChangeStreamOptions}
|
||||
*/
|
||||
@@ -280,11 +354,12 @@ public class ChangeStreamOptions {
|
||||
|
||||
ChangeStreamOptions options = new ChangeStreamOptions();
|
||||
|
||||
options.filter = filter;
|
||||
options.resumeToken = resumeToken;
|
||||
options.fullDocumentLookup = fullDocumentLookup;
|
||||
options.collation = collation;
|
||||
options.resumeTimestamp = resumeTimestamp;
|
||||
options.filter = this.filter;
|
||||
options.resumeToken = this.resumeToken;
|
||||
options.fullDocumentLookup = this.fullDocumentLookup;
|
||||
options.collation = this.collation;
|
||||
options.resumeTimestamp = this.resumeTimestamp;
|
||||
options.resume = this.resume;
|
||||
|
||||
return options;
|
||||
}
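A short sketch of the new resume behaviour; lastSeenChange is assumed to be a previously received com.mongodb.client.model.changestream.ChangeStreamDocument:

BsonDocument resumeToken = lastSeenChange.getResumeToken();
ChangeStreamOptions options = ChangeStreamOptions.builder()
		.startAfter(resumeToken) // or .resumeAfter(resumeToken) to resume instead of start
		.build();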
@@ -15,9 +15,15 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.bson.Document;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.client.FindIterable;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* Simple callback interface to allow customization of a {@link FindIterable}.
|
||||
@@ -25,7 +31,14 @@ import com.mongodb.client.FindIterable;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
interface CursorPreparer {
|
||||
public interface CursorPreparer extends ReadPreferenceAware {
|
||||
|
||||
/**
|
||||
* Default {@link CursorPreparer} just passing on the given {@link FindIterable}.
|
||||
*
|
||||
* @since 2.2
|
||||
*/
|
||||
CursorPreparer NO_OP_PREPARER = (iterable -> iterable);
|
||||
|
||||
/**
|
||||
* Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
|
||||
@@ -33,4 +46,37 @@ interface CursorPreparer {
|
||||
 * @param cursor the {@link FindIterable} to prepare.
|
||||
*/
|
||||
FindIterable<Document> prepare(FindIterable<Document> cursor);
|
||||
|
||||
/**
|
||||
 * Apply query specific settings to {@link MongoCollection} and initiate a find operation returning a
|
||||
* {@link FindIterable} via the given {@link Function find} function.
|
||||
*
|
||||
* @param collection must not be {@literal null}.
|
||||
* @param find must not be {@literal null}.
|
||||
* @return
|
||||
* @throws IllegalArgumentException if one of the required arguments is {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default FindIterable<Document> initiateFind(MongoCollection<Document> collection,
|
||||
Function<MongoCollection<Document>, FindIterable<Document>> find) {
|
||||
|
||||
Assert.notNull(collection, "Collection must not be null!");
|
||||
Assert.notNull(find, "Find function must not be null!");
|
||||
|
||||
if (hasReadPreference()) {
|
||||
collection = collection.withReadPreference(getReadPreference());
|
||||
}
|
||||
|
||||
return prepare(find.apply(collection));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the {@link ReadPreference} to apply or {@literal null} if none defined.
|
||||
* @since 2.2
|
||||
*/
|
||||
@Override
|
||||
@Nullable
|
||||
default ReadPreference getReadPreference() {
|
||||
return null;
|
||||
}
|
||||
}
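A sketch of a custom preparer taking advantage of the new read-preference hook and initiateFind; the limit, batch size and collection are illustrative:

CursorPreparer preparer = new CursorPreparer() {

	@Override
	public FindIterable<Document> prepare(FindIterable<Document> cursor) {
		return cursor.limit(100).batchSize(50);
	}

	@Override
	public ReadPreference getReadPreference() {
		return ReadPreference.secondaryPreferred();
	}
};

FindIterable<Document> iterable = preparer.initiateFind(collection, col -> col.find());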
@@ -15,9 +15,6 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.NonNull;
|
||||
import lombok.Value;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
@@ -26,10 +23,18 @@ import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.context.ApplicationEventPublisher;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
@@ -38,8 +43,13 @@ import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.bulk.BulkWriteResult;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.model.*;
|
||||
|
||||
import lombok.NonNull;
|
||||
import lombok.Value;
|
||||
|
||||
/**
|
||||
* Default implementation for {@link BulkOperations}.
|
||||
*
|
||||
@@ -48,6 +58,8 @@ import com.mongodb.client.model.*;
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Minsu Kim
|
||||
* @author Jens Schauder
|
||||
* @author Michail Nikolaev
|
||||
* @since 1.9
|
||||
*/
|
||||
class DefaultBulkOperations implements BulkOperations {
|
||||
@@ -55,7 +67,7 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
private final MongoOperations mongoOperations;
|
||||
private final String collectionName;
|
||||
private final BulkOperationContext bulkOperationContext;
|
||||
private final List<WriteModel<Document>> models = new ArrayList<>();
|
||||
private final List<SourceAwareWriteModelHolder> models = new ArrayList<>();
|
||||
|
||||
private PersistenceExceptionTranslator exceptionTranslator;
|
||||
private @Nullable WriteConcern defaultWriteConcern;
|
||||
@@ -112,7 +124,9 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
Assert.notNull(document, "Document must not be null!");
|
||||
|
||||
models.add(new InsertOneModel<>(getMappedObject(document)));
|
||||
maybeEmitEvent(new BeforeConvertEvent<>(document, collectionName));
|
||||
Object source = maybeInvokeBeforeConvertCallback(document);
|
||||
addModel(source, new InsertOneModel<>(getMappedObject(source)));
|
||||
|
||||
return this;
|
||||
}
|
||||
@@ -226,7 +240,7 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
DeleteOptions deleteOptions = new DeleteOptions();
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation);
|
||||
|
||||
models.add(new DeleteManyModel<>(query.getQueryObject(), deleteOptions));
|
||||
addModel(query, new DeleteManyModel<>(query.getQueryObject(), deleteOptions));
|
||||
|
||||
return this;
|
||||
}
|
||||
@@ -262,8 +276,10 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
replaceOptions.upsert(options.isUpsert());
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation);
|
||||
|
||||
models.add(
|
||||
new ReplaceOneModel<>(getMappedQuery(query.getQueryObject()), getMappedObject(replacement), replaceOptions));
|
||||
maybeEmitEvent(new BeforeConvertEvent<>(replacement, collectionName));
|
||||
Object source = maybeInvokeBeforeConvertCallback(replacement);
|
||||
addModel(source,
|
||||
new ReplaceOneModel<>(getMappedQuery(query.getQueryObject()), getMappedObject(source), replaceOptions));
|
||||
|
||||
return this;
|
||||
}
|
||||
@@ -277,14 +293,48 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
try {
|
||||
|
||||
return mongoOperations.execute(collectionName, collection -> {
|
||||
return collection.bulkWrite(models.stream().map(this::mapWriteModel).collect(Collectors.toList()), bulkOptions);
|
||||
});
|
||||
com.mongodb.bulk.BulkWriteResult result = mongoOperations.execute(collectionName, this::bulkWriteTo);
|
||||
|
||||
Assert.state(result != null, "Result must not be null.");
|
||||
|
||||
models.forEach(this::maybeEmitAfterSaveEvent);
|
||||
|
||||
return result;
|
||||
} finally {
|
||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
|
||||
}
|
||||
}
|
||||
|
||||
private BulkWriteResult bulkWriteTo(MongoCollection<Document> collection) {
|
||||
|
||||
if (defaultWriteConcern != null) {
|
||||
collection = collection.withWriteConcern(defaultWriteConcern);
|
||||
}
|
||||
|
||||
return collection.bulkWrite( //
|
||||
models.stream() //
|
||||
.map(this::extractAndMapWriteModel) //
|
||||
.collect(Collectors.toList()), //
|
||||
bulkOptions);
|
||||
}
|
||||
|
||||
private WriteModel<Document> extractAndMapWriteModel(SourceAwareWriteModelHolder it) {
|
||||
|
||||
maybeEmitBeforeSaveEvent(it);
|
||||
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeInvokeBeforeSaveCallback(it.getSource(), target);
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeInvokeBeforeSaveCallback(it.getSource(), target);
|
||||
}
|
||||
|
||||
return mapWriteModel(it.getModel());
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs update and upsert bulk operations.
|
||||
*
|
||||
@@ -304,9 +354,9 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
|
||||
|
||||
if (multi) {
|
||||
models.add(new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
addModel(update, new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
} else {
|
||||
models.add(new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
addModel(update, new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
}
|
||||
|
||||
return this;
|
||||
@@ -362,10 +412,69 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
}
|
||||
|
||||
Document sink = new Document();
|
||||
|
||||
mongoOperations.getConverter().write(source, sink);
|
||||
return sink;
|
||||
}
|
||||
|
||||
private void addModel(Object source, WriteModel<Document> model) {
|
||||
models.add(new SourceAwareWriteModelHolder(source, model));
|
||||
}
|
||||
|
||||
private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder it) {
|
||||
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), target, collectionName));
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder it) {
|
||||
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(it.getSource(), target, collectionName));
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(it.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {
|
||||
|
||||
if (null != bulkOperationContext.getEventPublisher()) {
|
||||
bulkOperationContext.getEventPublisher().publishEvent(event);
|
||||
}
|
||||
|
||||
return event;
|
||||
}
|
||||
|
||||
private Object maybeInvokeBeforeConvertCallback(Object value) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(BeforeConvertCallback.class, value, collectionName);
|
||||
}
|
||||
|
||||
private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(BeforeSaveCallback.class, value, mappedDocument,
|
||||
collectionName);
|
||||
}
|
||||
|
||||
private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {
|
||||
|
||||
BulkWriteOptions options = new BulkWriteOptions();
|
||||
@@ -395,5 +504,20 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
@NonNull Optional<? extends MongoPersistentEntity<?>> entity;
|
||||
@NonNull QueryMapper queryMapper;
|
||||
@NonNull UpdateMapper updateMapper;
|
||||
ApplicationEventPublisher eventPublisher;
|
||||
EntityCallbacks entityCallbacks;
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object chaining together an actual source with its {@link WriteModel} representation.
|
||||
*
|
||||
* @since 2.2
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@Value
|
||||
private static class SourceAwareWriteModelHolder {
|
||||
|
||||
Object source;
|
||||
WriteModel<Document> model;
|
||||
}
|
||||
}
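From the caller's point of view nothing changes; the difference is that lifecycle events and entity callbacks now fire per write model. A hedged sketch, with Person and the query purely illustrative:

BulkOperations bulkOps = mongoTemplate.bulkOps(BulkMode.UNORDERED, Person.class);
bulkOps.insert(new Person("alice"));
bulkOps.updateOne(Query.query(Criteria.where("name").is("bob")), new Update().set("age", 30));
BulkWriteResult result = bulkOps.execute(); // BeforeConvert/BeforeSave callbacks and Before/AfterSaveEvents are emitted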
@@ -120,19 +120,15 @@ public class DefaultIndexOperations implements IndexOperations {

		return execute(collection -> {

			Document indexOptions = indexDefinition.getIndexOptions();
			MongoPersistentEntity<?> entity = lookupPersistentEntity(type, collectionName);

			IndexOptions ops = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
			IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);

			if (indexOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
			indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity);
			indexOptions = addDefaultCollationIfRequired(indexOptions, entity);

				Assert.isInstanceOf(Document.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));

				ops.partialFilterExpression(mapper.getMappedObject((Document) indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY),
						lookupPersistentEntity(type, collectionName)));
			}

			return collection.createIndex(indexDefinition.getIndexKeys(), ops);
			Document mappedKeys = mapper.getMappedObject(indexDefinition.getIndexKeys(), entity);
			return collection.createIndex(mappedKeys, indexOptions);
		});
	}

@@ -192,7 +188,7 @@ public class DefaultIndexOperations implements IndexOperations {

	private List<IndexInfo> getIndexData(MongoCursor<Document> cursor) {

		List<IndexInfo> indexInfoList = new ArrayList<IndexInfo>();
		List<IndexInfo> indexInfoList = new ArrayList<>();

		while (cursor.hasNext()) {

@@ -217,4 +213,25 @@ public class DefaultIndexOperations implements IndexOperations {

		return mongoOperations.execute(collectionName, callback);
	}

	private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions,
			@Nullable MongoPersistentEntity<?> entity) {

		if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
			return ops;
		}

		Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
		return ops.partialFilterExpression(
				mapper.getMappedObject((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity));
	}

	private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, MongoPersistentEntity<?> entity) {

		if (ops.getCollation() != null || entity == null || !entity.hasCollation()) {
			return ops;
		}

		return ops.collation(entity.getCollation().toMongoCollation());
	}
}
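For context, the partial filter branch above is usually reached through an index definition that carries a filter document. A sketch of what such a definition could look like on the caller side, assuming Spring Data's Index/PartialIndexFilter API and an illustrative "age" field:

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.IndexOperations;
import org.springframework.data.mongodb.core.index.PartialIndexFilter;
import org.springframework.data.mongodb.core.query.Criteria;

class PartialIndexExample {

	// The filter's property names are mapped against the persistent entity (see addPartialFilterIfPresent above)
	// before createIndex is issued against the driver.
	void ensureAdultIndex(IndexOperations indexOps) {

		Index index = new Index()
				.on("age", Direction.ASC)
				.partial(PartialIndexFilter.of(Criteria.where("age").gt(18)));

		indexOps.ensureIndex(index);
	}
}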
|
||||
|
||||
@@ -94,23 +94,16 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
|
||||
return mongoOperations.execute(collectionName, collection -> {
|
||||
|
||||
Document indexOptions = indexDefinition.getIndexOptions();
|
||||
MongoPersistentEntity<?> entity = type
|
||||
.map(val -> (MongoPersistentEntity) queryMapper.getMappingContext().getRequiredPersistentEntity(val))
|
||||
.orElseGet(() -> lookupPersistentEntity(collectionName));
|
||||
|
||||
IndexOptions ops = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
|
||||
if (indexOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity);
|
||||
indexOptions = addDefaultCollationIfRequired(indexOptions, entity);
|
||||
|
||||
Assert.isInstanceOf(Document.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
|
||||
MongoPersistentEntity<?> entity = type
|
||||
.map(val -> (MongoPersistentEntity) queryMapper.getMappingContext().getRequiredPersistentEntity(val))
|
||||
.orElseGet(() -> lookupPersistentEntity(collectionName));
|
||||
|
||||
ops = ops.partialFilterExpression(
|
||||
queryMapper.getMappedObject(indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY, Document.class), entity));
|
||||
}
|
||||
|
||||
return collection.createIndex(indexDefinition.getIndexKeys(), ops);
|
||||
return collection.createIndex(indexDefinition.getIndexKeys(), indexOptions);
|
||||
|
||||
}).next();
|
||||
}
|
||||
@@ -126,21 +119,24 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropIndex(java.lang.String)
|
||||
*/
|
||||
public Mono<Void> dropIndex(final String name) {
|
||||
return mongoOperations.execute(collectionName, collection -> collection.dropIndex(name)).then();
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropAllIndexes()
|
||||
*/
|
||||
public Mono<Void> dropAllIndexes() {
|
||||
return dropIndex("*");
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#getIndexInfo()
|
||||
*/
|
||||
public Flux<IndexInfo> getIndexInfo() {
|
||||
@@ -148,4 +144,25 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
return mongoOperations.execute(collectionName, collection -> collection.listIndexes(Document.class)) //
|
||||
.map(IndexConverters.documentToIndexInfoConverter()::convert);
|
||||
}
|
||||
|
||||
private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions,
|
||||
@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
return ops.partialFilterExpression(
|
||||
queryMapper.getMappedObject((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity));
|
||||
}
|
||||
|
||||
private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (ops.getCollation() != null || entity == null || !entity.hasCollation()) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
return ops.collation(entity.getCollation().toMongoCollation());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,13 +42,15 @@ import com.mongodb.MongoException;
import com.mongodb.client.MongoDatabase;

/**
 * Default implementation of {@link ScriptOperations} capable of saving and executing {@link ServerSideJavaScript}.
 * Default implementation of {@link ScriptOperations} capable of saving and executing {@link ExecutableMongoScript}.
 *
 * @author Christoph Strobl
 * @author Oliver Gierke
 * @author Mark Paluch
 * @since 1.7
 * @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0.
 */
@Deprecated
class DefaultScriptOperations implements ScriptOperations {

	private static final String SCRIPT_COLLECTION_NAME = "system.js";
|
||||
|
||||
@@ -21,12 +21,14 @@ import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.mapping.IdentifierAccessor;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
@@ -34,6 +36,7 @@ import org.springframework.data.mongodb.core.convert.MongoWriter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -47,6 +50,7 @@ import org.springframework.util.MultiValueMap;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
* @see MongoTemplate
|
||||
* @see ReactiveMongoTemplate
|
||||
@@ -114,17 +118,6 @@ class EntityOperations {
|
||||
return context.getRequiredPersistentEntity(entityClass).getCollection();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the collection name to be used for the given entity.
|
||||
*
|
||||
* @param obj can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
public String determineEntityCollectionName(@Nullable Object obj) {
|
||||
return null == obj ? null : determineCollectionName(obj.getClass());
|
||||
}
|
||||
|
||||
public Query getByIdInQuery(Collection<?> entities) {
|
||||
|
||||
MultiValueMap<String, Object> byIds = new LinkedMultiValueMap<>();
|
||||
@@ -160,6 +153,29 @@ class EntityOperations {
|
||||
return ID_FIELD;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the name used for {@code $geoNear.distanceField} avoiding clashes with potentially existing properties.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return the name of the distanceField to use. {@literal dis} by default.
|
||||
* @since 2.2
|
||||
*/
|
||||
public String nearQueryDistanceFieldName(Class<?> domainType) {
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(domainType);
|
||||
if (persistentEntity == null || persistentEntity.getPersistentProperty("dis") == null) {
|
||||
return "dis";
|
||||
}
|
||||
|
||||
String distanceFieldName = "calculated-distance";
|
||||
int counter = 0;
|
||||
while (persistentEntity.getPersistentProperty(distanceFieldName) != null) {
|
||||
distanceFieldName += "-" + (counter++);
|
||||
}
|
||||
|
||||
return distanceFieldName;
|
||||
}
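To make the fallback concrete, two illustrative domain types (not part of this change) and the distance field names the method would pick:

// Illustrative only:
class City  { String name; }   // no "dis" property     -> distance field stays "dis"
class Route { double dis; }    // "dis" already exists  -> falls back to "calculated-distance"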
|
||||
|
||||
private static Document parse(String source) {
|
||||
|
||||
try {
|
||||
@@ -176,6 +192,20 @@ class EntityOperations {
|
||||
}
|
||||
}
|
||||
|
||||
public <T> TypedOperations<T> forType(@Nullable Class<T> entityClass) {
|
||||
|
||||
if (entityClass != null) {
|
||||
|
||||
MongoPersistentEntity<?> entity = context.getPersistentEntity(entityClass);
|
||||
|
||||
if (entity != null) {
|
||||
return new TypedEntityOperations(entity);
|
||||
}
|
||||
|
||||
}
|
||||
return UntypedOperations.instance();
|
||||
}
|
||||
|
||||
/**
|
||||
* A representation of information about an entity.
|
||||
*
|
||||
@@ -263,7 +293,7 @@ class EntityOperations {
|
||||
|
||||
/**
|
||||
* Returns whether the entity is considered to be new.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
* @since 2.1.2
|
||||
*/
|
||||
@@ -414,7 +444,7 @@ class EntityOperations {
|
||||
return map;
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.Entity#isNew()
|
||||
*/
|
||||
@@ -585,7 +615,7 @@ class EntityOperations {
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.Entity#isNew()
|
||||
*/
|
||||
@@ -632,22 +662,19 @@ class EntityOperations {
|
||||
public T populateIdIfNecessary(@Nullable Object id) {
|
||||
|
||||
if (id == null) {
|
||||
return null;
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
|
||||
T bean = propertyAccessor.getBean();
|
||||
MongoPersistentProperty idProperty = entity.getIdProperty();
|
||||
|
||||
if (idProperty == null) {
|
||||
return bean;
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
|
||||
if (identifierAccessor.getIdentifier() != null) {
|
||||
return bean;
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
|
||||
propertyAccessor.setProperty(idProperty, id);
|
||||
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
|
||||
@@ -698,4 +725,102 @@ class EntityOperations {
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Type-specific operations abstraction.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @param <T>
|
||||
* @since 2.2
|
||||
*/
|
||||
interface TypedOperations<T> {
|
||||
|
||||
/**
|
||||
* Return the optional {@link Collation} for the underlying entity.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation();
|
||||
|
||||
/**
|
||||
* Return the optional {@link Collation} from the given {@link Query} and fall back to the collation configured for
|
||||
* the underlying entity.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation(Query query);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypedOperations} for generic entities that are not represented with {@link PersistentEntity} (e.g. custom
|
||||
* conversions).
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
enum UntypedOperations implements TypedOperations<Object> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public static <T> TypedOperations<T> instance() {
|
||||
return (TypedOperations) INSTANCE;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation()
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation() {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation(Query query) {
|
||||
|
||||
if (query == null) {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
return query.getCollation();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypedOperations} backed by {@link MongoPersistentEntity}.
|
||||
*
|
||||
* @param <T>
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static class TypedEntityOperations<T> implements TypedOperations<T> {
|
||||
|
||||
private final @NonNull MongoPersistentEntity<T> entity;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation()
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation() {
|
||||
return Optional.ofNullable(entity.getCollation());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation(Query query) {
|
||||
|
||||
if (query.getCollation().isPresent()) {
|
||||
return query.getCollation();
|
||||
}
|
||||
|
||||
return Optional.ofNullable(entity.getCollation());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import com.mongodb.ReadPreference;
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
@@ -267,6 +268,11 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
||||
this.limit = Optional.of(limit);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ReadPreference getReadPreference() {
|
||||
return delegate.getReadPreference();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -33,6 +33,31 @@ public class FindAndModifyOptions {
|
||||
|
||||
private @Nullable Collation collation;
|
||||
|
||||
private static final FindAndModifyOptions NONE = new FindAndModifyOptions() {
|
||||
|
||||
private static final String ERROR_MSG = "FindAndModifyOptions.none() cannot be changed. Please use FindAndModifyOptions.options() instead.";
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions returnNew(boolean returnNew) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions upsert(boolean upsert) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions remove(boolean remove) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions collation(@Nullable Collation collation) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Static factory method to create a FindAndModifyOptions instance
|
||||
*
|
||||
@@ -42,9 +67,19 @@ public class FindAndModifyOptions {
|
||||
return new FindAndModifyOptions();
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method returning an unmodifiable {@link FindAndModifyOptions} instance.
|
||||
*
|
||||
* @return unmodifiable {@link FindAndModifyOptions} instance.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static FindAndModifyOptions none() {
|
||||
return NONE;
|
||||
}
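A short usage sketch contrasting the mutable factory with the shared immutable instance introduced here; the surrounding class is only there to make the snippet compile:

import org.springframework.data.mongodb.core.FindAndModifyOptions;

class FindAndModifyOptionsExample {

	void demo() {

		// Mutable options for a call that should upsert and return the modified document:
		FindAndModifyOptions returnNew = FindAndModifyOptions.options().upsert(true).returnNew(true);

		// Shared immutable defaults; calling any mutator throws UnsupportedOperationException:
		FindAndModifyOptions none = FindAndModifyOptions.none();
		// none.returnNew(true); // would throw
	}
}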
|
||||
|
||||
/**
|
||||
* Create new {@link FindAndModifyOptions} based on the options of the given {@literal source}.
|
||||
*
|
||||
*
|
||||
* @param source can be {@literal null}.
|
||||
* @return new instance of {@link FindAndModifyOptions}.
|
||||
* @since 2.0
|
||||
|
||||
@@ -36,6 +36,21 @@ public class FindAndReplaceOptions {
|
||||
private boolean returnNew;
|
||||
private boolean upsert;
|
||||
|
||||
private static final FindAndReplaceOptions NONE = new FindAndReplaceOptions() {
|
||||
|
||||
private static final String ERROR_MSG = "FindAndReplaceOptions.none() cannot be changed. Please use FindAndReplaceOptions.options() instead.";
|
||||
|
||||
@Override
|
||||
public FindAndReplaceOptions returnNew() {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndReplaceOptions upsert() {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link FindAndReplaceOptions} instance.
|
||||
* <dl>
|
||||
@@ -51,6 +66,16 @@ public class FindAndReplaceOptions {
|
||||
return new FindAndReplaceOptions();
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method returning an unmodifiable {@link FindAndReplaceOptions} instance.
|
||||
*
|
||||
* @return unmodifiable {@link FindAndReplaceOptions} instance.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static FindAndReplaceOptions none() {
|
||||
return NONE;
|
||||
}
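The replace-oriented counterpart follows the same pattern. A hedged sketch; the findAndReplace call in the comment is an assumption about how these options are typically consumed:

import org.springframework.data.mongodb.core.FindAndReplaceOptions;

class FindAndReplaceOptionsExample {

	void demo() {

		FindAndReplaceOptions upsertAndReturnNew = FindAndReplaceOptions.options().upsert().returnNew();
		FindAndReplaceOptions immutableDefaults = FindAndReplaceOptions.none(); // mutators throw

		// typically passed along as MongoOperations#findAndReplace(query, replacement, upsertAndReturnNew)
	}
}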
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link FindAndReplaceOptions} instance with
|
||||
* <dl>
|
||||
|
||||
@@ -15,19 +15,69 @@
 */
package org.springframework.data.mongodb.core;

import java.util.function.Function;

import org.bson.Document;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

import com.mongodb.ReadPreference;
import com.mongodb.reactivestreams.client.FindPublisher;
import com.mongodb.reactivestreams.client.MongoCollection;

/**
 * Simple callback interface to allow customization of a {@link FindPublisher}.
 *
 * @author Mark Paluch
 * @author Christoph Strobl
 * @author Konstantin Volivach
 */
interface FindPublisherPreparer {
public interface FindPublisherPreparer extends ReadPreferenceAware {

	/**
	 * Default {@link FindPublisherPreparer} just passing on the given {@link FindPublisher}.
	 *
	 * @since 2.2
	 */
	FindPublisherPreparer NO_OP_PREPARER = (findPublisher -> findPublisher);

	/**
	 * Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
	 *
	 * @param findPublisher must not be {@literal null}.
	 */
	<T> FindPublisher<T> prepare(FindPublisher<T> findPublisher);
	FindPublisher<Document> prepare(FindPublisher<Document> findPublisher);

	/**
	 * Apply query specific settings to {@link MongoCollection} and initiate a find operation returning a
	 * {@link FindPublisher} via the given {@link Function find} function.
	 *
	 * @param collection must not be {@literal null}.
	 * @param find must not be {@literal null}.
	 * @return
	 * @throws IllegalArgumentException if one of the required arguments is {@literal null}.
	 * @since 2.2
	 */
	default FindPublisher<Document> initiateFind(MongoCollection<Document> collection,
			Function<MongoCollection<Document>, FindPublisher<Document>> find) {

		Assert.notNull(collection, "Collection must not be null!");
		Assert.notNull(find, "Find function must not be null!");

		if (hasReadPreference()) {
			collection = collection.withReadPreference(getReadPreference());
		}

		return prepare(find.apply(collection));
	}

	/**
	 * @return the {@link ReadPreference} to apply or {@literal null} if none defined.
	 * @since 2.2
	 */
	@Override
	@Nullable
	default ReadPreference getReadPreference() {
		return null;
	}
}
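A sketch of a custom preparer built against the contract above: prepare(..) tunes the cursor and getReadPreference() routes reads to secondaries, which initiateFind(..) applies before issuing the find. The collection handle and the query document are assumptions:

import org.bson.Document;

import com.mongodb.ReadPreference;
import com.mongodb.reactivestreams.client.FindPublisher;

class SecondaryReadPreparer implements FindPublisherPreparer {

	@Override
	public FindPublisher<Document> prepare(FindPublisher<Document> findPublisher) {
		return findPublisher.batchSize(256); // tune the wire batches, leave everything else untouched
	}

	@Override
	public ReadPreference getReadPreference() {
		return ReadPreference.secondaryPreferred(); // picked up by initiateFind(..)
	}
}

// usage (collection obtained elsewhere):
// FindPublisher<Document> publisher = new SecondaryReadPreparer()
//     .initiateFind(collection, c -> c.find(new Document("state", "ACTIVE")));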
|
||||
|
||||
@@ -22,6 +22,7 @@ import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.AutoEncryptionSettings;
|
||||
import com.mongodb.DBDecoderFactory;
|
||||
import com.mongodb.DBEncoderFactory;
|
||||
import com.mongodb.MongoClient;
|
||||
@@ -73,6 +74,7 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
|
||||
private boolean ssl;
|
||||
private @Nullable SSLSocketFactory sslSocketFactory;
|
||||
private @Nullable AutoEncryptionSettings autoEncryptionSettings;
|
||||
|
||||
/**
|
||||
* Set the {@link MongoClient} description.
|
||||
@@ -272,6 +274,16 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
this.serverSelectionTimeout = serverSelectionTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link AutoEncryptionSettings} to be used.
|
||||
*
|
||||
* @param autoEncryptionSettings can be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public void setAutoEncryptionSettings(@Nullable AutoEncryptionSettings autoEncryptionSettings) {
|
||||
this.autoEncryptionSettings = autoEncryptionSettings;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance()
|
||||
@@ -304,7 +316,8 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
.requiredReplicaSetName(requiredReplicaSetName) //
|
||||
.serverSelectionTimeout(serverSelectionTimeout) //
|
||||
.sslEnabled(ssl) //
|
||||
.socketFactory(socketFactoryToUse) // TODO: Mongo Driver 4 - remove if not available
|
||||
.autoEncryptionSettings(autoEncryptionSettings) //
|
||||
.socketFactory(socketFactoryToUse) // TODO: Mongo Driver 4 -
|
||||
.socketKeepAlive(socketKeepAlive) // TODO: Mongo Driver 4 - remove if not available
|
||||
.socketTimeout(socketTimeout) //
|
||||
.threadsAllowedToBlockForConnectionMultiplier(threadsAllowedToBlockForConnectionMultiplier) //
|
||||
|
||||
@@ -0,0 +1,120 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
|
||||
import org.bson.BsonDocument;
|
||||
import org.springframework.beans.factory.FactoryBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.AutoEncryptionSettings;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
|
||||
/**
|
||||
* {@link FactoryBean} for creating {@link AutoEncryptionSettings} using the {@link AutoEncryptionSettings.Builder}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public class MongoEncryptionSettingsFactoryBean implements FactoryBean<AutoEncryptionSettings> {
|
||||
|
||||
private boolean bypassAutoEncryption;
|
||||
private String keyVaultNamespace;
|
||||
private Map<String, Object> extraOptions;
|
||||
private MongoClientSettings keyVaultClientSettings;
|
||||
private Map<String, Map<String, Object>> kmsProviders;
|
||||
private Map<String, BsonDocument> schemaMap;
|
||||
|
||||
/**
|
||||
* @param bypassAutoEncryption
|
||||
* @see AutoEncryptionSettings.Builder#bypassAutoEncryption(boolean)
|
||||
*/
|
||||
public void setBypassAutoEncryption(boolean bypassAutoEncryption) {
|
||||
this.bypassAutoEncryption = bypassAutoEncryption;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param extraOptions
|
||||
* @see AutoEncryptionSettings.Builder#extraOptions(Map)
|
||||
*/
|
||||
public void setExtraOptions(Map<String, Object> extraOptions) {
|
||||
this.extraOptions = extraOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param keyVaultNamespace
|
||||
* @see AutoEncryptionSettings.Builder#keyVaultNamespace(String)
|
||||
*/
|
||||
public void setKeyVaultNamespace(String keyVaultNamespace) {
|
||||
this.keyVaultNamespace = keyVaultNamespace;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param keyVaultClientSettings
|
||||
* @see AutoEncryptionSettings.Builder#keyVaultMongoClientSettings(MongoClientSettings)
|
||||
*/
|
||||
public void setKeyVaultClientSettings(MongoClientSettings keyVaultClientSettings) {
|
||||
this.keyVaultClientSettings = keyVaultClientSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param kmsProviders
|
||||
* @see AutoEncryptionSettings.Builder#kmsProviders(Map)
|
||||
*/
|
||||
public void setKmsProviders(Map<String, Map<String, Object>> kmsProviders) {
|
||||
this.kmsProviders = kmsProviders;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param schemaMap
|
||||
* @see AutoEncryptionSettings.Builder#schemaMap(Map)
|
||||
*/
|
||||
public void setSchemaMap(Map<String, BsonDocument> schemaMap) {
|
||||
this.schemaMap = schemaMap;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObject()
|
||||
*/
|
||||
@Override
|
||||
public AutoEncryptionSettings getObject() {
|
||||
|
||||
return AutoEncryptionSettings.builder() //
|
||||
.bypassAutoEncryption(bypassAutoEncryption) //
|
||||
.keyVaultNamespace(keyVaultNamespace) //
|
||||
.keyVaultMongoClientSettings(keyVaultClientSettings) //
|
||||
.kmsProviders(orEmpty(kmsProviders)) //
|
||||
.extraOptions(orEmpty(extraOptions)) //
|
||||
.schemaMap(orEmpty(schemaMap)) //
|
||||
.build();
|
||||
}
|
||||
|
||||
private <K, V> Map<K, V> orEmpty(@Nullable Map<K, V> source) {
|
||||
return source != null ? source : Collections.emptyMap();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
|
||||
*/
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return AutoEncryptionSettings.class;
|
||||
}
|
||||
}
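A hedged configuration sketch wiring the new factory bean into the client options factory shown earlier in this diff; the key vault namespace and the local master key are placeholders, not values suggested by the change:

import java.util.Map;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.MongoClientOptionsFactoryBean;
import org.springframework.data.mongodb.core.MongoEncryptionSettingsFactoryBean;

import com.mongodb.AutoEncryptionSettings;

@Configuration
class EncryptionConfig {

	@Bean
	MongoEncryptionSettingsFactoryBean autoEncryptionSettings() {

		Map<String, Object> localKey = Map.of("key", new byte[96]); // placeholder master key material

		MongoEncryptionSettingsFactoryBean factory = new MongoEncryptionSettingsFactoryBean();
		factory.setKeyVaultNamespace("encryption.__keyVault");
		factory.setKmsProviders(Map.of("local", localKey));
		return factory;
	}

	@Bean
	MongoClientOptionsFactoryBean mongoClientOptions(AutoEncryptionSettings settings) {

		MongoClientOptionsFactoryBean options = new MongoClientOptionsFactoryBean();
		options.setAutoEncryptionSettings(settings); // setter added in this change
		return options;
	}
}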
|
||||
@@ -57,6 +57,10 @@ import com.mongodb.client.result.UpdateResult;
|
||||
* Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but
|
||||
* a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK
|
||||
* proxy).
|
||||
* <p />
|
||||
* <strong>NOTE:</strong> Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
|
||||
* specific documentation to learn more about <a href="https://docs.mongodb.com/manual/core/transactions/">Multi
|
||||
* Document Transactions</a>.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @author Mark Pollack
|
||||
@@ -289,12 +293,15 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
Set<String> getCollectionNames();
|
||||
|
||||
/**
|
||||
* Get a collection by name, creating it if it doesn't exist.
|
||||
* Get a {@link MongoCollection} by its name. The returned collection may not exist yet (except in local memory) and
|
||||
* is created on first interaction with the server. Collections can be explicitly created via
|
||||
* {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class)
|
||||
* exists} first.
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection. Must not be {@literal null}.
|
||||
* @return an existing collection or a newly created one.
|
||||
* @return an existing collection or one created on first server interaction.
|
||||
*/
|
||||
MongoCollection<Document> getCollection(String collectionName);
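Given the clarified contract, callers that need the collection to exist up front can check explicitly; a minimal sketch with an illustrative collection name:

import org.bson.Document;
import org.springframework.data.mongodb.core.MongoOperations;

import com.mongodb.client.MongoCollection;

class CollectionLookupExample {

	MongoCollection<Document> auditLog(MongoOperations operations) {

		if (!operations.collectionExists("audit_log")) {
			operations.createCollection("audit_log"); // otherwise the handle only materializes on first use
		}

		return operations.getCollection("audit_log");
	}
}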
|
||||
|
||||
@@ -355,7 +362,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @return
|
||||
* @since 1.7
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0.
|
||||
*/
|
||||
@Deprecated
|
||||
ScriptOperations scriptOps();
|
||||
|
||||
/**
|
||||
@@ -427,7 +436,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* reduce function.
|
||||
* @param entityClass The parametrized type of the returned list
|
||||
* @return The results of the group operation
|
||||
* @deprecated since 2.2. The {@code group} command has been removed in MongoDB Server 4.2.0. <br />
|
||||
* Please use {@link #aggregate(TypedAggregation, String, Class) } with a
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.GroupOperation} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GroupByResults<T> group(String inputCollectionName, GroupBy groupBy, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
@@ -442,7 +455,12 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* reduce function.
|
||||
* @param entityClass The parametrized type of the returned list
|
||||
* @return The results of the group operation
|
||||
* @deprecated since 2.2. The {@code group} command has been removed in MongoDB Server 4.2.0. <br />
|
||||
* Please use {@link #aggregate(TypedAggregation, String, Class) } with a
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.GroupOperation} and
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.MatchOperation} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GroupByResults<T> group(@Nullable Criteria criteria, String inputCollectionName, GroupBy groupBy,
|
||||
Class<T> entityClass);
|
||||
|
||||
@@ -630,24 +648,52 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* information to determine the collection the query is ran against. Note, that MongoDB limits the number of results
|
||||
* by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of
|
||||
* results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2 this method uses aggregations and the
|
||||
* {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* AggregationResults<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return
|
||||
* @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the
|
||||
* number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
|
||||
* particular number of results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2 this method uses aggregations and the
|
||||
* {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* AggregationResults<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @param collectionName the collection to trigger the query against. If no collection name is given the entity class
|
||||
* will be inspected. Must not be {@literal null} nor empty.
|
||||
* @return
|
||||
* @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
@@ -1144,11 +1190,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -1206,11 +1252,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1224,7 +1270,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
|
||||
File diff suppressed because it is too large

@@ -116,11 +116,11 @@ class ReactiveAggregationOperationSupport implements ReactiveAggregationOperatio
			TypedAggregation<?> typedAggregation = (TypedAggregation<?>) aggregation;

			if (typedAggregation.getInputType() != null) {
				return template.determineCollectionName(typedAggregation.getInputType());
				return template.getCollectionName(typedAggregation.getInputType());
			}
		}

		return template.determineCollectionName(domainType);
		return template.getCollectionName(domainType);
	}
	}
}
|
||||
|
||||
@@ -0,0 +1,200 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
|
||||
/**
|
||||
* {@link ReactiveChangeStreamOperation} allows creation and execution of reactive MongoDB
|
||||
* <a href="https://docs.mongodb.com/manual/changeStreams/">Change Stream</a> operations in a fluent API style. <br />
|
||||
* The starting {@literal domainType} is used for mapping a potentially given
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} used for filtering. By default, the
|
||||
* originating {@literal domainType} is also used for mapping back the result from the {@link org.bson.Document}.
|
||||
* However, it is possible to define a different {@literal returnType} via {@code as}.<br />
|
||||
* The collection to operate on is optional, in which case all collections within the actual database are watched; use
|
||||
* {@literal watchCollection} to define a fixed collection.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* changeStream(Jedi.class)
|
||||
* .watchCollection("star-wars")
|
||||
* .filter(where("operationType").is("insert"))
|
||||
* .resumeAt(Instant.now())
|
||||
* .listen();
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public interface ReactiveChangeStreamOperation {
|
||||
|
||||
/**
|
||||
* Start creating a change stream operation for the given {@literal domainType} watching all collections within the
|
||||
* database. <br />
|
||||
* Consider limiting events by defining a {@link ChangeStreamWithCollection#watchCollection(String) collection} and/or
|
||||
* {@link ChangeStreamWithFilterAndProjection#filter(CriteriaDefinition) filter}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}. Use {@link org.bson.Document} to obtain raw elements.
|
||||
* @return new instance of {@link ReactiveChangeStream}. Never {@literal null}.
|
||||
* @throws IllegalArgumentException if domainType is {@literal null}.
|
||||
*/
|
||||
<T> ReactiveChangeStream<T> changeStream(Class<T> domainType);
|
||||
|
||||
/**
|
||||
* Compose change stream execution by calling one of the terminating methods.
|
||||
*/
|
||||
interface TerminatingChangeStream<T> {
|
||||
|
||||
/**
|
||||
* Start listening to changes. The stream will not be completed unless the {@link org.reactivestreams.Subscription}
|
||||
* is {@link org.reactivestreams.Subscription#cancel() canceled}.
|
||||
* <p />
|
||||
* However, the stream may become dead, or invalid, if all watched collections, databases are dropped.
|
||||
*/
|
||||
Flux<ChangeStreamEvent<T>> listen();
|
||||
}
|
||||
|
||||
/**
|
||||
* Collection override (optional).
|
||||
*/
|
||||
interface ChangeStreamWithCollection<T> {
|
||||
|
||||
/**
|
||||
* Explicitly set the name of the collection to watch.<br />
|
||||
* Skip this step to watch all collections within the database.
|
||||
*
|
||||
* @param collection must not be {@literal null} nor {@literal empty}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if {@code collection} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> watchCollection(String collection);
|
||||
|
||||
/**
|
||||
* Set the collection to watch. The collection name is derived from the {@link Class entityClass}.<br />
|
||||
* Skip this step to watch all collections within the database.
|
||||
*
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if {@code entityClass} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> watchCollection(Class<?> entityClass);
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide a filter for limiting results (optional).
|
||||
*/
|
||||
interface ChangeStreamWithFilterAndProjection<T> extends ResumingChangeStream<T>, TerminatingChangeStream<T> {
|
||||
|
||||
/**
|
||||
* Use an {@link Aggregation} to filter matching events.
|
||||
*
|
||||
* @param by must not be {@literal null}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if the given {@link Aggregation} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> filter(Aggregation by);
|
||||
|
||||
/**
|
||||
* Use a {@link CriteriaDefinition criteria} to filter matching events via an
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.MatchOperation}.
|
||||
*
|
||||
* @param by must not be {@literal null}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if the given {@link CriteriaDefinition} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> filter(CriteriaDefinition by);
|
||||
|
||||
/**
|
||||
* Define the target type fields should be mapped to.
|
||||
*
|
||||
* @param resultType must not be {@literal null}.
|
||||
* @param <R> result type.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
*/
|
||||
<R> ChangeStreamWithFilterAndProjection<R> as(Class<R> resultType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resume a change stream. (optional).
|
||||
*/
|
||||
interface ResumingChangeStream<T> extends TerminatingChangeStream<T> {
|
||||
|
||||
/**
|
||||
* Resume the change stream at a given point.
|
||||
*
|
||||
* @param token an {@link Instant} or {@link BsonTimestamp}
|
||||
* @return new instance of {@link TerminatingChangeStream}.
|
||||
* @see ChangeStreamOptionsBuilder#resumeAt(Instant)
|
||||
* @see ChangeStreamOptionsBuilder#resumeAt(BsonTimestamp)
|
||||
* @throws IllegalArgumentException if the given beacon is neither {@link Instant} nor {@link BsonTimestamp}.
|
||||
*/
|
||||
TerminatingChangeStream<T> resumeAt(Object token);
|
||||
|
||||
/**
|
||||
* Resume the change stream after a given point.
|
||||
*
|
||||
* @param token an {@link Instant} or {@link BsonTimestamp}
|
||||
* @return new instance of {@link TerminatingChangeStream}.
|
||||
* @see ChangeStreamOptionsBuilder#resumeAfter(BsonValue)
|
||||
* @see ChangeStreamOptionsBuilder#resumeToken(BsonValue)
|
||||
* @throws IllegalArgumentException if the given beacon is not a {@link BsonValue}.
|
||||
*/
|
||||
TerminatingChangeStream<T> resumeAfter(Object token);
|
||||
|
||||
/**
|
||||
* Start the change stream after a given point.
|
||||
*
|
||||
* @param token an {@link Instant} or {@link BsonTimestamp}
|
||||
* @return new instance of {@link TerminatingChangeStream}.
|
||||
* @see ChangeStreamOptionsBuilder#startAfter(BsonValue)
|
||||
* @throws IllegalArgumentException if the given beacon is not a {@link BsonValue}.
|
||||
*/
|
||||
TerminatingChangeStream<T> startAfter(Object token);
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide some options.
|
||||
*/
|
||||
interface ChangeStreamWithOptions<T> {
|
||||
|
||||
/**
|
||||
* Provide some options via the callback by modifying the given {@link ChangeStreamOptionsBuilder}. Previously
|
||||
* defined options like a {@link ResumingChangeStream#resumeAfter(Object) resumeToken} are carried over to the
|
||||
* builder and can be overwritten via eg. {@link ChangeStreamOptionsBuilder#resumeToken(BsonValue)}.
|
||||
*
|
||||
* @param optionsConsumer never {@literal null}.
|
||||
* @return new instance of {@link ReactiveChangeStream}.
|
||||
*/
|
||||
ReactiveChangeStream<T> withOptions(Consumer<ChangeStreamOptionsBuilder> optionsConsumer);
|
||||
}
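A sketch of the options callback in use, layering a full-document lookup and a collation onto the fluent chain before the stream is started. Person, the collection name, and the collation value are illustrative; the builder methods are assumed from ChangeStreamOptions.builder():

import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.query.Collation;

import com.mongodb.client.model.changestream.FullDocument;

import reactor.core.publisher.Flux;

class ChangeStreamOptionsExample {

	Flux<ChangeStreamEvent<Person>> watchInserts(ReactiveMongoTemplate template) {

		return template.changeStream(Person.class)                       // Person is a placeholder domain type
				.withOptions(options -> options
						.fullDocumentLookup(FullDocument.UPDATE_LOOKUP)  // deliver the full document for updates
						.collation(Collation.of("en_US")))
				.watchCollection("people")
				.filter(where("operationType").is("insert"))
				.listen();
	}
}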
|
||||
|
||||
/**
|
||||
* {@link ReactiveChangeStream} provides methods for constructing change stream operations in a fluent way.
|
||||
*/
|
||||
interface ReactiveChangeStream<T> extends ChangeStreamWithOptions<T>, ChangeStreamWithCollection<T>,
|
||||
TerminatingChangeStream<T>, ResumingChangeStream<T>, ChangeStreamWithFilterAndProjection<T> {}
|
||||
}
|
||||
@@ -0,0 +1,230 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.List;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.MatchOperation;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
class ReactiveChangeStreamOperationSupport implements ReactiveChangeStreamOperation {
|
||||
|
||||
private final ReactiveMongoTemplate template;
|
||||
|
||||
/**
|
||||
* @param template must not be {@literal null}.
|
||||
*/
|
||||
ReactiveChangeStreamOperationSupport(ReactiveMongoTemplate template) {
|
||||
this.template = template;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation#changeStream(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T> ReactiveChangeStream<T> changeStream(Class<T> domainType) {
|
||||
|
||||
Assert.notNull(domainType, "DomainType must not be null!");
|
||||
return new ReactiveChangeStreamSupport<>(template, domainType, domainType, null, null);
|
||||
}
|
||||
|
||||
static class ReactiveChangeStreamSupport<T>
|
||||
implements ReactiveChangeStream<T>, ChangeStreamWithFilterAndProjection<T> {
|
||||
|
||||
private final ReactiveMongoTemplate template;
|
||||
private final Class<?> domainType;
|
||||
private final Class<T> returnType;
|
||||
private final @Nullable String collection;
|
||||
private final @Nullable ChangeStreamOptions options;
|
||||
|
||||
private ReactiveChangeStreamSupport(ReactiveMongoTemplate template, Class<?> domainType, Class<T> returnType,
|
||||
@Nullable String collection, @Nullable ChangeStreamOptions options) {
|
||||
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.returnType = returnType;
|
||||
this.collection = collection;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithCollection#watchCollection(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public ChangeStreamWithFilterAndProjection<T> watchCollection(String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection name must not be null nor empty!");
|
||||
|
||||
return new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithCollection#watchCollection(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public ChangeStreamWithFilterAndProjection<T> watchCollection(Class<?> entityClass) {
|
||||
|
||||
Assert.notNull(entityClass, "Collection type not be null!");
|
||||
|
||||
return watchCollection(template.getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#resumeAt(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingChangeStream<T> resumeAt(Object token) {
|
||||
|
||||
return withOptions(builder -> {
|
||||
|
||||
if (token instanceof Instant) {
|
||||
builder.resumeAt((Instant) token);
|
||||
} else if (token instanceof BsonTimestamp) {
|
||||
builder.resumeAt((BsonTimestamp) token);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#resumeAfter(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingChangeStream<T> resumeAfter(Object token) {
|
||||
|
||||
Assert.isInstanceOf(BsonValue.class, token, "Token must be a BsonValue");
|
||||
|
||||
return withOptions(builder -> builder.resumeAfter((BsonValue) token));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#startAfter(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingChangeStream<T> startAfter(Object token) {
|
||||
|
||||
Assert.isInstanceOf(BsonValue.class, token, "Token must be a BsonValue");
|
||||
|
||||
return withOptions(builder -> builder.startAfter((BsonValue) token));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithOptions#withOptions(java.util.function.Consumer)
|
||||
*/
|
||||
@Override
|
||||
public ReactiveChangeStreamSupport<T> withOptions(Consumer<ChangeStreamOptionsBuilder> optionsConsumer) {
|
||||
|
||||
ChangeStreamOptionsBuilder builder = initOptionsBuilder();
|
||||
optionsConsumer.accept(builder);
|
||||
|
||||
return new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, builder.build());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithProjection#as(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <R> ChangeStreamWithFilterAndProjection<R> as(Class<R> resultType) {
|
||||
|
||||
Assert.notNull(resultType, "ResultType must not be null!");
|
||||
|
||||
return new ReactiveChangeStreamSupport<>(template, domainType, resultType, collection, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithFilter#filter(org.springframework.data.mongodb.core.aggregation.Aggregation)
|
||||
*/
|
||||
@Override
|
||||
public ChangeStreamWithFilterAndProjection<T> filter(Aggregation filter) {
|
||||
return withOptions(builder -> builder.filter(filter));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithFilter#filter(org.springframework.data.mongodb.core.query.CriteriaDefinition)
|
||||
*/
|
||||
@Override
|
||||
public ChangeStreamWithFilterAndProjection<T> filter(CriteriaDefinition by) {
|
||||
|
||||
MatchOperation $match = Aggregation.match(by);
|
||||
Aggregation aggregation = !Document.class.equals(domainType) ? Aggregation.newAggregation(domainType, $match)
|
||||
: Aggregation.newAggregation($match);
|
||||
return filter(aggregation);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.TerminatingChangeStream#listen()
|
||||
*/
|
||||
@Override
|
||||
public Flux<ChangeStreamEvent<T>> listen() {
|
||||
return template.changeStream(collection, options != null ? options : ChangeStreamOptions.empty(), returnType);
|
||||
}
|
||||
|
||||
private ChangeStreamOptionsBuilder initOptionsBuilder() {
|
||||
|
||||
ChangeStreamOptionsBuilder builder = ChangeStreamOptions.builder();
|
||||
if (options == null) {
|
||||
return builder;
|
||||
}
|
||||
|
||||
options.getFilter().ifPresent(it -> {
|
||||
if (it instanceof Aggregation) {
|
||||
builder.filter((Aggregation) it);
|
||||
} else {
|
||||
builder.filter(((List<Document>) it).toArray(new Document[0]));
|
||||
}
|
||||
});
|
||||
options.getFullDocumentLookup().ifPresent(builder::fullDocumentLookup);
|
||||
options.getCollation().ifPresent(builder::collation);
|
||||
|
||||
if (options.isResumeAfter()) {
|
||||
options.getResumeToken().ifPresent(builder::resumeAfter);
|
||||
options.getResumeBsonTimestamp().ifPresent(builder::resumeAfter);
|
||||
} else if (options.isStartAfter()) {
|
||||
options.getResumeToken().ifPresent(builder::startAfter);
|
||||
} else {
|
||||
options.getResumeTimestamp().ifPresent(builder::resumeAt);
|
||||
options.getResumeBsonTimestamp().ifPresent(builder::resumeAt);
|
||||
}
|
||||
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
}
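A usage sketch for the support class above, keeping the resume token of the last processed event so the stream can be re-established after a failure. The "orders" collection and the error handling left out are assumptions:

import java.util.concurrent.atomic.AtomicReference;

import org.bson.BsonValue;
import org.bson.Document;
import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import reactor.core.publisher.Flux;

class ResumableOrderStream {

	private final AtomicReference<BsonValue> lastToken = new AtomicReference<>();

	Flux<ChangeStreamEvent<Document>> start(ReactiveMongoTemplate template) {

		return template.changeStream(Document.class)
				.watchCollection("orders")                                            // placeholder collection
				.listen()
				.doOnNext(event -> lastToken.set(event.getRaw().getResumeToken()));   // remember where we are
	}

	Flux<ChangeStreamEvent<Document>> resume(ReactiveMongoTemplate template) {

		return template.changeStream(Document.class)
				.watchCollection("orders")
				.resumeAfter(lastToken.get())   // hands the BsonValue back to the options builder
				.listen();
	}
}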
|
||||
@@ -23,6 +23,7 @@ import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.dao.IncorrectResultSizeDataAccessException;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
@@ -31,8 +32,6 @@ import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.reactivestreams.client.FindPublisher;
|
||||
|
||||
/**
|
||||
* Implementation of {@link ReactiveFindOperation}.
|
||||
*
|
||||
@@ -120,12 +119,7 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
|
||||
public Mono<T> first() {
|
||||
|
||||
FindPublisherPreparer preparer = getCursorPreparer(query);
|
||||
Flux<T> result = doFind(new FindPublisherPreparer() {
|
||||
@Override
|
||||
public <D> FindPublisher<D> prepare(FindPublisher<D> publisher) {
|
||||
return preparer.prepare(publisher).limit(1);
|
||||
}
|
||||
});
|
||||
Flux<T> result = doFind(publisher -> preparer.prepare(publisher).limit(1));
|
||||
|
||||
return result.next();
|
||||
}
|
||||
@@ -138,12 +132,7 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
|
||||
public Mono<T> one() {
|
||||
|
||||
FindPublisherPreparer preparer = getCursorPreparer(query);
|
||||
Flux<T> result = doFind(new FindPublisherPreparer() {
|
||||
@Override
|
||||
public <D> FindPublisher<D> prepare(FindPublisher<D> publisher) {
|
||||
return preparer.prepare(publisher).limit(2);
|
||||
}
|
||||
});
|
||||
Flux<T> result = doFind(publisher -> preparer.prepare(publisher).limit(2));
|
||||
|
||||
return result.collectList().flatMap(it -> {
|
||||
|
||||
@@ -238,7 +227,7 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
|
||||
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
|
||||
}
|
||||
|
||||
private String asString() {
|
||||
|
||||
@@ -23,4 +23,4 @@ package org.springframework.data.mongodb.core;
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface ReactiveFluentMongoOperations extends ReactiveFindOperation, ReactiveInsertOperation,
|
||||
ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation, ReactiveMapReduceOperation {}
|
||||
ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation, ReactiveMapReduceOperation, ReactiveChangeStreamOperation {}
|
||||
|
||||
@@ -96,7 +96,7 @@ class ReactiveInsertOperationSupport implements ReactiveInsertOperation {
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
|
||||
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -171,7 +171,7 @@ class ReactiveMapReduceOperationSupport implements ReactiveMapReduceOperation {
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
|
||||
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -40,6 +40,7 @@ import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.reactive.TransactionalOperator;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
@@ -56,6 +57,10 @@ import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
* Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability
|
||||
* (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using
|
||||
* {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}.
|
||||
* <p />
|
||||
* <strong>NOTE:</strong> Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
|
||||
* specific documentation to learn more about <a href="https://docs.mongodb.com/manual/core/transactions/">Multi
|
||||
* Document Transactions</a>.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
@@ -216,7 +221,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* {@link ClientSession#abortTransaction() rolled back} upon errors.
|
||||
*
|
||||
* @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}.
|
||||
* @deprecated since 2.2. Use {@code @Transactional} or {@link TransactionalOperator}.
|
||||
*/
|
||||
@Deprecated
|
||||
ReactiveSessionScoped inTransaction();
|
||||
|
||||
/**
|
||||
@@ -231,7 +238,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* @param sessionProvider must not be {@literal null}.
|
||||
* @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}.
|
||||
* @since 2.1
|
||||
* @deprecated since 2.2. Use {@code @Transactional} or {@link TransactionalOperator}.
|
||||
*/
|
||||
@Deprecated
|
||||
ReactiveSessionScoped inTransaction(Publisher<ClientSession> sessionProvider);
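A rough sketch of the suggested replacement, assuming Spring's TransactionalOperator and the ReactiveMongoTransactionManager that accompanies this deprecation; databaseFactory, template and Person are placeholders.

// Hypothetical replacement for the deprecated inTransaction() API: both inserts run in one transaction.
ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory);
TransactionalOperator rxtx = TransactionalOperator.create(txManager);

Mono<Void> work = template.insert(new Person("Tyrion"))
.then(template.insert(new Person("Jaime")))
.then();

rxtx.transactional(work).subscribe();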
|
||||
|
||||
/**
|
||||
@@ -277,12 +286,15 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
Flux<String> getCollectionNames();
|
||||
|
||||
/**
|
||||
* Get a collection by name, creating it if it doesn't exist.
|
||||
* Get a {@link MongoCollection} by name. The returned collection may not exists yet (except in local memory) and is
|
||||
* created on first interaction with the server. Collections can be explicitly created via
|
||||
* {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class)
|
||||
* exists} first.
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection.
|
||||
* @return an existing collection or a newly created one.
|
||||
* @return an existing collection or one created on first server interaction.
|
||||
*/
|
||||
MongoCollection<Document> getCollection(String collectionName);
|
||||
|
||||
@@ -612,24 +624,52 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* entity mapping information to determine the collection the query is ran against. Note, that MongoDB limits the
|
||||
* number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
|
||||
* particular number of results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. This method uses since version 2.2 aggregations and the
|
||||
* {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* Flux<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return the converted {@link GeoResult}s.
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Note, that MongoDB
|
||||
* limits the number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect
|
||||
* a particular number of results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. This method uses since version 2.2 aggregations and the
|
||||
* {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* Flux<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @param collectionName the collection to trigger the query against. If no collection name is given the entity class
|
||||
* will be inspected.
|
||||
* @return the converted {@link GeoResult}s.
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
@@ -930,11 +970,11 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -990,11 +1030,11 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -1038,11 +1078,11 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1056,7 +1096,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
@@ -1075,11 +1115,11 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1093,7 +1133,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
|
||||
File diff suppressed because it is too large
@@ -112,7 +112,7 @@ class ReactiveRemoveOperationSupport implements ReactiveRemoveOperation {
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
|
||||
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -123,17 +123,17 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {
|
||||
|
||||
String collectionName = getCollectionName();
|
||||
|
||||
return template.findAndModify(query, update, findAndModifyOptions, targetType, collectionName);
|
||||
return template.findAndModify(query, update, findAndModifyOptions != null ? findAndModifyOptions : FindAndModifyOptions.none(), targetType, collectionName);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingFindAndReplace#findAndReplace()
|
||||
*/
|
||||
@Override
|
||||
public Mono<T> findAndReplace() {
|
||||
return template.findAndReplace(query, replacement,
|
||||
findAndReplaceOptions != null ? findAndReplaceOptions : new FindAndReplaceOptions(), (Class) domainType,
|
||||
findAndReplaceOptions != null ? findAndReplaceOptions : FindAndReplaceOptions.none(), (Class) domainType,
|
||||
getCollectionName(), targetType);
|
||||
}
|
||||
|
||||
@@ -172,7 +172,7 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {
|
||||
findAndReplaceOptions, replacement, targetType);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithUpdate#replaceWith(java.lang.Object)
|
||||
*/
|
||||
@@ -185,7 +185,7 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {
|
||||
findAndReplaceOptions, replacement, targetType);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.FindAndReplaceWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndReplaceOptions)
|
||||
*/
|
||||
@@ -216,7 +216,7 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {
|
||||
}
|
||||
|
||||
private String getCollectionName() {
|
||||
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
|
||||
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,45 @@
/*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;

import org.springframework.lang.Nullable;

import com.mongodb.ReadPreference;

/**
* Interface to be implemented by any object that wishes to expose the {@link ReadPreference}.
* <p>
* Typically implemented by cursor or query preparer objects.
*
* @author Christoph Strobl
* @author Mark Paluch
* @since 2.2
*/
public interface ReadPreferenceAware {

/**
* @return {@literal true} if a {@link ReadPreference} is set.
*/
default boolean hasReadPreference() {
return getReadPreference() != null;
}

/**
* @return the {@link ReadPreference} to apply or {@literal null} if none set.
*/
@Nullable
ReadPreference getReadPreference();
}
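A minimal implementor sketch; the class name is made up for illustration.

// Illustrative only: a preparer that advertises a fixed ReadPreference.
class SecondaryPreferredPreparer implements ReadPreferenceAware {

@Override
@Nullable
public ReadPreference getReadPreference() {
return ReadPreference.secondaryPreferred();
}
}

// Callers can branch on the default hasReadPreference() before applying it, e.g.
// collection = preparer.hasReadPreference() ? collection.withReadPreference(preparer.getReadPreference()) : collection;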
|
||||
@@ -29,7 +29,9 @@ import com.mongodb.DB;
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
* @since 1.7
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0.
|
||||
*/
|
||||
@Deprecated
|
||||
public interface ScriptOperations {
|
||||
|
||||
/**
|
||||
|
||||
@@ -211,6 +211,15 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
return delegate.withSession(session);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#isTransactionActive()
|
||||
*/
|
||||
@Override
|
||||
public boolean isTransactionActive() {
|
||||
return session != null && session.hasActiveTransaction();
|
||||
}
|
||||
|
||||
private MongoDatabase decorateDatabase(MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
@@ -213,6 +213,20 @@ public class AccumulatorOperators {
|
||||
return new Sum(append(expression));
|
||||
}
|
||||
|
||||
/**
* Creates new {@link Sum} with all previously added arguments appending the given one. <br />
* <strong>NOTE:</strong> Only possible in {@code $project} stage.
*
* @param value the value to add.
* @return new instance of {@link Sum}.
* @since 2.2
*/
public Sum and(Number value) {

Assert.notNull(value, "Value must not be null!");
return new Sum(append(value));
}
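A short sketch of the new numeric overload inside a $project stage; the field names are placeholders.

// { $project: { adjustedScore: { $sum: [ "$score", 10 ] } } }
ProjectionOperation projection = Aggregation.project()
.and(AccumulatorOperators.Sum.sumOf("score").and(10))
.as("adjustedScore");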
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
|
||||
@@ -244,6 +244,19 @@ public class Aggregation {
|
||||
return new ProjectionOperation(fields);
|
||||
}
|
||||
|
||||
/**
* Creates a new {@link ProjectionOperation} including all top level fields of the given given {@link Class}.
*
* @param type must not be {@literal null}.
* @return new instance of {@link ProjectionOperation}.
* @since 2.2
*/
public static ProjectionOperation project(Class<?> type) {

Assert.notNull(type, "Type must not be null!");
return new ProjectionOperation(type);
}
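A short sketch of the type-based projection added here; Person is a placeholder domain type.

// Projects all top-level properties discovered on Person instead of listing them by hand.
Aggregation aggregation = Aggregation.newAggregation(
Aggregation.match(Criteria.where("age").gte(18)),
Aggregation.project(Person.class));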
|
||||
|
||||
/**
|
||||
* Factory method to create a new {@link UnwindOperation} for the field with the given name.
|
||||
*
|
||||
@@ -612,7 +625,7 @@ public class Aggregation {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the{@code distanceField}. The
|
||||
* Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The
|
||||
* {@code distanceField} defines output field that contains the calculated distance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
|
||||
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
/**
@@ -32,7 +35,23 @@ public interface AggregationOperation {
* Turns the {@link AggregationOperation} into a {@link Document} by using the given
* {@link AggregationOperationContext}.
*
* @param context the {@link AggregationOperationContext} to operate within. Must not be {@literal null}.
* @return the Document
* @deprecated since 2.2 in favor of {@link #toPipelineStages(AggregationOperationContext)}.
*/
@Deprecated
Document toDocument(AggregationOperationContext context);

/**
* Turns the {@link AggregationOperation} into list of {@link Document stages} by using the given
* {@link AggregationOperationContext}. This allows a single {@link AggregationOptions} to add additional stages for
* eg. {@code $sort} or {@code $limit}.
*
* @param context the {@link AggregationOperationContext} to operate within. Must not be {@literal null}.
* @return the pipeline stages to run through. Never {@literal null}.
* @since 2.2
*/
default List<Document> toPipelineStages(AggregationOperationContext context) {
return Collections.singletonList(toDocument(context));
}
}
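A sketch of an operation contributing more than one stage through the new hook; the class is made up for illustration.

// Illustrative custom operation: emits a $sort stage followed by a $limit stage.
class SortThenLimitOperation implements AggregationOperation {

@Override
public Document toDocument(AggregationOperationContext context) {
return new Document("$sort", new Document("score", -1));
}

@Override
public List<Document> toPipelineStages(AggregationOperationContext context) {
return Arrays.asList(toDocument(context), new Document("$limit", 5L));
}
}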
|
||||
|
||||
@@ -15,8 +15,17 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.beans.PropertyDescriptor;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.beans.BeanUtils;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
|
||||
/**
|
||||
* The context for an {@link AggregationOperation}.
|
||||
@@ -33,7 +42,20 @@ public interface AggregationOperationContext {
|
||||
* @param document will never be {@literal null}.
|
||||
* @return must not be {@literal null}.
|
||||
*/
|
||||
Document getMappedObject(Document document);
|
||||
default Document getMappedObject(Document document) {
|
||||
return getMappedObject(document, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the mapped {@link Document}, potentially converting the source considering mapping metadata for the given
|
||||
* type.
|
||||
*
|
||||
* @param document will never be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @return must not be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
Document getMappedObject(Document document, @Nullable Class<?> type);
|
||||
|
||||
/**
|
||||
* Returns a {@link FieldReference} for the given field or {@literal null} if the context does not expose the given
|
||||
@@ -52,4 +74,33 @@ public interface AggregationOperationContext {
|
||||
* @return
|
||||
*/
|
||||
FieldReference getReference(String name);
|
||||
|
||||
/**
* Returns the {@link Fields} exposed by the type. May be a {@literal class} or an {@literal interface}. The default
* implementation uses {@link BeanUtils#getPropertyDescriptors(Class) property descriptors} discover fields from a
* {@link Class}.
*
* @param type must not be {@literal null}.
* @return never {@literal null}.
* @since 2.2
* @see BeanUtils#getPropertyDescriptor(Class, String)
*/
default Fields getFields(Class<?> type) {

Assert.notNull(type, "Type must not be null!");

return Fields.fields(Arrays.stream(BeanUtils.getPropertyDescriptors(type)) //
.filter(it -> { // object and default methods
Method method = it.getReadMethod();
if (method == null) {
return false;
}
if (ReflectionUtils.isObjectMethod(method)) {
return false;
}
return !method.isDefault();
}) //
.map(PropertyDescriptor::getName) //
.toArray(String[]::new));
}
}
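As a quick illustration of what the default discovery yields for a simple projection type; the interface is made up.

// getFirstname()/getLastname() become the fields "firstname" and "lastname";
// Object methods and default methods are filtered out.
interface PersonProjection {
String getFirstname();
String getLastname();
default String getFullName() { return getFirstname() + " " + getLastname(); } // ignored
}

// context.getFields(PersonProjection.class) then corresponds to Fields.fields("firstname", "lastname").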
|
||||
|
||||
@@ -24,6 +24,7 @@ import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedFi
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField;
|
||||
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* Rendering support for {@link AggregationOperation} into a {@link List} of {@link org.bson.Document}.
|
||||
@@ -52,7 +53,7 @@ class AggregationOperationRenderer {
|
||||
|
||||
for (AggregationOperation operation : operations) {
|
||||
|
||||
operationDocuments.add(operation.toDocument(contextToUse));
|
||||
operationDocuments.addAll(operation.toPipelineStages(contextToUse));
|
||||
|
||||
if (operation instanceof FieldsExposingAggregationOperation) {
|
||||
|
||||
@@ -75,15 +76,16 @@ class AggregationOperationRenderer {
|
||||
* Simple {@link AggregationOperationContext} that just returns {@link FieldReference}s as is.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private static class NoOpAggregationOperationContext implements AggregationOperationContext {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document, java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Document getMappedObject(Document document) {
|
||||
public Document getMappedObject(Document document, @Nullable Class<?> type) {
|
||||
return document;
|
||||
}
|
||||
|
||||
|
||||
@@ -44,11 +44,13 @@ public class AggregationOptions {
|
||||
private static final String EXPLAIN = "explain";
|
||||
private static final String ALLOW_DISK_USE = "allowDiskUse";
|
||||
private static final String COLLATION = "collation";
|
||||
private static final String COMMENT = "comment";
|
||||
|
||||
private final boolean allowDiskUse;
|
||||
private final boolean explain;
|
||||
private final Optional<Document> cursor;
|
||||
private final Optional<Collation> collation;
|
||||
private final Optional<String> comment;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AggregationOptions}.
|
||||
@@ -73,11 +75,28 @@ public class AggregationOptions {
|
||||
*/
|
||||
public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor,
|
||||
@Nullable Collation collation) {
|
||||
this(allowDiskUse, explain, cursor, collation, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link AggregationOptions}.
|
||||
*
|
||||
* @param allowDiskUse whether to off-load intensive sort-operations to disk.
|
||||
* @param explain whether to get the execution plan for the aggregation instead of the actual results.
|
||||
* @param cursor can be {@literal null}, used to pass additional options (such as {@code batchSize}) to the
|
||||
* aggregation.
|
||||
* @param collation collation for string comparison. Can be {@literal null}.
|
||||
* @param comment execution comment. Can be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor,
|
||||
@Nullable Collation collation, @Nullable String comment) {
|
||||
|
||||
this.allowDiskUse = allowDiskUse;
|
||||
this.explain = explain;
|
||||
this.cursor = Optional.ofNullable(cursor);
|
||||
this.collation = Optional.ofNullable(collation);
|
||||
this.comment = Optional.ofNullable(comment);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -108,8 +127,9 @@ public class AggregationOptions {
|
||||
Document cursor = document.get(CURSOR, Document.class);
|
||||
Collation collation = document.containsKey(COLLATION) ? Collation.from(document.get(COLLATION, Document.class))
|
||||
: null;
|
||||
String comment = document.getString(COMMENT);
|
||||
|
||||
return new AggregationOptions(allowDiskUse, explain, cursor, collation);
|
||||
return new AggregationOptions(allowDiskUse, explain, cursor, collation, comment);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -176,6 +196,16 @@ public class AggregationOptions {
|
||||
return collation;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the comment for the aggregation.
|
||||
*
|
||||
* @return
|
||||
* @since 2.2
|
||||
*/
|
||||
public Optional<String> getComment() {
|
||||
return comment;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new potentially adjusted copy for the given {@code aggregationCommandObject} with the configuration
|
||||
* applied.
|
||||
@@ -219,6 +249,7 @@ public class AggregationOptions {
|
||||
|
||||
cursor.ifPresent(val -> document.put(CURSOR, val));
|
||||
collation.ifPresent(val -> document.append(COLLATION, val.toDocument()));
|
||||
comment.ifPresent(val -> document.append(COMMENT, val));
|
||||
|
||||
return document;
|
||||
}
|
||||
@@ -247,6 +278,7 @@ public class AggregationOptions {
|
||||
private boolean explain;
|
||||
private @Nullable Document cursor;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable String comment;
|
||||
|
||||
/**
|
||||
* Defines whether to off-load intensive sort-operations to disk.
|
||||
@@ -302,6 +334,7 @@ public class AggregationOptions {
|
||||
*
|
||||
* @param collation can be {@literal null}.
|
||||
* @return
|
||||
* @since 2.0
|
||||
*/
|
||||
public Builder collation(@Nullable Collation collation) {
|
||||
|
||||
@@ -309,13 +342,26 @@ public class AggregationOptions {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
* Define a comment to describe the execution.
*
* @param comment can be {@literal null}.
* @return
* @since 2.2
*/
public Builder comment(@Nullable String comment) {

this.comment = comment;
return this;
}

/**
* Returns a new {@link AggregationOptions} instance with the given configuration.
*
* @return
*/
public AggregationOptions build() {
return new AggregationOptions(allowDiskUse, explain, cursor, collation);
return new AggregationOptions(allowDiskUse, explain, cursor, collation, comment);
}
}
}
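A short sketch wiring the new comment through the builder; the stage content is a placeholder.

// Attach an execution comment so the aggregation can be spotted in the profiler output.
AggregationOptions options = AggregationOptions.builder()
.allowDiskUse(true)
.comment("nightly rollup")
.build();

Aggregation aggregation = Aggregation.newAggregation(
Aggregation.group("customerId").sum("total").as("revenue"))
.withOptions(options);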
|
||||
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
@@ -33,6 +34,7 @@ import org.springframework.util.Assert;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Shashank Sharma
|
||||
* @since 1.0
|
||||
*/
|
||||
public class ArrayOperators {
|
||||
@@ -57,13 +59,25 @@ public class ArrayOperators {
|
||||
return new ArrayOperatorFactory(expression);
|
||||
}
|
||||
|
||||
/**
* Take the given {@link Collection values} {@link AggregationExpression}.
*
* @param values must not be {@literal null}.
* @return new instance of {@link ArrayOperatorFactory}.
* @since 2.2
*/
public static ArrayOperatorFactory arrayOf(Collection<?> values) {
return new ArrayOperatorFactory(values);
}
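A short sketch of the collection-based entry point; the values and field name are placeholders.

// Renders to { $in: [ 3, [ 1, 2, 3 ] ] } when used inside a projection.
AggregationExpression containsThree = ArrayOperators.arrayOf(Arrays.asList(1, 2, 3)).containsValue(3);

ProjectionOperation projection = Aggregation.project()
.and(containsThree)
.as("hasThree");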
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class ArrayOperatorFactory {
|
||||
|
||||
private final String fieldReference;
|
||||
private final AggregationExpression expression;
|
||||
private final @Nullable String fieldReference;
|
||||
private final @Nullable AggregationExpression expression;
|
||||
private final @Nullable Collection values;
|
||||
|
||||
/**
|
||||
* Creates new {@link ArrayOperatorFactory} for given {@literal fieldReference}.
|
||||
@@ -75,6 +89,7 @@ public class ArrayOperators {
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
this.fieldReference = fieldReference;
|
||||
this.expression = null;
|
||||
this.values = null;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -87,6 +102,21 @@ public class ArrayOperators {
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
this.fieldReference = null;
|
||||
this.expression = expression;
|
||||
this.values = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ArrayOperatorFactory} for given values.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ArrayOperatorFactory(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
this.fieldReference = null;
|
||||
this.expression = null;
|
||||
this.values = values;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -127,7 +157,12 @@ public class ArrayOperators {
|
||||
}
|
||||
|
||||
private ArrayElemAt createArrayElemAt() {
|
||||
return usesFieldRef() ? ArrayElemAt.arrayOf(fieldReference) : ArrayElemAt.arrayOf(expression);
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return ArrayElemAt.arrayOf(fieldReference);
|
||||
}
|
||||
|
||||
return usesExpression() ? ArrayElemAt.arrayOf(expression) : ArrayElemAt.arrayOf(values);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -157,7 +192,12 @@ public class ArrayOperators {
|
||||
}
|
||||
|
||||
private ConcatArrays createConcatArrays() {
|
||||
return usesFieldRef() ? ConcatArrays.arrayOf(fieldReference) : ConcatArrays.arrayOf(expression);
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return ConcatArrays.arrayOf(fieldReference);
|
||||
}
|
||||
|
||||
return usesExpression() ? ConcatArrays.arrayOf(expression) : ConcatArrays.arrayOf(values);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -167,7 +207,13 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public AsBuilder filter() {
|
||||
return Filter.filter(fieldReference);
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return Filter.filter(fieldReference);
|
||||
}
|
||||
|
||||
Assert.state(values != null, "Values must not be null!");
|
||||
return Filter.filter(new ArrayList<>(values));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -176,6 +222,9 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public IsArray isArray() {
|
||||
|
||||
Assert.state(values == null, "Does it make sense to call isArray on an array? Maybe just skip it?");
|
||||
|
||||
return usesFieldRef() ? IsArray.isArray(fieldReference) : IsArray.isArray(expression);
|
||||
}
|
||||
|
||||
@@ -185,7 +234,12 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public Size length() {
|
||||
return usesFieldRef() ? Size.lengthOfArray(fieldReference) : Size.lengthOfArray(expression);
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return Size.lengthOfArray(fieldReference);
|
||||
}
|
||||
|
||||
return usesExpression() ? Size.lengthOfArray(expression) : Size.lengthOfArray(values);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -194,7 +248,12 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public Slice slice() {
|
||||
return usesFieldRef() ? Slice.sliceArrayOf(fieldReference) : Slice.sliceArrayOf(expression);
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return Slice.sliceArrayOf(fieldReference);
|
||||
}
|
||||
|
||||
return usesExpression() ? Slice.sliceArrayOf(expression) : Slice.sliceArrayOf(values);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -205,8 +264,13 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public IndexOfArray indexOf(Object value) {
|
||||
return usesFieldRef() ? IndexOfArray.arrayOf(fieldReference).indexOf(value)
|
||||
: IndexOfArray.arrayOf(expression).indexOf(value);
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return IndexOfArray.arrayOf(fieldReference).indexOf(value);
|
||||
}
|
||||
|
||||
return usesExpression() ? IndexOfArray.arrayOf(expression).indexOf(value)
|
||||
: IndexOfArray.arrayOf(values).indexOf(value);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -215,7 +279,13 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public ReverseArray reverse() {
|
||||
return usesFieldRef() ? ReverseArray.reverseArrayOf(fieldReference) : ReverseArray.reverseArrayOf(expression);
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return ReverseArray.reverseArrayOf(fieldReference);
|
||||
}
|
||||
|
||||
return usesExpression() ? ReverseArray.reverseArrayOf(expression)
|
||||
: ReverseArray.reverseArrayOf(Collections.singletonList(values));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -253,7 +323,12 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public Zip zipWith(Object... arrays) {
|
||||
return (usesFieldRef() ? Zip.arrayOf(fieldReference) : Zip.arrayOf(expression)).zip(arrays);
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return Zip.arrayOf(fieldReference).zip(arrays);
|
||||
}
|
||||
|
||||
return (usesExpression() ? Zip.arrayOf(expression) : Zip.arrayOf(values)).zip(arrays);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -264,7 +339,12 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public In containsValue(Object value) {
|
||||
return (usesFieldRef() ? In.arrayOf(fieldReference) : In.arrayOf(expression)).containsValue(value);
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return In.arrayOf(fieldReference).containsValue(value);
|
||||
}
|
||||
|
||||
return (usesExpression() ? In.arrayOf(expression) : In.arrayOf(values)).containsValue(value);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -276,8 +356,11 @@ public class ArrayOperators {
|
||||
*/
|
||||
public ArrayToObject toObject() {
|
||||
|
||||
return usesFieldRef() ? ArrayToObject.arrayValueOfToObject(fieldReference)
|
||||
: ArrayToObject.arrayValueOfToObject(expression);
|
||||
if (usesFieldRef()) {
|
||||
return ArrayToObject.arrayValueOfToObject(fieldReference);
|
||||
}
|
||||
|
||||
return usesExpression() ? ArrayToObject.arrayValueOfToObject(expression) : ArrayToObject.arrayToObject(values);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -294,9 +377,20 @@ public class ArrayOperators {
|
||||
Reduce startingWith(Object initialValue);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if {@link #fieldReference} is not {@literal null}.
|
||||
*/
|
||||
private boolean usesFieldRef() {
|
||||
return fieldReference != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if {@link #expression} is not {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
private boolean usesExpression() {
|
||||
return expression != null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -339,6 +433,19 @@ public class ArrayOperators {
|
||||
return new ArrayElemAt(Collections.singletonList(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ArrayElemAt}.
|
||||
*
|
||||
* @param values The array members. Must not be {@literal null}.
|
||||
* @return new instance of {@link ArrayElemAt}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static ArrayElemAt arrayOf(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
return new ArrayElemAt(Collections.singletonList(values));
|
||||
}
|
||||
|
||||
public ArrayElemAt elementAt(int index) {
|
||||
return new ArrayElemAt(append(index));
|
||||
}
|
||||
@@ -396,6 +503,19 @@ public class ArrayOperators {
|
||||
return new ConcatArrays(Collections.singletonList(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ConcatArrays}.
|
||||
*
|
||||
* @param values The array members. Must not be {@literal null}.
|
||||
* @return new instance of {@link ConcatArrays}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static ConcatArrays arrayOf(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
return new ConcatArrays(Collections.singletonList(values));
|
||||
}
|
||||
|
||||
public ConcatArrays concat(String arrayFieldReference) {
|
||||
|
||||
Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null!");
|
||||
@@ -496,7 +616,7 @@ public class ArrayOperators {
|
||||
}
|
||||
|
||||
NestedDelegatingExpressionAggregationOperationContext nea = new NestedDelegatingExpressionAggregationOperationContext(
|
||||
context);
|
||||
context, Collections.singleton(as));
|
||||
return ((AggregationExpression) condition).toDocument(nea);
|
||||
}
|
||||
|
||||
@@ -740,6 +860,19 @@ public class ArrayOperators {
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
return new Size(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link Size}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link Size}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static Size lengthOfArray(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
return new Size(Collections.singletonList(values));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -782,6 +915,19 @@ public class ArrayOperators {
|
||||
return new Slice(Collections.singletonList(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link Slice}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link Slice}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static Slice sliceArrayOf(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
return new Slice(Collections.singletonList(values));
|
||||
}
|
||||
|
||||
public Slice itemCount(int nrElements) {
|
||||
return new Slice(append(nrElements));
|
||||
}
|
||||
@@ -852,6 +998,19 @@ public class ArrayOperators {
|
||||
return new IndexOfArrayBuilder(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Start creating new {@link IndexOfArray}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link IndexOfArray}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static IndexOfArrayBuilder arrayOf(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
return new IndexOfArrayBuilder(values);
|
||||
}
|
||||
|
||||
public IndexOfArray within(Range<Long> range) {
|
||||
return new IndexOfArray(append(AggregationUtils.toRangeValues(range)));
|
||||
}
|
||||
@@ -1006,6 +1165,17 @@ public class ArrayOperators {
|
||||
public static ReverseArray reverseArrayOf(AggregationExpression expression) {
|
||||
return new ReverseArray(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ReverseArray}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link ReverseArray}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static ReverseArray reverseArrayOf(Collection<?> values) {
|
||||
return new ReverseArray(values);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1358,6 +1528,19 @@ public class ArrayOperators {
|
||||
return new ZipBuilder(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Start creating new {@link Zip}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link Zip}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static ZipBuilder arrayOf(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Expression must not be null!");
|
||||
return new ZipBuilder(values);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link Zip} and set the {@code useLongestLength} property to {@literal true}.
|
||||
*
|
||||
@@ -1442,6 +1625,10 @@ public class ArrayOperators {
|
||||
* {@link AggregationExpression} for {@code $in}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Shashank Sharma
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/in/">https://docs.mongodb.com/manual/reference/operator/aggregation/in/</a>
|
||||
* @since 2.2
|
||||
*/
|
||||
public static class In extends AbstractAggregationExpression {
|
||||
|
||||
@@ -1460,18 +1647,14 @@ public class ArrayOperators {
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static InBuilder arrayOf(final String fieldReference) {
|
||||
public static InBuilder arrayOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new InBuilder() {
|
||||
return value -> {
|
||||
|
||||
@Override
|
||||
public In containsValue(Object value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return new In(Arrays.asList(value, Fields.field(fieldReference)));
|
||||
}
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return new In(Arrays.asList(value, Fields.field(fieldReference)));
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1481,18 +1664,34 @@ public class ArrayOperators {
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static InBuilder arrayOf(final AggregationExpression expression) {
|
||||
public static InBuilder arrayOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new InBuilder() {
|
||||
return value -> {
|
||||
|
||||
@Override
|
||||
public In containsValue(Object value) {
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return new In(Arrays.asList(value, expression));
|
||||
}
|
||||
return new In(Arrays.asList(value, expression));
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Support for Aggregation In Search an Element in List of Objects to Filter Start creating {@link In}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link InBuilder}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static InBuilder arrayOf(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
|
||||
return value -> {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
|
||||
return new In(Arrays.asList(value, values));
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -56,11 +56,11 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document, java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Document getMappedObject(Document document) {
|
||||
return rootContext.getMappedObject(document);
|
||||
public Document getMappedObject(Document document, @Nullable Class<?> type) {
|
||||
return rootContext.getMappedObject(document, type);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -81,6 +81,15 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
return getReference(null, name);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getFields(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Fields getFields(Class<?> type) {
|
||||
return rootContext.getFields(type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link FieldReference} to the given {@link Field} with the given {@code name}.
|
||||
*
|
||||
|
||||
@@ -112,7 +112,7 @@ public final class Fields implements Iterable<Field> {
|
||||
this.fields = verify(fields);
|
||||
}
|
||||
|
||||
private static final List<Field> verify(List<Field> fields) {
|
||||
private static List<Field> verify(List<Field> fields) {
|
||||
|
||||
Map<String, Field> reference = new HashMap<String, Field>();
|
||||
|
||||
|
||||
@@ -15,10 +15,15 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.NumberUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
@@ -89,6 +94,15 @@ public class GeoNearOperation implements AggregationOperation {
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
Document command = context.getMappedObject(nearQuery.toDocument());
|
||||
|
||||
if(command.containsKey("query")) {
|
||||
command.replace("query", context.getMappedObject(command.get("query", Document.class)));
|
||||
}
|
||||
|
||||
if(command.containsKey("collation")) {
|
||||
command.remove("collation");
|
||||
}
|
||||
|
||||
command.put("distanceField", distanceField);
|
||||
|
||||
if (StringUtils.hasText(indexKey)) {
|
||||
@@ -97,4 +111,28 @@ public class GeoNearOperation implements AggregationOperation {
|
||||
|
||||
return new Document("$geoNear", command);
|
||||
}
|
||||
|
||||
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toPipelineStages(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
*/
@Override
public List<Document> toPipelineStages(AggregationOperationContext context) {

Document command = toDocument(context);
Number limit = (Number) command.get("$geoNear", Document.class).remove("num");

List<Document> stages = new ArrayList<>();
stages.add(command);

if(nearQuery.getSkip() != null && nearQuery.getSkip() > 0){
stages.add(new Document("$skip", nearQuery.getSkip()));
}

if(limit != null) {
stages.add(new Document("$limit", limit.longValue()));
}

return stages;
}
}
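A rough sketch of a paged $geoNear rendering to several stages, assuming skip and limit are carried over from the wrapped Query; Restaurant and the coordinates are placeholders.

// With the pipeline split above this renders roughly as
// [ { $geoNear: { ..., distanceField: "distance" } }, { $skip: 40 }, { $limit: 20 } ]
NearQuery near = NearQuery.near(new Point(-73.99, 40.73))
.spherical(true)
.query(new Query().skip(40).limit(20));

TypedAggregation<Restaurant> geoNear = Aggregation.newAggregation(Restaurant.class,
Aggregation.geoNear(near, "distance"));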
|
||||
|
||||
@@ -128,7 +128,13 @@ public class GraphLookupOperation implements InheritsFieldsAggregationOperation
*/
@Override
public ExposedFields getFields() {
return ExposedFields.from(new ExposedField(as, true));

List<ExposedField> fields = new ArrayList<>(2);
fields.add(new ExposedField(as, true));
if(depthField != null) {
fields.add(new ExposedField(depthField, true));
}
return ExposedFields.from(fields.toArray(new ExposedField[0]));
}
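A short sketch where the now-exposed depth field is referenced by a later stage; collection and field names are placeholders.

// "depth" is exposed by getFields() alongside "reportingHierarchy", so the $sort can reference it.
GraphLookupOperation graphLookup = GraphLookupOperation.builder()
.from("employees")
.startWith("manager")
.connectFrom("manager")
.connectTo("name")
.depthField("depth")
.as("reportingHierarchy");

Aggregation aggregation = Aggregation.newAggregation(graphLookup,
Aggregation.sort(Sort.Direction.ASC, "depth"));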
|
||||
|
||||
/**
|
||||
|
||||
@@ -15,9 +15,12 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Collection;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExpressionFieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
@@ -26,21 +29,25 @@ import org.springframework.util.Assert;
|
||||
* variable.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.10
|
||||
*/
|
||||
class NestedDelegatingExpressionAggregationOperationContext implements AggregationOperationContext {
|
||||
|
||||
private final AggregationOperationContext delegate;
|
||||
private final Collection<Field> inners;
|
||||
|
||||
/**
|
||||
* Creates new {@link NestedDelegatingExpressionAggregationOperationContext}.
|
||||
*
|
||||
* @param referenceContext must not be {@literal null}.
|
||||
*/
|
||||
public NestedDelegatingExpressionAggregationOperationContext(AggregationOperationContext referenceContext) {
|
||||
NestedDelegatingExpressionAggregationOperationContext(AggregationOperationContext referenceContext,
|
||||
Collection<Field> inners) {
|
||||
|
||||
Assert.notNull(referenceContext, "Reference context must not be null!");
|
||||
this.delegate = referenceContext;
|
||||
this.inners = inners;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -52,13 +59,40 @@ class NestedDelegatingExpressionAggregationOperationContext implements Aggregati
|
||||
return delegate.getMappedObject(document);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document, java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Document getMappedObject(Document document, Class<?> type) {
|
||||
return delegate.getMappedObject(document, type);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.Field)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
return new ExpressionFieldReference(delegate.getReference(field));
|
||||
|
||||
FieldReference reference = delegate.getReference(field);
|
||||
return isInnerVariableReference(field) ? new ExpressionFieldReference(delegate.getReference(field)) : reference;
|
||||
}
|
||||
|
||||
private boolean isInnerVariableReference(Field field) {
|
||||
|
||||
if (inners.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (Field inner : inners) {
|
||||
if (inner.getName().equals(field.getName())
|
||||
|| (field.getTarget().contains(".") && field.getTarget().startsWith(inner.getName()))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -69,4 +103,13 @@ class NestedDelegatingExpressionAggregationOperationContext implements Aggregati
|
||||
public FieldReference getReference(String name) {
|
||||
return new ExpressionFieldReference(delegate.getReference(name));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getFields(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Fields getFields(Class<?> type) {
|
||||
return delegate.getFields(type);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,7 +16,11 @@
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Encapsulates the {@code $out}-operation.
|
||||
@@ -26,18 +30,161 @@ import org.springframework.util.Assert;
|
||||
*
|
||||
* @author Nikolay Bogdanov
|
||||
* @author Christoph Strobl
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/out/">MongoDB Aggregation Framework: $out</a>
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/out/">MongoDB Aggregation Framework:
|
||||
* $out</a>
|
||||
*/
|
||||
public class OutOperation implements AggregationOperation {
|
||||
|
||||
private final @Nullable String databaseName;
|
||||
private final String collectionName;
|
||||
private final @Nullable Document uniqueKey;
|
||||
private final @Nullable OutMode mode;
|
||||
|
||||
/**
|
||||
* @param outCollectionName Collection name to export the results. Must not be {@literal null}.
|
||||
*/
|
||||
public OutOperation(String outCollectionName) {
|
||||
Assert.notNull(outCollectionName, "Collection name must not be null!");
|
||||
this.collectionName = outCollectionName;
|
||||
this(null, outCollectionName, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param databaseName Optional database name the target collection is located in. Can be {@literal null}.
|
||||
* @param collectionName Collection name to export the results to. Must not be {@literal null}.
|
||||
* @param uniqueKey Optional unique key spec to identify a document in the target collection for replacement or merge.
|
||||
* @param mode The mode for merging the aggregation pipeline output with the target collection. Can be
|
||||
* {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
private OutOperation(@Nullable String databaseName, String collectionName, @Nullable Document uniqueKey,
|
||||
@Nullable OutMode mode) {
|
||||
|
||||
Assert.notNull(collectionName, "Collection name must not be null!");
|
||||
|
||||
this.databaseName = databaseName;
|
||||
this.collectionName = collectionName;
|
||||
this.uniqueKey = uniqueKey;
|
||||
this.mode = mode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally specify the database of the target collection. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.2 or later.
|
||||
*
|
||||
* @param database can be {@literal null}. Defaults to the aggregation target database.
|
||||
* @return new instance of {@link OutOperation}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public OutOperation in(@Nullable String database) {
|
||||
return new OutOperation(database, collectionName, uniqueKey, mode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally specify the field that uniquely identifies a document in the target collection. <br />
|
||||
* For convenience the given {@literal key} can either be a single field name or the JSON representation of a key
|
||||
* {@link Document}.
|
||||
*
|
||||
* <pre class="code">
|
||||
*
|
||||
* // {
|
||||
* // "field-1" : 1
|
||||
* // }
|
||||
* .uniqueKey("field-1")
|
||||
*
|
||||
* // {
|
||||
* // "field-1" : 1,
|
||||
* // "field-2" : 1
|
||||
* // }
|
||||
* .uniqueKey("{ 'field-1' : 1, 'field-2' : 1}")
|
||||
* </pre>
|
||||
*
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.2 or later.
|
||||
*
|
||||
* @param key can be {@literal null}. Server uses {@literal _id} when {@literal null}.
|
||||
* @return new instance of {@link OutOperation}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public OutOperation uniqueKey(@Nullable String key) {
|
||||
|
||||
Document uniqueKey = key == null ? null : BsonUtils.toDocumentOrElse(key, it -> new Document(it, 1));
|
||||
return new OutOperation(databaseName, collectionName, uniqueKey, mode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally specify the fields that uniquely identify a document in the target collection. <br />
|
||||
*
|
||||
* <pre class="code">
|
||||
*
|
||||
* // {
|
||||
* // "field-1" : 1
|
||||
* // "field-2" : 1
|
||||
* // }
|
||||
* .uniqueKeyOf(Arrays.asList("field-1", "field-2"))
|
||||
* </pre>
|
||||
*
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.2 or later.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return new instance of {@link OutOperation}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public OutOperation uniqueKeyOf(Iterable<String> fields) {
|
||||
|
||||
Assert.notNull(fields, "Fields must not be null!");
|
||||
|
||||
Document uniqueKey = new Document();
|
||||
fields.forEach(it -> uniqueKey.append(it, 1));
|
||||
|
||||
return new OutOperation(databaseName, collectionName, uniqueKey, mode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify how to merge the aggregation output with the target collection. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.2 or later.
|
||||
*
|
||||
* @param mode must not be {@literal null}.
|
||||
* @return new instance of {@link OutOperation}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public OutOperation mode(OutMode mode) {
|
||||
|
||||
Assert.notNull(mode, "Mode must not be null!");
|
||||
return new OutOperation(databaseName, collectionName, uniqueKey, mode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace the target collection. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.2 or later.
|
||||
*
|
||||
* @return new instance of {@link OutOperation}.
|
||||
* @see OutMode#REPLACE_COLLECTION
|
||||
* @since 2.2
|
||||
*/
|
||||
public OutOperation replaceCollection() {
|
||||
return mode(OutMode.REPLACE_COLLECTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace/Upsert documents in the target collection. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.2 or later.
|
||||
*
|
||||
* @return new instance of {@link OutOperation}.
|
||||
* @see OutMode#REPLACE
|
||||
* @since 2.2
|
||||
*/
|
||||
public OutOperation replaceDocuments() {
|
||||
return mode(OutMode.REPLACE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Insert documents to the target collection. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.2 or later.
|
||||
*
|
||||
* @return new instance of {@link OutOperation}.
|
||||
* @see OutMode#INSERT
|
||||
* @since 2.2
|
||||
*/
|
||||
public OutOperation insertDocuments() {
|
||||
return mode(OutMode.INSERT);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -46,6 +193,62 @@ public class OutOperation implements AggregationOperation {
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document("$out", collectionName);
|
||||
|
||||
if (!requiresMongoDb42Format()) {
|
||||
return new Document("$out", collectionName);
|
||||
}
|
||||
|
||||
Assert.state(mode != null, "Mode must not be null!");
|
||||
|
||||
Document $out = new Document("to", collectionName) //
|
||||
.append("mode", mode.getMongoMode());
|
||||
|
||||
if (StringUtils.hasText(databaseName)) {
|
||||
$out.append("db", databaseName);
|
||||
}
|
||||
|
||||
if (uniqueKey != null) {
|
||||
$out.append("uniqueKey", uniqueKey);
|
||||
}
|
||||
|
||||
return new Document("$out", $out);
|
||||
}
|
||||
|
||||
private boolean requiresMongoDb42Format() {
|
||||
return StringUtils.hasText(databaseName) || mode != null || uniqueKey != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* The mode for merging the aggregation pipeline output.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public enum OutMode {
|
||||
|
||||
/**
|
||||
* Write documents to the target collection. Errors if a document with the same uniqueKey already exists.
|
||||
*/
|
||||
INSERT("insertDocuments"),
|
||||
|
||||
/**
|
||||
* Update any document in the target collection that has the same uniqueKey.
|
||||
*/
|
||||
REPLACE("replaceDocuments"),
|
||||
|
||||
/**
|
||||
* Replaces the target collection with the output of the aggregation pipeline. The target collection cannot be in a different database.
|
||||
*/
|
||||
REPLACE_COLLECTION("replaceCollection");
|
||||
|
||||
private String mode;
|
||||
|
||||
OutMode(String mode) {
|
||||
this.mode = mode;
|
||||
}
|
||||
|
||||
public String getMongoMode() {
|
||||
return mode;
|
||||
}
|
||||
}
|
||||
}
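A minimal sketch of the documents the reworked toDocument(..) now produces, assuming an AggregationOperationContext named ctx; the collection, database and key names are illustrative:

// Plain collection name keeps the pre-4.2 form.
Document legacy = new OutOperation("authors").toDocument(ctx);
// { "$out" : "authors" }

// Setting db, mode or uniqueKey switches to the extended MongoDB 4.2 format.
Document extended = new OutOperation("authors")
		.in("reporting")
		.uniqueKey("{ 'isbn' : 1, 'title' : 1 }")
		.replaceDocuments()
		.toDocument(ctx);
// { "$out" : { "to" : "authors", "mode" : "replaceDocuments",
//              "db" : "reporting", "uniqueKey" : { "isbn" : 1, "title" : 1 } } }

Note that the extended format asserts a non-null mode, so setting only the database or uniqueKey without also choosing a mode trips the state assertion in toDocument(..).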
|
||||
|
||||
@@ -25,6 +25,7 @@ import java.util.Set;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* {@link AggregationOperationContext} implementation prefixing non-command keys on root level with the given prefix.
|
||||
@@ -63,6 +64,15 @@ public class PrefixingDelegatingAggregationOperationContext implements Aggregati
|
||||
return doPrefix(delegate.getMappedObject(document));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document, java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Document getMappedObject(Document document, @Nullable Class<?> type) {
|
||||
return doPrefix(delegate.getMappedObject(document, type));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.Field)
|
||||
@@ -81,6 +91,15 @@ public class PrefixingDelegatingAggregationOperationContext implements Aggregati
|
||||
return delegate.getReference(name);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getFields(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Fields getFields(Class<?> type) {
|
||||
return delegate.getFields(type);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private Document doPrefix(Document source) {
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@ import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond;
|
||||
@@ -73,6 +74,16 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
this(NONE, ProjectionOperationBuilder.FieldProjection.from(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} including all top level fields of the given {@link Class type}.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ProjectionOperation(Class<?> type) {
|
||||
this(NONE, Collections.singletonList(new TypeProjection(type)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy constructor to allow building up {@link ProjectionOperation} instances from already existing
|
||||
* {@link Projection}s.
|
||||
@@ -166,6 +177,48 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
return new ProjectionOperation(this.projections, FieldProjection.from(fields, true));
|
||||
}
|
||||
|
||||
/**
|
||||
* Includes the current {@link ProjectionOperation} as an array with given name. <br />
|
||||
* If you want to specify array values directly use {@link #andArrayOf(Object...)}.
|
||||
*
|
||||
* @param name the target property name.
|
||||
* @return new instance of {@link ProjectionOperation}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ProjectionOperation asArray(String name) {
|
||||
|
||||
return new ProjectionOperation(Collections.emptyList(),
|
||||
Collections.singletonList(new ArrayProjection(Fields.field(name), (List) this.projections)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Includes the given values ({@link Field field references}, {@link AggregationExpression expression}, plain values)
|
||||
* as an array. <br />
|
||||
* The target property name needs to be set via {@link ArrayProjectionOperationBuilder#as(String)}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link ArrayProjectionOperationBuilder}.
|
||||
* @throws IllegalArgumentException if the required argument is {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ArrayProjectionOperationBuilder andArrayOf(Object... values) {
|
||||
|
||||
ArrayProjectionOperationBuilder builder = new ArrayProjectionOperationBuilder(this);
|
||||
|
||||
for (Object value : values) {
|
||||
|
||||
if (value instanceof Field) {
|
||||
builder.and((Field) value);
|
||||
} else if (value instanceof AggregationExpression) {
|
||||
builder.and((AggregationExpression) value);
|
||||
} else {
|
||||
builder.and(value);
|
||||
}
|
||||
}
|
||||
|
||||
return builder;
|
||||
}
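A minimal usage sketch of the new array projection, assuming the usual Aggregation.project(..) entry point; field names and values are illustrative:

ProjectionOperation projection = Aggregation.project("title")
		.andArrayOf(Fields.field("author"), "fixed-value", ArithmeticOperators.valueOf("price").multiplyBy(2))
		.as("meta");
// { "$project" : { "title" : 1,
//                  "meta" : [ "$author", "fixed-value", { "$multiply" : [ "$price", 2 ] } ] } }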
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
|
||||
@@ -1495,7 +1548,8 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
final Field aliasedField = Fields.field(alias, this.field.getName());
|
||||
return new OperationProjection(aliasedField, operation, values.toArray()) {
|
||||
|
||||
/* (non-Javadoc)
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.OperationProjection#getField()
|
||||
*/
|
||||
@Override
|
||||
@@ -1695,9 +1749,164 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
this.expression = expression;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document(field.getName(), expression.toDocument(context));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link Projection} including all top level fields of the given target type mapped to include potentially
|
||||
* deviating field names.
|
||||
*
|
||||
* @since 2.2
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
static class TypeProjection extends Projection {
|
||||
|
||||
private final Class<?> type;
|
||||
|
||||
TypeProjection(Class<?> type) {
|
||||
|
||||
super(Fields.field(type.getSimpleName()));
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
Document projections = new Document();
|
||||
|
||||
Fields fields = context.getFields(type);
|
||||
fields.forEach(it -> projections.append(it.getName(), 1));
|
||||
return context.getMappedObject(projections, type);
|
||||
}
|
||||
}
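A minimal sketch of the new type-based projection, assuming a hypothetical mapped domain type; the @Field alias illustrates how deviating target field names are resolved via getMappedObject(..):

class Book {
	String title;
	@Field("pub_year") int publishedYear;
}

ProjectionOperation projection = new ProjectionOperation(Book.class);
// rendered against a TypeBasedAggregationOperationContext for Book:
// { "$project" : { "title" : 1, "pub_year" : 1 } }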
|
||||
|
||||
/**
|
||||
* Builder for {@code array} projections.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public static class ArrayProjectionOperationBuilder {
|
||||
|
||||
private ProjectionOperation target;
|
||||
private final List<Object> projections;
|
||||
|
||||
public ArrayProjectionOperationBuilder(ProjectionOperation target) {
|
||||
|
||||
this.target = target;
|
||||
this.projections = new ArrayList<>();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ArrayProjectionOperationBuilder} with the current {@link Projection}s and the given one.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return the builder for further method chaining.
|
||||
*/
|
||||
public ArrayProjectionOperationBuilder and(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "AggregationExpression must not be null!");
|
||||
|
||||
this.projections.add(expression);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ArrayProjectionOperationBuilder} with the current {@link Projection}s and the given one.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @return the builder for further method chaining.
|
||||
*/
|
||||
public ArrayProjectionOperationBuilder and(Field field) {
|
||||
|
||||
Assert.notNull(field, "Field must not be null!");
|
||||
|
||||
this.projections.add(field);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ArrayProjectionOperationBuilder} with the current {@link Projection}s and the given one.
|
||||
*
|
||||
* @param value the value to add to the target array.
|
||||
* @return the builder for further method chaining.
|
||||
*/
|
||||
public ArrayProjectionOperationBuilder and(Object value) {
|
||||
|
||||
this.projections.add(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the {@link ProjectionOperation} for the array property with given {@literal name}.
|
||||
*
|
||||
* @param name The target property name. Must not be {@literal null}.
|
||||
* @return new instance of {@link ProjectionOperation}.
|
||||
*/
|
||||
public ProjectionOperation as(String name) {
|
||||
|
||||
return new ProjectionOperation(target.projections,
|
||||
Collections.singletonList(new ArrayProjection(Fields.field(name), this.projections)));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
static class ArrayProjection extends Projection {
|
||||
|
||||
private final Field targetField;
|
||||
private final List<Object> projections;
|
||||
|
||||
public ArrayProjection(Field targetField, List<Object> projections) {
|
||||
|
||||
super(targetField);
|
||||
this.targetField = targetField;
|
||||
this.projections = projections;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
return new Document(targetField.getName(),
|
||||
projections.stream().map(it -> toArrayEntry(it, context)).collect(Collectors.toList()));
|
||||
}
|
||||
|
||||
private Object toArrayEntry(Object projection, AggregationOperationContext ctx) {
|
||||
|
||||
if (projection instanceof Field) {
|
||||
return ctx.getReference((Field) projection).toString();
|
||||
}
|
||||
|
||||
if (projection instanceof AggregationExpression) {
|
||||
return ((AggregationExpression) projection).toDocument(ctx);
|
||||
}
|
||||
|
||||
if (projection instanceof FieldProjection) {
|
||||
return ctx.getReference(((FieldProjection) projection).getExposedField().getTarget()).toString();
|
||||
}
|
||||
|
||||
if (projection instanceof Projection) {
|
||||
return ((Projection) projection).toDocument(ctx);
|
||||
}
|
||||
|
||||
return projection;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,9 +17,12 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import static org.springframework.data.mongodb.core.aggregation.Fields.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mapping.PersistentPropertyPath;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
@@ -27,6 +30,7 @@ import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldRefe
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
@@ -70,7 +74,16 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
|
||||
*/
|
||||
@Override
|
||||
public Document getMappedObject(Document document) {
|
||||
return mapper.getMappedObject(document, mappingContext.getPersistentEntity(type));
|
||||
return getMappedObject(document, type);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document, java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Document getMappedObject(Document document, @Nullable Class<?> type) {
|
||||
return mapper.getMappedObject(document, type != null ? mappingContext.getPersistentEntity(type) : null);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -79,8 +92,6 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
|
||||
PropertyPath.from(field.getTarget(), type);
|
||||
return getReferenceFor(field);
|
||||
}
|
||||
|
||||
@@ -93,6 +104,30 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
|
||||
return getReferenceFor(field(name));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getFields(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Fields getFields(Class<?> type) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(type);
|
||||
|
||||
if (entity == null) {
|
||||
return AggregationOperationContext.super.getFields(type);
|
||||
}
|
||||
|
||||
List<String> fields = new ArrayList<>();
|
||||
|
||||
for (MongoPersistentProperty property : entity) {
|
||||
fields.add(property.getName());
|
||||
}
|
||||
|
||||
return Fields.fields(fields.toArray(new String[0]));
|
||||
}
|
||||
|
||||
private FieldReference getReferenceFor(Field field) {
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext
|
||||
|
||||
@@ -18,7 +18,9 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable;
|
||||
@@ -185,7 +187,8 @@ public class VariableOperators {
|
||||
map.putAll(context.getMappedObject(input));
|
||||
map.put("as", itemVariableName);
|
||||
map.put("in",
|
||||
functionToApply.toDocument(new NestedDelegatingExpressionAggregationOperationContext(operationContext)));
|
||||
functionToApply.toDocument(new NestedDelegatingExpressionAggregationOperationContext(operationContext,
|
||||
Collections.singleton(Fields.field(itemVariableName)))));
|
||||
|
||||
return new Document("$map", map);
|
||||
}
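A minimal sketch of the effect on $map rendering, assuming the VariableOperators.Map builder and ArithmeticOperators from this package; field and variable names are illustrative:

AggregationExpression doubled = VariableOperators.Map.itemsOf("prices")
		.as("price")
		.andApply(ArithmeticOperators.valueOf("price").multiplyBy(2));
// { "$map" : { "input" : "$prices", "as" : "price",
//              "in" : { "$multiply" : [ "$$price", 2 ] } } }
// Only the declared "price" variable is rendered with the "$$" prefix; other field references
// inside "in" keep the plain "$field" form now that the inner variables are passed to the nested context.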
|
||||
@@ -322,12 +325,14 @@ public class VariableOperators {
|
||||
|
||||
private Document getMappedVariable(ExpressionVariable var, AggregationOperationContext context) {
|
||||
|
||||
return new Document(var.variableName, var.expression instanceof AggregationExpression
|
||||
? ((AggregationExpression) var.expression).toDocument(context) : var.expression);
|
||||
return new Document(var.variableName,
|
||||
var.expression instanceof AggregationExpression ? ((AggregationExpression) var.expression).toDocument(context)
|
||||
: var.expression);
|
||||
}
|
||||
|
||||
private Object getMappedIn(AggregationOperationContext context) {
|
||||
return expression.toDocument(new NestedDelegatingExpressionAggregationOperationContext(context));
|
||||
return expression.toDocument(new NestedDelegatingExpressionAggregationOperationContext(context,
|
||||
this.vars.stream().map(var -> Fields.field(var.variableName)).collect(Collectors.toList())));
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -16,12 +16,15 @@
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.math.BigInteger;
|
||||
import java.util.Date;
|
||||
|
||||
import org.bson.types.Code;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.core.convert.support.GenericConversionService;
|
||||
import org.springframework.data.convert.ConverterBuilder;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.convert.EntityInstantiators;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToObjectIdConverter;
|
||||
@@ -93,6 +96,24 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali
|
||||
conversionService.addConverter(BigIntegerToObjectIdConverter.INSTANCE);
|
||||
}
|
||||
|
||||
if (!conversionService.canConvert(Date.class, Long.class)) {
|
||||
conversionService
|
||||
.addConverter(ConverterBuilder.writing(Date.class, Long.class, Date::getTime).getWritingConverter());
|
||||
}
|
||||
|
||||
if (!conversionService.canConvert(Long.class, Date.class)) {
|
||||
conversionService.addConverter(ConverterBuilder.reading(Long.class, Date.class, Date::new).getReadingConverter());
|
||||
}
|
||||
|
||||
if (!conversionService.canConvert(ObjectId.class, Date.class)) {
|
||||
|
||||
conversionService.addConverter(
|
||||
ConverterBuilder.reading(ObjectId.class, Date.class, objectId -> new Date(objectId.getTimestamp()))
|
||||
.getReadingConverter());
|
||||
}
|
||||
|
||||
conversionService
|
||||
.addConverter(ConverterBuilder.reading(Code.class, String.class, Code::getCode).getReadingConverter());
|
||||
conversions.registerConvertersIn(conversionService);
|
||||
}
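A minimal sketch of what the added default converters enable once afterPropertiesSet() has run, assuming a fully configured MappingMongoConverter named converter; the values are illustrative:

ConversionService conversionService = converter.getConversionService();

Long millis = conversionService.convert(new Date(1000L), Long.class);              // 1000
Date restored = conversionService.convert(1000L, Date.class);                      // new Date(1000L)
Date created = conversionService.convert(new ObjectId(), Date.class);              // ObjectId creation timestamp
String source = conversionService.convert(new Code("function() {}"), String.class); // "function() {}"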
|
||||
|
||||
|
||||
@@ -30,11 +30,13 @@ import java.util.Optional;
|
||||
import java.util.Set;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.Codec;
|
||||
import org.bson.codecs.DecoderContext;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.json.JsonReader;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
@@ -57,6 +59,7 @@ import org.springframework.data.mapping.model.PropertyValueProvider;
|
||||
import org.springframework.data.mapping.model.SpELContext;
|
||||
import org.springframework.data.mapping.model.SpELExpressionEvaluator;
|
||||
import org.springframework.data.mapping.model.SpELExpressionParameterValueProvider;
|
||||
import org.springframework.data.mongodb.CodecRegistryProvider;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
@@ -103,6 +106,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
protected @Nullable ApplicationContext applicationContext;
|
||||
protected MongoTypeMapper typeMapper;
|
||||
protected @Nullable String mapKeyDotReplacement = null;
|
||||
protected @Nullable CodecRegistryProvider codecRegistryProvider;
|
||||
|
||||
private SpELContext spELContext;
|
||||
|
||||
@@ -141,6 +145,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
public MappingMongoConverter(MongoDbFactory mongoDbFactory,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
this(new DefaultDbRefResolver(mongoDbFactory), mappingContext);
|
||||
setCodecRegistryProvider(mongoDbFactory);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -178,6 +183,17 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
this.mapKeyDotReplacement = mapKeyDotReplacement;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure a {@link CodecRegistryProvider} that provides native MongoDB {@link org.bson.codecs.Codec codecs} for
|
||||
* reading values.
|
||||
*
|
||||
* @param codecRegistryProvider can be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public void setCodecRegistryProvider(@Nullable CodecRegistryProvider codecRegistryProvider) {
|
||||
this.codecRegistryProvider = codecRegistryProvider;
|
||||
}
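A minimal sketch, assuming an existing DbRefResolver, MappingContext and MongoDbFactory; the factory already acts as the CodecRegistryProvider handed in by the constructor above:

MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
converter.setCodecRegistryProvider(mongoDbFactory); // enables the Codec fallback when no PersistentEntity is known
converter.afterPropertiesSet();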
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.convert.EntityConverter#getMappingContext()
|
||||
@@ -253,6 +269,16 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(typeToUse);
|
||||
|
||||
if (entity == null) {
|
||||
|
||||
if (codecRegistryProvider != null) {
|
||||
|
||||
Optional<? extends Codec<? extends S>> codec = codecRegistryProvider.getCodecFor(rawType);
|
||||
if(codec.isPresent()) {
|
||||
return codec.get().decode(new JsonReader(target.toJson()),
|
||||
DecoderContext.builder().build());
|
||||
}
|
||||
}
|
||||
|
||||
throw new MappingException(String.format(INVALID_TYPE_TO_READ, target, typeToUse.getType()));
|
||||
}
|
||||
|
||||
@@ -1650,6 +1676,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
target.spELContext = spELContext;
|
||||
target.setInstantiators(instantiators);
|
||||
target.typeMapper = typeMapper;
|
||||
target.setCodecRegistryProvider(dbFactory);
|
||||
target.afterPropertiesSet();
|
||||
|
||||
return target;
|
||||
|
||||
@@ -17,8 +17,10 @@ package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
@@ -46,14 +48,14 @@ class ObjectPath {
|
||||
private final @Nullable ObjectPath parent;
|
||||
private final @Nullable Object object;
|
||||
private final @Nullable Object idValue;
|
||||
private final String collection;
|
||||
private final Lazy<String> collection;
|
||||
|
||||
private ObjectPath() {
|
||||
|
||||
this.parent = null;
|
||||
this.object = null;
|
||||
this.idValue = null;
|
||||
this.collection = "";
|
||||
this.collection = Lazy.empty();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -64,7 +66,7 @@ class ObjectPath {
|
||||
* @param idValue
|
||||
* @param collection
|
||||
*/
|
||||
private ObjectPath(ObjectPath parent, Object object, @Nullable Object idValue, String collection) {
|
||||
private ObjectPath(ObjectPath parent, Object object, @Nullable Object idValue, Lazy<String> collection) {
|
||||
|
||||
this.parent = parent;
|
||||
this.object = object;
|
||||
@@ -85,7 +87,7 @@ class ObjectPath {
|
||||
Assert.notNull(object, "Object must not be null!");
|
||||
Assert.notNull(entity, "MongoPersistentEntity must not be null!");
|
||||
|
||||
return new ObjectPath(this, object, id, entity.getCollection());
|
||||
return new ObjectPath(this, object, id, Lazy.of(entity::getCollection));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -175,7 +177,7 @@ class ObjectPath {
|
||||
}
|
||||
|
||||
private String getCollection() {
|
||||
return collection;
|
||||
return collection.get();
|
||||
}
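A minimal sketch of the Lazy change, assuming Spring Data's Lazy utility: the collection name is no longer resolved when the path node is created, only on first access, and the value is then cached:

Lazy<String> collection = Lazy.of(entity::getCollection); // nothing resolved yet
String name = collection.get();                           // resolves once; later calls reuse the value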
|
||||
|
||||
/*
|
||||
|
||||
@@ -33,7 +33,6 @@ import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.types.ObjectId;
|
||||
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.domain.Example;
|
||||
@@ -132,10 +131,8 @@ public class QueryMapper {
|
||||
// TODO: remove one once QueryMapper can work with Query instances directly
|
||||
if (Query.isRestrictedTypeKey(key)) {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
Set<Class<?>> restrictedTypes = BsonUtils.get(query, key);
|
||||
this.converter.getTypeMapper().writeTypeRestrictions(result, restrictedTypes);
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -282,7 +279,7 @@ public class QueryMapper {
|
||||
if (keyword.isOrOrNor() || (keyword.hasIterableValue() && !keyword.isGeometry())) {
|
||||
|
||||
Iterable<?> conditions = keyword.getValue();
|
||||
List<Object> newConditions = new ArrayList<Object>();
|
||||
List<Object> newConditions = new ArrayList<>();
|
||||
|
||||
for (Object condition : conditions) {
|
||||
newConditions.add(isDocument(condition) ? getMappedObject((Document) condition, entity)
|
||||
@@ -293,11 +290,12 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
if (keyword.isSample()) {
|
||||
return exampleMapper.getMappedExample(keyword.<Example<?>> getValue(), entity);
|
||||
return exampleMapper.getMappedExample(keyword.getValue(), entity);
|
||||
}
|
||||
|
||||
if (keyword.isJsonSchema()) {
|
||||
return schemaMapper.mapSchema(new Document(keyword.getKey(), keyword.getValue()), entity.getType());
|
||||
return schemaMapper.mapSchema(new Document(keyword.getKey(), keyword.getValue()),
|
||||
entity != null ? entity.getType() : Object.class);
|
||||
}
|
||||
|
||||
return new Document(keyword.getKey(), convertSimpleOrDocument(keyword.getValue(), entity));
|
||||
@@ -318,6 +316,10 @@ public class QueryMapper {
|
||||
Object convertedValue = needsAssociationConversion ? convertAssociation(value, property)
|
||||
: getMappedValue(property.with(keyword.getKey()), value);
|
||||
|
||||
if (keyword.isSample() && convertedValue instanceof Document) {
|
||||
return (Document) convertedValue;
|
||||
}
|
||||
|
||||
return new Document(keyword.key, convertedValue);
|
||||
}
|
||||
|
||||
@@ -325,17 +327,16 @@ public class QueryMapper {
|
||||
* Returns the mapped value for the given source object assuming it's a value for the given
|
||||
* {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param documentField the key the value will be bound to eventually
|
||||
* @param value the source object to be mapped
|
||||
* @param property the property the value is a value for
|
||||
* @param newKey the key the value will be bound to eventually
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Object getMappedValue(Field documentField, Object value) {
|
||||
|
||||
if(documentField.getProperty() != null && documentField.getProperty().hasExplicitWriteTarget()) {
|
||||
if(conversionService.canConvert(value.getClass(), documentField.getProperty().getFieldType())) {
|
||||
if (documentField.getProperty() != null && documentField.getProperty().hasExplicitWriteTarget()) {
|
||||
if (conversionService.canConvert(value.getClass(), documentField.getProperty().getFieldType())) {
|
||||
value = conversionService.convert(value, documentField.getProperty().getFieldType());
|
||||
}
|
||||
}
|
||||
@@ -348,7 +349,7 @@ public class QueryMapper {
|
||||
|
||||
if (valueDbo.containsField("$in") || valueDbo.containsField("$nin")) {
|
||||
String inKey = valueDbo.containsField("$in") ? "$in" : "$nin";
|
||||
List<Object> ids = new ArrayList<Object>();
|
||||
List<Object> ids = new ArrayList<>();
|
||||
for (Object id : (Iterable<?>) valueDbo.get(inKey)) {
|
||||
ids.add(convertId(id, getIdTypeForField(documentField)));
|
||||
}
|
||||
@@ -367,7 +368,7 @@ public class QueryMapper {
|
||||
|
||||
if (valueDbo.containsKey("$in") || valueDbo.containsKey("$nin")) {
|
||||
String inKey = valueDbo.containsKey("$in") ? "$in" : "$nin";
|
||||
List<Object> ids = new ArrayList<Object>();
|
||||
List<Object> ids = new ArrayList<>();
|
||||
for (Object id : (Iterable<?>) valueDbo.get(inKey)) {
|
||||
ids.add(convertId(id, getIdTypeForField(documentField)));
|
||||
}
|
||||
@@ -448,6 +449,10 @@ public class QueryMapper {
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Object convertSimpleOrDocument(Object source, @Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (source instanceof Example) {
|
||||
return exampleMapper.getMappedExample((Example) source, entity);
|
||||
}
|
||||
|
||||
if (source instanceof List) {
|
||||
return delegateConvertToMongoType(source, entity);
|
||||
}
|
||||
@@ -1093,6 +1098,11 @@ public class QueryMapper {
|
||||
private PropertyPath forName(String path) {
|
||||
|
||||
try {
|
||||
|
||||
if (entity.getPersistentProperty(path) != null) {
|
||||
return PropertyPath.from(Pattern.quote(path), entity.getTypeInformation());
|
||||
}
|
||||
|
||||
return PropertyPath.from(path, entity.getTypeInformation());
|
||||
} catch (PropertyReferenceException | InvalidPersistentPropertyPath e) {
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
/**
|
||||
* Interface definition for structures defined in GeoJSON ({@link http://geojson.org/}) format.
|
||||
* Interface definition for structures defined in GeoJSON ({@link https://geojson.org/}) format.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
@@ -27,7 +27,7 @@ public interface GeoJson<T extends Iterable<?>> {
|
||||
* String value representing the type of the {@link GeoJson} object.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#geojson-objects">http://geojson.org/geojson-spec.html#geojson-objects</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#geojson-objects">https://geojson.org/geojson-spec.html#geojson-objects</a>
|
||||
*/
|
||||
String getType();
|
||||
|
||||
@@ -36,7 +36,7 @@ public interface GeoJson<T extends Iterable<?>> {
|
||||
* determined by {@link #getType()} of geometry.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#geometry-objects">http://geojson.org/geojson-spec.html#geometry-objects</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#geometry-objects">https://geojson.org/geojson-spec.html#geometry-objects</a>
|
||||
*/
|
||||
T getCoordinates();
|
||||
}
|
||||
|
||||
@@ -27,7 +27,7 @@ import org.springframework.util.ObjectUtils;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#geometry-collection">http://geojson.org/geojson-spec.html#geometry-collection</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#geometry-collection">https://geojson.org/geojson-spec.html#geometry-collection</a>
|
||||
*/
|
||||
public class GeoJsonGeometryCollection implements GeoJson<Iterable<GeoJson<?>>> {
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ import org.springframework.data.geo.Point;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#linestring">http://geojson.org/geojson-spec.html#linestring</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#linestring">https://geojson.org/geojson-spec.html#linestring</a>
|
||||
*/
|
||||
public class GeoJsonLineString extends GeoJsonMultiPoint {
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@ import org.springframework.util.ObjectUtils;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#multilinestring">http://geojson.org/geojson-spec.html#multilinestring</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#multilinestring">https://geojson.org/geojson-spec.html#multilinestring</a>
|
||||
*/
|
||||
public class GeoJsonMultiLineString implements GeoJson<Iterable<GeoJsonLineString>> {
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ import org.springframework.util.ObjectUtils;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#multipoint">http://geojson.org/geojson-spec.html#multipoint</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#multipoint">https://geojson.org/geojson-spec.html#multipoint</a>
|
||||
*/
|
||||
public class GeoJsonMultiPoint implements GeoJson<Iterable<Point>> {
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ import org.springframework.data.geo.Point;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#point">http://geojson.org/geojson-spec.html#point</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#point">https://geojson.org/geojson-spec.html#point</a>
|
||||
*/
|
||||
public class GeoJsonPoint extends Point implements GeoJson<List<Double>> {
|
||||
|
||||
|
||||
@@ -32,7 +32,7 @@ import org.springframework.util.Assert;
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#polygon">http://geojson.org/geojson-spec.html#polygon</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#polygon">https://geojson.org/geojson-spec.html#polygon</a>
|
||||
*/
|
||||
public class GeoJsonPolygon extends Polygon implements GeoJson<List<GeoJsonLineString>> {
|
||||
|
||||
|
||||
@@ -17,12 +17,29 @@ package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Repeatable;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* Mark a class to use compound indexes.
|
||||
* <p />
|
||||
* <p>
|
||||
* <b>NOTE: This annotation is repeatable according to Java 8 conventions using {@link CompoundIndexes#value()} as
|
||||
* container.</b>
|
||||
*
|
||||
* <pre class="code">
|
||||
* @Document
|
||||
* @CompoundIndex(def = "{'firstname': 1, 'lastname': 1}")
|
||||
* @CompoundIndex(def = "{'address.city': 1, 'address.street': 1}")
|
||||
* class Person {
|
||||
* String firstname;
|
||||
* String lastname;
|
||||
*
|
||||
* Address address;
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
@@ -32,6 +49,7 @@ import java.lang.annotation.Target;
|
||||
*/
|
||||
@Target({ ElementType.TYPE })
|
||||
@Documented
|
||||
@Repeatable(CompoundIndexes.class)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface CompoundIndex {
|
||||
|
||||
@@ -42,9 +60,7 @@ public @interface CompoundIndex {
|
||||
* <br />
|
||||
* If left empty on a nested document, the whole document will be indexed.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
*
|
||||
* <pre class="code">
|
||||
* @Document
|
||||
* @CompoundIndex(def = "{'h1': 1, 'h2': 1}")
|
||||
* class JsonStringIndexDefinition {
|
||||
@@ -56,7 +72,6 @@ public @interface CompoundIndex {
|
||||
* class ExpressionIndexDefinition {
|
||||
* String h1, h2;
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @return
|
||||
@@ -106,35 +121,31 @@ public @interface CompoundIndex {
|
||||
* <br />
|
||||
* The structure below
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* <pre class="code">
|
||||
* @Document
|
||||
* class Root {
|
||||
* Hybrid hybrid;
|
||||
* Nested nested;
|
||||
* Hybrid hybrid;
|
||||
* Nested nested;
|
||||
* }
|
||||
*
|
||||
* @Document
|
||||
* @CompoundIndex(name = "compound_index", def = "{'h1': 1, 'h2': 1}")
|
||||
* class Hybrid {
|
||||
* String h1, h2;
|
||||
* String h1, h2;
|
||||
* }
|
||||
*
|
||||
* @CompoundIndex(name = "compound_index", def = "{'n1': 1, 'n2': 1}")
|
||||
* class Nested {
|
||||
* String n1, n2;
|
||||
* String n1, n2;
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* resolves in the following index structures
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* <pre class="code">
|
||||
* db.root.createIndex( { hybrid.h1: 1, hybrid.h2: 1 } , { name: "hybrid.compound_index" } )
|
||||
* db.root.createIndex( { nested.n1: 1, nested.n2: 1 } , { name: "nested.compound_index" } )
|
||||
* db.hybrid.createIndex( { h1: 1, h2: 1 } , { name: "compound_index" } )
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @return
|
||||
|
||||
@@ -15,15 +15,24 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* Container annotation that allows collecting multiple {@link CompoundIndex} annotations.
|
||||
* <p>
|
||||
* Can be used natively, declaring several nested {@link CompoundIndex} annotations. Can also be used in conjunction
|
||||
* with Java 8's support for <em>repeatable annotations</em>, where {@link CompoundIndex} can simply be declared several
|
||||
* times on the same {@linkplain ElementType#TYPE type}, implicitly generating this container annotation.
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@Target({ ElementType.TYPE })
|
||||
@Documented
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface CompoundIndexes {
|
||||
|
||||
|
||||
@@ -0,0 +1,64 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* Annotation for a property that should be used as key for a
|
||||
* <a href="https://docs.mongodb.com/manual/core/index-hashed/">Hashed Index</a>. If used on a simple property, the
|
||||
* index uses a hashing function to compute the hash of the value of the index field. When added to a property of a complex
|
||||
* type, the embedded document is collapsed and the hash is computed for the entire object.
|
||||
* <p />
|
||||
*
|
||||
* <pre class="code">
|
||||
* @Document
|
||||
* public class DomainType {
|
||||
*
|
||||
* @HashIndexed @Id String id;
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* {@link HashIndexed} can also be used as meta {@link java.lang.annotation.Annotation} to create composed annotations:
|
||||
*
|
||||
* <pre class="code">
|
||||
* @Indexed
|
||||
* @HashIndexed
|
||||
* @Retention(RetentionPolicy.RUNTIME)
|
||||
* public @interface IndexAndHash {
|
||||
*
|
||||
* @AliasFor(annotation = Indexed.class, attribute = "name")
|
||||
* String name() default "";
|
||||
* }
|
||||
*
|
||||
* @Document
|
||||
* public class DomainType {
|
||||
*
|
||||
* @ComposedHashIndexed(name = "idx-name") String value;
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
* @see HashedIndex
|
||||
*/
|
||||
@Target({ ElementType.ANNOTATION_TYPE, ElementType.FIELD })
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface HashIndexed {
|
||||
}
|
||||
@@ -0,0 +1,66 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link IndexDefinition} implementation for MongoDB
|
||||
* <a href="https://docs.mongodb.com/manual/core/index-hashed/">Hashed Indexes</a> maintaining entries with hashes of
|
||||
* the values of the indexed field.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public class HashedIndex implements IndexDefinition {
|
||||
|
||||
private final String field;
|
||||
|
||||
private HashedIndex(String field) {
|
||||
|
||||
Assert.hasText(field, "Field must not be null nor empty!");
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link HashedIndex} for the given field.
|
||||
*
|
||||
* @param field must not be {@literal null} nor empty.
|
||||
* @return
|
||||
*/
|
||||
public static HashedIndex hashed(String field) {
|
||||
return new HashedIndex(field);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexKeys()
|
||||
*/
|
||||
@Override
|
||||
public Document getIndexKeys() {
|
||||
return new Document(field, "hashed");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexOptions()
|
||||
*/
|
||||
@Override
|
||||
public Document getIndexOptions() {
|
||||
return new Document();
|
||||
}
|
||||
}
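A minimal usage sketch, assuming a MongoTemplate named template; collection and field names are illustrative:

template.indexOps("books").ensureIndex(HashedIndex.hashed("isbn"));
// creates an index with keys { "isbn" : "hashed" }, as returned by getIndexKeys()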
|
||||
@@ -26,11 +26,10 @@ import org.springframework.util.ObjectUtils;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public final class IndexField {
|
||||
|
||||
enum Type {
|
||||
GEO, TEXT, DEFAULT;
|
||||
GEO, TEXT, DEFAULT, HASH;
|
||||
}
|
||||
|
||||
private final String key;
|
||||
@@ -49,7 +48,9 @@ public final class IndexField {
|
||||
if (Type.GEO.equals(type) || Type.TEXT.equals(type)) {
|
||||
Assert.isNull(direction, "Geo/Text indexes must not have a direction!");
|
||||
} else {
|
||||
Assert.notNull(direction, "Default indexes require a direction");
|
||||
if (!Type.HASH.equals(type)) {
|
||||
Assert.notNull(direction, "Default indexes require a direction");
|
||||
}
|
||||
}
|
||||
|
||||
this.key = key;
|
||||
@@ -65,6 +66,17 @@ public final class IndexField {
|
||||
return new IndexField(key, order, Type.DEFAULT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@literal hashed} {@link IndexField} for the given key.
|
||||
*
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @return new instance of {@link IndexField}.
|
||||
* @since 2.2
|
||||
*/
|
||||
static IndexField hashed(String key) {
|
||||
return new IndexField(key, null, Type.HASH);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a geo {@link IndexField} for the given key.
|
||||
*
|
||||
@@ -120,6 +132,16 @@ public final class IndexField {
|
||||
return Type.TEXT.equals(type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link IndexField} is {@literal hashed}.
|
||||
*
|
||||
* @return {@literal true} if {@link IndexField} is hashed.
|
||||
* @since 2.2
|
||||
*/
|
||||
public boolean isHashed() {
|
||||
return Type.HASH.equals(type);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import static org.springframework.data.domain.Sort.Direction.*;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
@@ -27,9 +28,12 @@ import java.util.Optional;
|
||||
import org.bson.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.NumberUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Index information for a MongoDB index.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
@@ -47,6 +51,7 @@ public class IndexInfo {
|
||||
private final boolean unique;
|
||||
private final boolean sparse;
|
||||
private final String language;
|
||||
private @Nullable Duration expireAfter;
|
||||
private @Nullable String partialFilterExpression;
|
||||
private @Nullable Document collation;
|
||||
|
||||
@@ -91,12 +96,17 @@ public class IndexInfo {
|
||||
|
||||
} else {
|
||||
|
||||
Double keyValue = new Double(value.toString());
|
||||
if (ObjectUtils.nullSafeEquals("hashed", value)) {
|
||||
indexFields.add(IndexField.hashed(key));
|
||||
} else {
|
||||
|
||||
if (ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, ASC));
|
||||
} else if (MINUS_ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, DESC));
|
||||
Double keyValue = new Double(value.toString());
|
||||
|
||||
if (ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, ASC));
|
||||
} else if (MINUS_ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, DESC));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -108,11 +118,19 @@ public class IndexInfo {
|
||||
String language = sourceDocument.containsKey("default_language") ? (String) sourceDocument.get("default_language")
|
||||
: "";
|
||||
String partialFilter = sourceDocument.containsKey("partialFilterExpression")
|
||||
? ((Document) sourceDocument.get("partialFilterExpression")).toJson() : null;
|
||||
? ((Document) sourceDocument.get("partialFilterExpression")).toJson()
|
||||
: null;
|
||||
|
||||
IndexInfo info = new IndexInfo(indexFields, name, unique, sparse, language);
|
||||
info.partialFilterExpression = partialFilter;
|
||||
info.collation = sourceDocument.get("collation", Document.class);
|
||||
|
||||
if (sourceDocument.containsKey("expireAfterSeconds")) {
|
||||
|
||||
Number expireAfterSeconds = sourceDocument.get("expireAfterSeconds", Number.class);
|
||||
info.expireAfter = Duration.ofSeconds(NumberUtils.convertNumberToTargetClass(expireAfterSeconds, Long.class));
|
||||
}
|
||||
|
||||
return info;
|
||||
}
|
||||
|
||||
@@ -183,11 +201,30 @@ public class IndexInfo {
|
||||
return Optional.ofNullable(collation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the duration after which documents within the index expire.
|
||||
*
|
||||
* @return the expiration time if set, {@link Optional#empty()} otherwise.
|
||||
* @since 2.2
|
||||
*/
|
||||
public Optional<Duration> getExpireAfter() {
|
||||
return Optional.ofNullable(expireAfter);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if a hashed index field is present.
|
||||
* @since 2.2
|
||||
*/
|
||||
public boolean isHashed() {
|
||||
return getIndexFields().stream().anyMatch(IndexField::isHashed);
|
||||
}
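A minimal sketch of checking for hashed index fields via the new accessor, assuming a MongoTemplate named template:

boolean hasHashedIndex = template.indexOps("books").getIndexInfo().stream()
		.anyMatch(IndexInfo::isHashed);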
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
|
||||
return "IndexInfo [indexFields=" + indexFields + ", name=" + name + ", unique=" + unique + ", sparse=" + sparse
|
||||
+ ", language=" + language + ", partialFilterExpression=" + partialFilterExpression + ", collation=" + collation
|
||||
+ "]";
|
||||
+ ", expireAfterSeconds=" + ObjectUtils.nullSafeToString(expireAfter) + "]";
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -201,6 +238,7 @@ public class IndexInfo {
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(language);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(partialFilterExpression);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(collation);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(expireAfter);
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -242,8 +280,10 @@ public class IndexInfo {
|
||||
if (!ObjectUtils.nullSafeEquals(partialFilterExpression, other.partialFilterExpression)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(collation, collation)) {
|
||||
if (!ObjectUtils.nullSafeEquals(collation, other.collation)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(expireAfter, other.expireAfter)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
|
||||
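The two accessors added above, getExpireAfter() and isHashed(), surface the raw "expireAfterSeconds" and "hashed" index options that the parsing code now reads. A minimal sketch of inspecting them through IndexOperations follows; the MongoTemplate instance and the "people" collection name are assumptions, not part of this change.

import java.time.Duration;
import java.util.List;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.IndexInfo;

class IndexInfoInspection {

    // Prints name, hashed flag and TTL for every index on an assumed "people" collection.
    static void describeIndexes(MongoTemplate template) {

        List<IndexInfo> indexes = template.indexOps("people").getIndexInfo();

        for (IndexInfo index : indexes) {

            boolean hashed = index.isHashed(); // true if any index field is of type "hashed"
            Duration ttl = index.getExpireAfter().orElse(Duration.ZERO); // derived from "expireAfterSeconds"

            System.out.printf("%s hashed=%s ttl=%s%n", index.getName(), hashed, ttl);
        }
    }
}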
@@ -15,7 +15,10 @@
*/
package org.springframework.data.mongodb.core.index;

import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.util.ClassTypeInformation;
import org.springframework.data.util.TypeInformation;
import org.springframework.util.Assert;
@@ -37,7 +40,8 @@ public interface IndexResolver {
* @return the new {@link IndexResolver}.
* @since 2.2
*/
static IndexResolver create(MongoMappingContext mappingContext) {
static IndexResolver create(
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

Assert.notNull(mappingContext, "MongoMappingContext must not be null!");

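With the factory signature above, IndexResolver.create(…) now accepts any MappingContext of MongoPersistentEntity/MongoPersistentProperty rather than requiring a MongoMappingContext. A sketch of resolving index definitions for a type follows; the Person class is a hypothetical @Document-annotated entity.

import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexResolver;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.util.ClassTypeInformation;

class IndexResolution {

    static void printResolvedIndexes() {

        MongoMappingContext mappingContext = new MongoMappingContext();
        mappingContext.getPersistentEntity(Person.class); // registers the entity with the context

        IndexResolver resolver = IndexResolver.create(mappingContext);

        for (IndexDefinition definition : resolver.resolveIndexFor(ClassTypeInformation.from(Person.class))) {
            System.out.println(definition.getIndexKeys().toJson()); // e.g. { "lastname" : 1 }
        }
    }

    static class Person {} // hypothetical entity; real usage would carry @Document and index annotations
}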
@@ -134,16 +134,18 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
private void checkForAndCreateIndexes(MongoPersistentEntity<?> entity) {

if (entity.isAnnotationPresent(Document.class)) {

String collection = entity.getCollection();

for (IndexDefinition indexDefinition : indexResolver.resolveIndexFor(entity.getTypeInformation())) {

JustOnceLogger.logWarnIndexCreationConfigurationChange(this.getClass().getName());

IndexDefinitionHolder indexToCreate = indexDefinition instanceof IndexDefinitionHolder
? (IndexDefinitionHolder) indexDefinition
: new IndexDefinitionHolder("", indexDefinition, entity.getCollection());
: new IndexDefinitionHolder("", indexDefinition, collection);

createIndex(indexToCreate);

}
}
}

@@ -42,13 +42,13 @@ import org.springframework.data.mapping.MappingException;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.PersistentProperty;
import org.springframework.data.mapping.PropertyHandler;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.CycleGuard.Path;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.TextIndexIncludeOptions.IncludeStrategy;
import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexDefinitionBuilder;
import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexedFieldSpec;
import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.spel.EvaluationContextProvider;
@@ -81,7 +81,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexResolver.class);
private static final SpelExpressionParser PARSER = new SpelExpressionParser();

private final MongoMappingContext mappingContext;
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
private EvaluationContextProvider evaluationContextProvider = EvaluationContextProvider.DEFAULT;

/**
@@ -89,7 +89,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
*
* @param mappingContext must not be {@literal null}.
*/
public MongoPersistentEntityIndexResolver(MongoMappingContext mappingContext) {
public MongoPersistentEntityIndexResolver(
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

Assert.notNull(mappingContext, "Mapping context must not be null in order to resolve index definitions");
this.mappingContext = mappingContext;
@@ -104,28 +105,30 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
}

/**
* Resolve the {@link IndexDefinition}s for given {@literal root} entity by traversing {@link MongoPersistentProperty}
* scanning for index annotations {@link Indexed}, {@link CompoundIndex} and {@link GeospatialIndex}. The given
* {@literal root} has therefore to be annotated with {@link Document}.
* Resolve the {@link IndexDefinition}s for a given {@literal root} entity by traversing
* {@link MongoPersistentProperty} scanning for index annotations {@link Indexed}, {@link CompoundIndex} and
* {@link GeospatialIndex}. The given {@literal root} has therefore to be annotated with {@link Document}.
*
* @param root must not be null.
* @return List of {@link IndexDefinitionHolder}. Will never be {@code null}.
* @throws IllegalArgumentException in case of missing {@link Document} annotation marking root entities.
*/
public List<IndexDefinitionHolder> resolveIndexForEntity(final MongoPersistentEntity<?> root) {
public List<IndexDefinitionHolder> resolveIndexForEntity(MongoPersistentEntity<?> root) {

Assert.notNull(root, "Index cannot be resolved for given 'null' entity.");
Assert.notNull(root, "MongoPersistentEntity must not be null!");
Document document = root.findAnnotation(Document.class);
Assert.notNull(document, "Given entity is not collection root.");
Assert.notNull(document, () -> String
.format("Entity %s is not a collection root. Make sure to annotate it with @Document!", root.getName()));

final List<IndexDefinitionHolder> indexInformation = new ArrayList<>();
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", root.getCollection(), root));
indexInformation.addAll(potentiallyCreateTextIndexDefinition(root));
List<IndexDefinitionHolder> indexInformation = new ArrayList<>();
String collection = root.getCollection();
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", collection, root));
indexInformation.addAll(potentiallyCreateTextIndexDefinition(root, collection));

root.doWithProperties((PropertyHandler<MongoPersistentProperty>) property -> this
.potentiallyAddIndexForProperty(root, property, indexInformation, new CycleGuard()));

indexInformation.addAll(resolveIndexesForDbrefs("", root.getCollection(), root));
indexInformation.addAll(resolveIndexesForDbrefs("", collection, root));

return indexInformation;
}
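resolveIndexForEntity(…) still rejects types that are not collection roots, but the assertion message now names the offending class. A brief sketch, assuming a plain Unmapped class without @Document:

import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

class ResolveGuardExample {

    static void rejectUnmappedType() {

        MongoMappingContext mappingContext = new MongoMappingContext();
        MongoPersistentEntityIndexResolver resolver = new MongoPersistentEntityIndexResolver(mappingContext);

        try {
            resolver.resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(Unmapped.class));
        } catch (IllegalArgumentException e) {
            // message reads along the lines of "Entity ... is not a collection root. Make sure to annotate it with @Document!"
        }
    }

    static class Unmapped {} // intentionally not annotated with @Document
}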
@@ -139,10 +142,10 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
persistentProperty.getFieldName(), Path.of(persistentProperty), root.getCollection(), guard));
}

IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(
persistentProperty.getFieldName(), root.getCollection(), persistentProperty);
if (indexDefinitionHolder != null) {
indexes.add(indexDefinitionHolder);
if (!indexDefinitions.isEmpty()) {
indexes.addAll(indexDefinitions);
}
} catch (CyclicPropertyReferenceException e) {
LOGGER.info(e.getMessage());
@@ -169,14 +172,14 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions(dotPath, collection, entity));

entity.doWithProperties((PropertyHandler<MongoPersistentProperty>) property -> this
.guradAndPotentiallyAddIndexForProperty(property, dotPath, path, collection, indexInformation, guard));
.guardAndPotentiallyAddIndexForProperty(property, dotPath, path, collection, indexInformation, guard));

indexInformation.addAll(resolveIndexesForDbrefs(dotPath, collection, entity));

return indexInformation;
}

private void guradAndPotentiallyAddIndexForProperty(MongoPersistentProperty persistentProperty, String dotPath,
private void guardAndPotentiallyAddIndexForProperty(MongoPersistentProperty persistentProperty, String dotPath,
Path path, String collection, List<IndexDefinitionHolder> indexes, CycleGuard guard) {

String propertyDotPath = (StringUtils.hasText(dotPath) ? dotPath + "." : "") + persistentProperty.getFieldName();
@@ -193,25 +196,30 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
}
}

IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(propertyDotPath, collection,
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath, collection,
persistentProperty);

if (indexDefinitionHolder != null) {
indexes.add(indexDefinitionHolder);
if (!indexDefinitions.isEmpty()) {
indexes.addAll(indexDefinitions);
}
}

@Nullable
private IndexDefinitionHolder createIndexDefinitionHolderForProperty(String dotPath, String collection,
private List<IndexDefinitionHolder> createIndexDefinitionHolderForProperty(String dotPath, String collection,
MongoPersistentProperty persistentProperty) {

List<IndexDefinitionHolder> indices = new ArrayList<>(2);

if (persistentProperty.isAnnotationPresent(Indexed.class)) {
return createIndexDefinition(dotPath, collection, persistentProperty);
indices.add(createIndexDefinition(dotPath, collection, persistentProperty));
} else if (persistentProperty.isAnnotationPresent(GeoSpatialIndexed.class)) {
return createGeoSpatialIndexDefinition(dotPath, collection, persistentProperty);
indices.add(createGeoSpatialIndexDefinition(dotPath, collection, persistentProperty));
}

return null;
if (persistentProperty.isAnnotationPresent(HashIndexed.class)) {
indices.add(createHashedIndexDefinition(dotPath, collection, persistentProperty));
}

return indices;
}

private List<IndexDefinitionHolder> potentiallyCreateCompoundIndexDefinitions(String dotPath, String collection,
@@ -225,7 +233,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
}

private Collection<? extends IndexDefinitionHolder> potentiallyCreateTextIndexDefinition(
MongoPersistentEntity<?> root) {
MongoPersistentEntity<?> root, String collection) {

String name = root.getType().getSimpleName() + "_TextIndex";
if (name.getBytes().length > 127) {
@@ -255,13 +263,17 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
LOGGER.info(e.getMessage());
}

if (root.hasCollation()) {
indexDefinitionBuilder.withSimpleCollation();
}

TextIndexDefinition indexDefinition = indexDefinitionBuilder.build();

if (!indexDefinition.hasFieldSpec()) {
return Collections.emptyList();
}

IndexDefinitionHolder holder = new IndexDefinitionHolder("", indexDefinition, root.getCollection());
IndexDefinitionHolder holder = new IndexDefinitionHolder("", indexDefinition, collection);
return Collections.singletonList(holder);

}
@@ -322,11 +334,12 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
}

/**
* Create {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} for {@link CompoundIndexes} of given type.
* Create {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} for {@link CompoundIndexes} of a given
* type.
*
* @param dotPath The properties {@literal "dot"} path representation from its document root.
* @param fallbackCollection
* @param type
* @param entity
* @return
*/
protected List<IndexDefinitionHolder> createCompoundIndexDefinitions(String dotPath, String fallbackCollection,
@@ -404,19 +417,19 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
}

/**
* Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link Indexed} for given
* Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link Indexed} for a given
* {@link MongoPersistentProperty}.
*
* @param dotPath The properties {@literal "dot"} path representation from its document root.
* @param collection
* @param persitentProperty
* @param persistentProperty
* @return
*/
@Nullable
protected IndexDefinitionHolder createIndexDefinition(String dotPath, String collection,
MongoPersistentProperty persitentProperty) {
MongoPersistentProperty persistentProperty) {

Indexed index = persitentProperty.findAnnotation(Indexed.class);
Indexed index = persistentProperty.findAnnotation(Indexed.class);

if (index == null) {
return null;
@@ -426,7 +439,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
IndexDirection.ASCENDING.equals(index.direction()) ? Sort.Direction.ASC : Sort.Direction.DESC);

if (!index.useGeneratedName()) {
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persitentProperty.getOwner(), persitentProperty));
indexDefinition
.named(pathAwareIndexName(index.name(), dotPath, persistentProperty.getOwner(), persistentProperty));
}

if (index.unique()) {
@@ -454,7 +468,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
}

Duration timeout = computeIndexTimeout(index.expireAfter(),
getEvaluationContextForProperty(persitentProperty.getOwner()));
getEvaluationContextForProperty(persistentProperty.getOwner()));
if (!timeout.isZero() && !timeout.isNegative()) {
indexDefinition.expire(timeout);
}
@@ -463,6 +477,29 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
}

/**
* Creates {@link HashedIndex} wrapped in {@link IndexDefinitionHolder} out of {@link HashIndexed} for a given
* {@link MongoPersistentProperty}.
*
* @param dotPath The properties {@literal "dot"} path representation from its document root.
* @param collection
* @param persistentProperty
* @return
* @since 2.2
*/
@Nullable
protected IndexDefinitionHolder createHashedIndexDefinition(String dotPath, String collection,
MongoPersistentProperty persistentProperty) {

HashIndexed index = persistentProperty.findAnnotation(HashIndexed.class);

if (index == null) {
return null;
}

return new IndexDefinitionHolder(dotPath, HashedIndex.hashed(dotPath), collection);
}

/**
* Get the default {@link EvaluationContext}.
*
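createIndexDefinition(…) and the new createHashedIndexDefinition(…) are what turn property annotations into concrete index definitions. A sketch of a domain type exercising both follows; the class, its fields, and the "10m" timeout value are illustrative assumptions (expireAfter is evaluated, SpEL-aware, into a Duration by computeIndexTimeout above).

import java.time.Instant;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.HashIndexed;
import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Session {

    @Id String id;

    @HashIndexed // resolved into a hashed index on "token" via createHashedIndexDefinition(...)
    String token;

    @Indexed(expireAfter = "10m") // resolved into a TTL index; the duration string is an assumption
    Instant lastAccessed;
}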
@@ -587,11 +624,11 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
propertyDotPath));
}

IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(propertyDotPath, collection,
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath, collection,
property);

if (indexDefinitionHolder != null) {
indexes.add(indexDefinitionHolder);
if (!indexDefinitions.isEmpty()) {
indexes.addAll(indexDefinitions);
}
}

@@ -125,11 +125,13 @@ public class ReactiveMongoPersistentEntityIndexCreator {
List<Mono<?>> publishers = new ArrayList<>();

if (entity.isAnnotationPresent(Document.class)) {

String collection = entity.getCollection();
for (IndexDefinition indexDefinition : indexResolver.resolveIndexFor(entity.getTypeInformation())) {

IndexDefinitionHolder indexToCreate = indexDefinition instanceof IndexDefinitionHolder
? (IndexDefinitionHolder) indexDefinition
: new IndexDefinitionHolder("", indexDefinition, entity.getCollection());
: new IndexDefinitionHolder("", indexDefinition, collection);

publishers.add(createIndex(indexToCreate));
}

@@ -21,6 +21,7 @@ import java.util.Set;

import org.bson.Document;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
@@ -40,9 +41,10 @@ public class TextIndexDefinition implements IndexDefinition {
private @Nullable String defaultLanguage;
private @Nullable String languageOverride;
private @Nullable IndexFilter filter;
private @Nullable Collation collation;

TextIndexDefinition() {
fieldSpecs = new LinkedHashSet<TextIndexedFieldSpec>();
fieldSpecs = new LinkedHashSet<>();
}

/**
@@ -116,6 +118,10 @@ public class TextIndexDefinition implements IndexDefinition {
options.put("default_language", defaultLanguage);
}

if (collation != null) {
options.put("collation", collation.toDocument());
}

Document weightsDocument = new Document();
for (TextIndexedFieldSpec fieldSpec : fieldSpecs) {
if (fieldSpec.isWeighted()) {
@@ -348,6 +354,17 @@ public class TextIndexDefinition implements IndexDefinition {
return this;
}

/**
* Configure to use simple {@link Collation}. Required if the collection uses a non-simple collation.
*
* @since 2.2
*/
public TextIndexDefinitionBuilder withSimpleCollation() {

this.instance.collation = Collation.simple();
return this;
}

public TextIndexDefinition build() {
return this.instance;
}

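The builder's new withSimpleCollation() pins the index options to the simple collation, which MongoDB requires for text indexes on collections created with a non-simple default collation. A sketch follows; the field name and collection name are assumptions.

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.TextIndexDefinition;
import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexDefinitionBuilder;

class TextIndexWithSimpleCollation {

    static void ensureTextIndex(MongoTemplate template) {

        TextIndexDefinition textIndex = new TextIndexDefinitionBuilder()
                .onField("description") // hypothetical field
                .withSimpleCollation()  // adds { "collation" : { "locale" : "simple" } } to the index options
                .build();

        template.indexOps("products").ensureIndex(textIndex); // "products" is an assumed collection name
    }
}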
@@ -60,6 +60,9 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong

private final @Nullable Expression expression;

private final @Nullable String collation;
private final @Nullable Expression collationExpression;

/**
* Creates a new {@link BasicMongoPersistentEntity} with the given {@link TypeInformation}. Will default the
* collection name to the entities simple type name.
@@ -78,12 +81,16 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong

this.collection = StringUtils.hasText(document.collection()) ? document.collection() : fallback;
this.language = StringUtils.hasText(document.language()) ? document.language() : "";
this.expression = detectExpression(document);
this.expression = detectExpression(document.collection());
this.collation = document.collation();
this.collationExpression = detectExpression(document.collation());
} else {

this.collection = fallback;
this.language = "";
this.expression = null;
this.collation = null;
this.collationExpression = null;
}
}

@@ -126,6 +133,33 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
return getTextScoreProperty() != null;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#getCollation()
*/
@Override
public org.springframework.data.mongodb.core.query.Collation getCollation() {

Object collationValue = collationExpression != null ? expression.getValue(getEvaluationContext(null), String.class)
: this.collation;

if (collationValue == null) {
return null;
}

if (collationValue instanceof org.bson.Document) {
return org.springframework.data.mongodb.core.query.Collation.from((org.bson.Document) collationValue);
}

if (collationValue instanceof org.springframework.data.mongodb.core.query.Collation) {
return org.springframework.data.mongodb.core.query.Collation.class.cast(collationValue);
}

return StringUtils.hasText(collationValue.toString())
? org.springframework.data.mongodb.core.query.Collation.parse(collationValue.toString())
: null;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mapping.model.BasicPersistentEntity#verify()
@@ -246,24 +280,20 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
}

/**
* Returns a SpEL {@link Expression} frór the collection String expressed in the given {@link Document} annotation if
* present or {@literal null} otherwise. Will also return {@literal null} it the collection {@link String} evaluates
* to a {@link LiteralExpression} (indicating that no subsequent evaluation is necessary).
* Returns a SpEL {@link Expression} if the given {@link String} is actually an expression that does not evaluate to a
* {@link LiteralExpression} (indicating that no subsequent evaluation is necessary).
*
* @param document can be {@literal null}
* @param potentialExpression can be {@literal null}
* @return
*/
@Nullable
private static Expression detectExpression(Document document) {
private static Expression detectExpression(@Nullable String potentialExpression) {

String collection = document.collection();

if (!StringUtils.hasText(collection)) {
if (!StringUtils.hasText(potentialExpression)) {
return null;
}

Expression expression = PARSER.parseExpression(document.collection(), ParserContext.TEMPLATE_EXPRESSION);

Expression expression = PARSER.parseExpression(potentialExpression, ParserContext.TEMPLATE_EXPRESSION);
return expression instanceof LiteralExpression ? null : expression;
}

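getCollation() above resolves the entity-level collation, preferring a detected SpEL expression over the literal annotation value and finally parsing plain strings via Collation.parse(…). A sketch of reading it back from the mapping metadata; the Book class and its "en_US" value are assumptions.

import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.query.Collation;

class EntityCollationLookup {

    static Collation lookupCollation() {

        MongoMappingContext mappingContext = new MongoMappingContext();
        return mappingContext.getRequiredPersistentEntity(Book.class).getCollation();
        // for the value below this is expected to be equivalent to Collation.of("en_US")
    }

    @Document(collation = "en_US")
    static class Book {}
}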
@@ -60,9 +60,17 @@ public @interface Document {
/**
* Defines the default language to be used with this document.
*
* @since 1.6
* @return
* @since 1.6
*/
String language() default "";

/**
* Defines the collation to apply when executing a query or creating indexes.
*
* @return an empty {@link String} by default.
* @since 2.2
*/
String collation() default "";

}

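The new collation() attribute complements collection(): judging by detectExpression(…) and Collation.parse(…) in the entity code above, it should accept a plain locale, a JSON collation document, or a SpEL template expression. A sketch of the three styles; the classes and values are illustrative only.

import org.springframework.data.mongodb.core.mapping.Document;

class CollationAttributeExamples {

    @Document(collection = "books", collation = "en_US") // plain locale string
    static class Book {}

    @Document(collection = "movies", collation = "{ 'locale' : 'fr', 'strength' : 2 }") // JSON collation document
    static class Movie {}

    @Document(collection = "games", collation = "#{'de' + '_AT'}") // SpEL template expression, contrived on purpose
    static class Game {}
}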
Some files were not shown because too many files have changed in this diff.