Compare commits
214 Commits
2.2.0.M3 ... 2.1.14.RELEASE
| Author | SHA1 | Date |
|---|---|---|
| | 8e51d005d5 | |
| | 1940a5c2c2 | |
| | 5a274029d7 | |
| | c342bf266e | |
| | 11baf455d2 | |
| | 05882813ac | |
| | bd3f26c928 | |
| | 5555aa970b | |
| | e74fe05abd | |
| | d579254fbc | |
| | 71c8e4cc02 | |
| | a087c7d17c | |
| | 90cec275a6 | |
| | e4eefe577d | |
| | b57a6612f6 | |
| | 62b2d54e0d | |
| | aff823da57 | |
| | d45b630724 | |
| | fc8c97aeb0 | |
| | 004e7f01b2 | |
| | 5c80ee0087 | |
| | adb9dc29a2 | |
| | 6eb6feadbb | |
| | 166aab39c4 | |
| | fa94c22c2a | |
| | e0f88a8b84 | |
| | 59aa8051d3 | |
| | 205a06e79a | |
| | 899b43a29b | |
| | 0f0a4ed31b | |
| | 9acc8d5268 | |
| | 313ffb5426 | |
| | dc859953f4 | |
| | bc29f2b24e | |
| | 686cdac73f | |
| | b7b339577b | |
| | 2166a6e953 | |
| | 3c601a699a | |
| | 37211fc6d7 | |
| | a45c9040c4 | |
| | 23c0a07b93 | |
| | f3a7d6a20e | |
| | 0d22d831f8 | |
| | 6b0e2ab5de | |
| | 5d02b84856 | |
| | 93e911985e | |
| | e7faa1a1ec | |
| | 631714941a | |
| | db9428cebe | |
| | 4be53ac952 | |
| | 564acd75d5 | |
| | 95ccdf4c20 | |
| | 291ef4bb75 | |
| | c7461928f4 | |
| | f5a5d3e96b | |
| | b213aada80 | |
| | 403e5043cb | |
| | bdbda459c0 | |
| | 0bf6d5f7fa | |
| | f2ae14206a | |
| | 049159374d | |
| | 79f8e06fc1 | |
| | 370db2dce5 | |
| | 74325d5193 | |
| | e6ea2e1379 | |
| | cb85f3cfa6 | |
| | aff8b89006 | |
| | 0ad8857368 | |
| | 46de82fe0b | |
| | 387348b615 | |
| | 8fd41faac6 | |
| | 8a15e1086b | |
| | 8502786648 | |
| | d7107d49bf | |
| | f42cb1e2f0 | |
| | a9403b526f | |
| | 5f6291ed32 | |
| | 676ee80434 | |
| | b54641ff86 | |
| | 6930c720ca | |
| | 611cfe9c11 | |
| | 507a1fbf34 | |
| | 087649de35 | |
| | 1f01f34377 | |
| | 295c43c6ff | |
| | 5a62d449bf | |
| | 1cbbe692b5 | |
| | 5bfe125160 | |
| | 1b6722324e | |
| | a212f5f79d | |
| | 2879348d4b | |
| | 10097311c7 | |
| | b8303a56b6 | |
| | f9e468aebb | |
| | b900dc6c09 | |
| | bede55714c | |
| | 3ec426352f | |
| | c6293e0ebd | |
| | 74e49a2326 | |
| | 69c451f69f | |
| | 9af8160e05 | |
| | fdf4ea1e60 | |
| | 8c7afe012f | |
| | 6ba258a1f3 | |
| | 059c8cf1dd | |
| | 2b8955f583 | |
| | 23fde167f6 | |
| | 9470f82e9b | |
| | 1e88e241d4 | |
| | 0b8396c43c | |
| | b602e4cb26 | |
| | 500393e596 | |
| | 7e4cbdb8b0 | |
| | 1d6d8ff8e6 | |
| | 8ea4cbe9ea | |
| | 45a0c36184 | |
| | 599c79bce2 | |
| | eda6d40aa7 | |
| | 22b844c87f | |
| | bdf7ec7c9b | |
| | 13db06d345 | |
| | 365ecd53c4 | |
| | dc40c42815 | |
| | 49415efb8c | |
| | dc234906f4 | |
| | a7f51a7c85 | |
| | 9b0bd11d09 | |
| | d7ad883f69 | |
| | 44308bfbe1 | |
| | 9b673d342f | |
| | 5517198310 | |
| | 819a04f3db | |
| | f7202067a5 | |
| | f20a0f20c9 | |
| | 02216d5941 | |
| | 79f2094322 | |
| | afbc5cfa25 | |
| | a3882a5e5c | |
| | 8194772388 | |
| | 12f18850dc | |
| | 816c1da248 | |
| | 5a78f19781 | |
| | 698837921b | |
| | 0f7fc7880b | |
| | 6e42f49b08 | |
| | bdfe4e99ed | |
| | 85aa3927a6 | |
| | 33c4e4294f | |
| | a89ab387cc | |
| | e52b8c9d38 | |
| | 4dbf4795db | |
| | 8e4c6f68ae | |
| | fddbd126ea | |
| | ee5b26ab1c | |
| | 01e9a2ed67 | |
| | 10107c7b81 | |
| | abe7876086 | |
| | a759dff5fd | |
| | 9f8d081ef3 | |
| | b8f6030441 | |
| | 267decf189 | |
| | 3a7492c68d | |
| | 273088b6a8 | |
| | 723b481f82 | |
| | 8a34bc46a2 | |
| | bb4c16f4cd | |
| | cf5b7c9763 | |
| | f4414e98a2 | |
| | a97bfd2a37 | |
| | 9fe0f5c984 | |
| | 718a7ffe8c | |
| | f7106dc425 | |
| | 0698f8bcb8 | |
| | 3effd9ae6f | |
| | 7002cd1456 | |
| | a15d488657 | |
| | 44651581b1 | |
| | 6d64f5b2b2 | |
| | 0c52a29ba8 | |
| | bd8bd4f568 | |
| | c75f29dc42 | |
| | e493af7266 | |
| | 8d892e5924 | |
| | 053299f243 | |
| | 872659cc00 | |
| | 96978a6194 | |
| | 2253d3e301 | |
| | 5982ee84f7 | |
| | dd2af6462d | |
| | 622643bf24 | |
| | 51cc55baac | |
| | 0b106e5649 | |
| | 8975d93ab3 | |
| | e25b6c49f5 | |
| | 7a70c205de | |
| | 6045efa450 | |
| | 7b0816b3ee | |
| | 14e4ea736d | |
| | 32e7d9ab7f | |
| | 7f35ad9e45 | |
| | 60228f6e5a | |
| | 7604492b7f | |
| | 4680fe0e77 | |
| | b4228c88d3 | |
| | f6ef8c94c8 | |
| | 0d0dafa85e | |
| | 29aa34619f | |
| | 7f19f769c4 | |
| | a40e89d90a | |
| | 6b2350200a | |
| | fb50b0f6e7 | |
| | ab568229b5 | |
| | 7f9c1bd774 | |
| | 670a0978da | |
110 .mvn/wrapper/MavenWrapperDownloader.java (vendored)
@@ -1,110 +0,0 @@
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

https://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/

import java.net.*;
import java.io.*;
import java.nio.channels.*;
import java.util.Properties;

public class MavenWrapperDownloader {

	/**
	 * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
	 */
	private static final String DEFAULT_DOWNLOAD_URL =
			"https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar";

	/**
	 * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
	 * use instead of the default one.
	 */
	private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
			".mvn/wrapper/maven-wrapper.properties";

	/**
	 * Path where the maven-wrapper.jar will be saved to.
	 */
	private static final String MAVEN_WRAPPER_JAR_PATH =
			".mvn/wrapper/maven-wrapper.jar";

	/**
	 * Name of the property which should be used to override the default download url for the wrapper.
	 */
	private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";

	public static void main(String args[]) {
		System.out.println("- Downloader started");
		File baseDirectory = new File(args[0]);
		System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());

		// If the maven-wrapper.properties exists, read it and check if it contains a custom
		// wrapperUrl parameter.
		File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
		String url = DEFAULT_DOWNLOAD_URL;
		if(mavenWrapperPropertyFile.exists()) {
			FileInputStream mavenWrapperPropertyFileInputStream = null;
			try {
				mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
				Properties mavenWrapperProperties = new Properties();
				mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
				url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
			} catch (IOException e) {
				System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
			} finally {
				try {
					if(mavenWrapperPropertyFileInputStream != null) {
						mavenWrapperPropertyFileInputStream.close();
					}
				} catch (IOException e) {
					// Ignore ...
				}
			}
		}
		System.out.println("- Downloading from: : " + url);

		File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
		if(!outputFile.getParentFile().exists()) {
			if(!outputFile.getParentFile().mkdirs()) {
				System.out.println(
						"- ERROR creating output direcrory '" + outputFile.getParentFile().getAbsolutePath() + "'");
			}
		}
		System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
		try {
			downloadFileFromURL(url, outputFile);
			System.out.println("Done");
			System.exit(0);
		} catch (Throwable e) {
			System.out.println("- Error downloading");
			e.printStackTrace();
			System.exit(1);
		}
	}

	private static void downloadFileFromURL(String urlString, File destination) throws Exception {
		URL website = new URL(urlString);
		ReadableByteChannel rbc;
		rbc = Channels.newChannel(website.openStream());
		FileOutputStream fos = new FileOutputStream(destination);
		fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
		fos.close();
		rbc.close();
	}

}
43 CI.adoc (Normal file)
@@ -0,0 +1,43 @@
= Continuous Integration

image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Moore%20(master)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F2.1.x&subject=Lovelace%20(2.1.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F1.10.x&subject=Ingalls%20(1.10.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]

== Running CI tasks locally

Since this pipeline is purely Docker-based, it's easy to:

* Debug what went wrong on your local machine.
* Test out a tweak to your test routine before sending it out.
* Experiment against a new image before submitting your pull request.

All of these use cases are great reasons to essentially run what the CI server does on your local machine.

IMPORTANT: To do this you must have Docker installed on your machine.

1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk8-with-mongodb-4.0:latest /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+
2. `cd spring-data-mongodb-github`
+
Next, run the tests from inside the container:
+
3. `./mvnw clean dependency:list test -Dsort -Dbundlor.enabled=false -B` (or with whatever profile you need to test out)

Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs.

If you need to package things up, do this:

1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk8-with-mongodb-4.0:latest /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+
2. `cd spring-data-mongodb-github`
+
Next, package things from inside the container doing this:
+
3. `./mvnw clean dependency:list package -Dsort -Dbundlor.enabled=false -B`

NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
123 Jenkinsfile (vendored, Normal file)
@@ -0,0 +1,123 @@
pipeline {
	agent none

	triggers {
		pollSCM 'H/10 * * * *'
		upstream(upstreamProjects: "spring-data-commons/2.1.x", threshold: hudson.model.Result.SUCCESS)
	}

	options {
		disableConcurrentBuilds()
		buildDiscarder(logRotator(numToKeepStr: '14'))
	}

	stages {
		stage("Test") {
			when {
				anyOf {
					branch '2.1.x'
					not { triggeredBy 'UpstreamCause' }
				}
			}
			parallel {
				stage("test: baseline") {
					agent {
						docker {
							image 'springci/spring-data-openjdk8-with-mongodb-4.0:latest'
							label 'data'
							args '-v $HOME:/tmp/jenkins-home'
						}
					}
					options { timeout(time: 30, unit: 'MINUTES') }
					steps {
						sh 'rm -rf ?'
						sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
						sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
						sh 'sleep 10'
						sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
						sh 'sleep 15'
						sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -U -B'
					}
				}

			}
		}

		stage('Release to artifactory') {
			when {
				branch 'issue/*'
				not { triggeredBy 'UpstreamCause' }
			}
			agent {
				docker {
					image 'adoptopenjdk/openjdk8:latest'
					label 'data'
					args '-v $HOME:/tmp/jenkins-home'
				}
			}
			options { timeout(time: 20, unit: 'MINUTES') }

			environment {
				ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
			}

			steps {
				sh 'rm -rf ?'
				sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
						'-Dartifactory.server=https://repo.spring.io ' +
						"-Dartifactory.username=${ARTIFACTORY_USR} " +
						"-Dartifactory.password=${ARTIFACTORY_PSW} " +
						"-Dartifactory.staging-repository=libs-snapshot-local " +
						"-Dartifactory.build-name=spring-data-mongodb-2.1 " +
						"-Dartifactory.build-number=${BUILD_NUMBER} " +
						'-Dmaven.test.skip=true clean deploy -U -B'
			}
		}

		stage('Release to artifactory with docs') {
			when {
				branch '2.1.x'
			}
			agent {
				docker {
					image 'adoptopenjdk/openjdk8:latest'
					label 'data'
					args '-v $HOME:/tmp/jenkins-home'
				}
			}
			options { timeout(time: 20, unit: 'MINUTES') }

			environment {
				ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
			}

			steps {
				sh 'rm -rf ?'
				sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
						'-Dartifactory.server=https://repo.spring.io ' +
						"-Dartifactory.username=${ARTIFACTORY_USR} " +
						"-Dartifactory.password=${ARTIFACTORY_PSW} " +
						"-Dartifactory.staging-repository=libs-snapshot-local " +
						"-Dartifactory.build-name=spring-data-mongodb-2.1 " +
						"-Dartifactory.build-number=${BUILD_NUMBER} " +
						'-Dmaven.test.skip=true clean deploy -U -B'
			}
		}
	}

	post {
		changed {
			script {
				slackSend(
						color: (currentBuild.currentResult == 'SUCCESS') ? 'good' : 'danger',
						channel: '#spring-data-dev',
						message: "${currentBuild.fullDisplayName} - `${currentBuild.currentResult}`\n${env.BUILD_URL}")
				emailext(
						subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}",
						mimeType: 'text/html',
						recipientProviders: [[$class: 'CulpritsRecipientProvider'], [$class: 'RequesterRecipientProvider']],
						body: "<a href=\"${env.BUILD_URL}\">${currentBuild.fullDisplayName} is reported as ${currentBuild.currentResult}</a>")
			}
		}
	}
}
159 README.adoc (Normal file)
@@ -0,0 +1,159 @@
image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start]

= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]

The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.

The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities.
The Spring Data MongoDB project provides integration with the MongoDB document database.
Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB `+Document+` and easily writing a repository style data access layer.

== Code of Conduct

This project is governed by the link:CODE_OF_CONDUCT.adoc[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io.

== Getting Started

Here is a quick teaser of an application using Spring Data Repositories in Java:

[source,java]
----
public interface PersonRepository extends CrudRepository<Person, Long> {

	List<Person> findByLastname(String lastname);

	List<Person> findByFirstnameLike(String firstname);
}

@Service
public class MyService {

	private final PersonRepository repository;

	public MyService(PersonRepository repository) {
		this.repository = repository;
	}

	public void doWork() {

		repository.deleteAll();

		Person person = new Person();
		person.setFirstname("Oliver");
		person.setLastname("Gierke");
		repository.save(person);

		List<Person> lastNameResults = repository.findByLastname("Gierke");
		List<Person> firstNameResults = repository.findByFirstnameLike("Oli*");
	}
}

@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoConfiguration {

	@Override
	public MongoClient mongoClient() {
		return new MongoClient();
	}

	@Override
	protected String getDatabaseName() {
		return "springdata";
	}
}
----

=== Maven configuration

Add the Maven dependency:

[source,xml]
----
<dependency>
	<groupId>org.springframework.data</groupId>
	<artifactId>spring-data-mongodb</artifactId>
	<version>${version}.RELEASE</version>
</dependency>
----

If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version.

[source,xml]
----
<dependency>
	<groupId>org.springframework.data</groupId>
	<artifactId>spring-data-mongodb</artifactId>
	<version>${version}.BUILD-SNAPSHOT</version>
</dependency>

<repository>
	<id>spring-libs-snapshot</id>
	<name>Spring Snapshot Repository</name>
	<url>https://repo.spring.io/libs-snapshot</url>
</repository>
----

== Getting Help

Having trouble with Spring Data? We’d love to help!

* Check the
https://docs.spring.io/spring-data/mongodb/docs/current/reference/html/[reference documentation], and https://docs.spring.io/spring-data/mongodb/docs/current/api/[Javadocs].
* Learn the Spring basics – Spring Data builds on Spring Framework, check the https://spring.io[spring.io] web-site for a wealth of reference documentation.
If you are just starting out with Spring, try one of the https://spring.io/guides[guides].
* If you are upgrading, check out the https://docs.spring.io/spring-data/mongodb/docs/current/changelog.txt[changelog] for "`new and noteworthy`" features.
* Ask a question - we monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-data[`spring-data-mongodb`].
You can also chat with the community on https://gitter.im/spring-projects/spring-data[Gitter].
* Report bugs with Spring Data MongoDB at https://jira.spring.io/browse/DATAMONGO[jira.spring.io/browse/DATAMONGO].

== Reporting Issues

Spring Data uses JIRA as issue tracking system to record bugs and feature requests. If you want to raise an issue, please follow the recommendations below:

* Before you log a bug, please search the
https://jira.spring.io/browse/DATAMONGO[issue tracker] to see if someone has already reported the problem.
* If the issue doesn't already exist, https://jira.spring.io/browse/DATAMONGO[create a new issue].
* Please provide as much information as possible with the issue report, we like to know the version of Spring Data that you are using and JVM version.
* If you need to paste code, or include a stack trace use JIRA `{code}…{code}` escapes before and after your text.
* If possible try to create a test-case or project that replicates the issue. Attach a link to your code or a compressed file containing your code.

== Building from Source

You don't need to build from source to use Spring Data (binaries in https://repo.spring.io[repo.spring.io]), but if you want to try out the latest and greatest, Spring Data can be easily built with the https://github.com/takari/maven-wrapper[maven wrapper].
You also need JDK 1.8.

[source,bash]
----
$ ./mvnw clean install
----

If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.5.0 or above].

_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular please sign the https://cla.pivotal.io/sign/spring[Contributor's Agreement] before your first non-trivial change._

=== Building reference documentation

Building the documentation builds also the project without running tests.

[source,bash]
----
$ ./mvnw clean install -Pdistribute
----

The generated documentation is available from `target/site/reference/html/index.html`.

== Guides

The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step:

* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories.
* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories.

== Examples

* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail.

== License

Spring Data MongoDB is Open Source software released under the https://www.apache.org/licenses/LICENSE-2.0.html[Apache 2.0 license].
186 README.md
@@ -1,186 +0,0 @@
[![Spring Data MongoDB](https://spring.io/badges/spring-data-mongodb/ga.svg)](https://projects.spring.io/spring-data-mongodb#quick-start)
[![Spring Data MongoDB](https://spring.io/badges/spring-data-mongodb/snapshot.svg)](https://projects.spring.io/spring-data-mongodb#quick-start)

# Spring Data MongoDB

The primary goal of the [Spring Data](https://projects.spring.io/spring-data) project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.

The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities. The Spring Data MongoDB project provides integration with the MongoDB document database. Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB DBCollection and easily writing a repository style data access layer.

## Getting Help

For a comprehensive treatment of all the Spring Data MongoDB features, please refer to:

* the [User Guide](https://docs.spring.io/spring-data/mongodb/docs/current/reference/html/)
* the [JavaDocs](https://docs.spring.io/spring-data/mongodb/docs/current/api/) have extensive comments in them as well.
* the home page of [Spring Data MongoDB](https://projects.spring.io/spring-data-mongodb) contains links to articles and other resources.
* for more detailed questions, use [Spring Data Mongodb on Stackoverflow](https://stackoverflow.com/questions/tagged/spring-data-mongodb).

If you are new to Spring as well as to Spring Data, look for information about [Spring projects](https://projects.spring.io/).

## Quick Start

### Maven configuration

Add the Maven dependency:

```xml
<dependency>
	<groupId>org.springframework.data</groupId>
	<artifactId>spring-data-mongodb</artifactId>
	<version>${version}.RELEASE</version>
</dependency>
```

If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version.

```xml
<dependency>
	<groupId>org.springframework.data</groupId>
	<artifactId>spring-data-mongodb</artifactId>
	<version>${version}.BUILD-SNAPSHOT</version>
</dependency>

<repository>
	<id>spring-libs-snapshot</id>
	<name>Spring Snapshot Repository</name>
	<url>https://repo.spring.io/libs-snapshot</url>
</repository>
```

### MongoTemplate

MongoTemplate is the central support class for Mongo database operations. It provides:

* Basic POJO mapping support to and from BSON
* Convenience methods to interact with the store (insert object, update objects) and MongoDB specific ones (geo-spatial operations, upserts, map-reduce etc.)
* Connection affinity callback
* Exception translation into Spring's [technology agnostic DAO exception hierarchy](https://docs.spring.io/spring/docs/current/spring-framework-reference/html/dao.html#dao-exceptions).
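
For orientation, here is a minimal usage sketch of `MongoTemplate`. It is not part of the original README; the `Person` class is the one from the repository example above, and the database name is an assumption chosen for illustration.

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import java.util.List;

import org.springframework.data.mongodb.core.MongoTemplate;

import com.mongodb.MongoClient;

public class MongoTemplateExample {

	public static void main(String[] args) {

		// Connect to a local MongoDB instance and pick a database (names are illustrative).
		MongoTemplate template = new MongoTemplate(new MongoClient(), "springdata");

		// Save a POJO; it is mapped to a BSON document in the "person" collection by default.
		Person person = new Person();
		person.setFirstname("Oliver");
		person.setLastname("Gierke");
		template.save(person);

		// Query it back with the Criteria API; data access exceptions are translated into
		// Spring's DataAccessException hierarchy.
		List<Person> people = template.find(query(where("lastname").is("Gierke")), Person.class);
	}
}
```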

### Spring Data repositories

To simplify the creation of data repositories Spring Data MongoDB provides a generic repository programming model. It will automatically create a repository proxy for you that adds implementations of finder methods you specify on an interface.

For example, given a `Person` class with first and last name properties, a `PersonRepository` interface that can query for `Person` by last name and when the first name matches a like expression is shown below:

```java
public interface PersonRepository extends CrudRepository<Person, Long> {

	List<Person> findByLastname(String lastname);

	List<Person> findByFirstnameLike(String firstname);
}
```

The queries issued on execution will be derived from the method name. Extending `CrudRepository` causes CRUD methods being pulled into the interface so that you can easily save and find single entities and collections of them.
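
To make the derivation concrete, the following sketch (an illustration added here, not taken from the README) shows the hand-written query that `findByLastname("Gierke")` corresponds to; the MongoDB filter in the comment is the expected shape of that query.

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.query.Query;

class DerivedQueryIllustration {

	// findByLastname("Gierke") is parsed into the property predicate "lastname is Gierke",
	// which corresponds to the MongoDB filter { "lastname" : "Gierke" }.
	Query byLastname = query(where("lastname").is("Gierke"));
}
```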

You can have Spring automatically create a proxy for the interface by using the following JavaConfig:

```java
@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoConfiguration {

	@Override
	public MongoClient mongoClient() throws Exception {
		return new MongoClient();
	}

	@Override
	protected String getDatabaseName() {
		return "springdata";
	}
}
```

This sets up a connection to a local MongoDB instance and enables the detection of Spring Data repositories (through `@EnableMongoRepositories`). The same configuration would look like this in XML:

```xml
<bean id="template" class="org.springframework.data.mongodb.core.MongoTemplate">
	<constructor-arg>
		<bean class="com.mongodb.MongoClient">
			<constructor-arg value="localhost" />
			<constructor-arg value="27017" />
		</bean>
	</constructor-arg>
	<constructor-arg value="database" />
</bean>

<mongo:repositories base-package="com.acme.repository" />
```

This will find the repository interface and register a proxy object in the container. You can use it as shown below:

```java
@Service
public class MyService {

	private final PersonRepository repository;

	@Autowired
	public MyService(PersonRepository repository) {
		this.repository = repository;
	}

	public void doWork() {

		repository.deleteAll();

		Person person = new Person();
		person.setFirstname("Oliver");
		person.setLastname("Gierke");
		person = repository.save(person);

		List<Person> lastNameResults = repository.findByLastname("Gierke");
		List<Person> firstNameResults = repository.findByFirstnameLike("Oli*");
	}
}
```

### MongoDB 4.0 Transactions

As of version 4 MongoDB supports [Transactions](https://www.mongodb.com/transactions). Transactions are built on top of
`ClientSessions` and therefore require an active session.

`MongoTransactionManager` is the gateway to the well known Spring transaction support. It allows applications to use
[managed transaction features of Spring](https://docs.spring.io/spring/docs/current/spring-framework-reference/html/transaction.html).
The `MongoTransactionManager` binds a `ClientSession` to the thread. `MongoTemplate` automatically detects those and operates on them accordingly.

```java
@Configuration
static class Config extends AbstractMongoConfiguration {

	@Bean
	MongoTransactionManager transactionManager(MongoDbFactory dbFactory) {
		return new MongoTransactionManager(dbFactory);
	}

	// ...
}

@Component
public class StateService {

	@Transactional
	void someBusinessFunction(Step step) {

		template.insert(step);

		process(step);

		template.update(Step.class).apply(Update.set("state", // ...
	}
}
```

## Contributing to Spring Data

Here are some ways for you to get involved in the community:

* Get involved with the Spring community on Stackoverflow and help out on the [spring-data-mongodb](https://stackoverflow.com/questions/tagged/spring-data-mongodb) tag by responding to questions and joining the debate.
* Create [JIRA](https://jira.spring.io/browse/DATAMONGO) tickets for bugs and new features and comment and vote on the ones that you are interested in.
* Github is for social coding: if you want to write code, we encourage contributions through pull requests from [forks of this repository](https://help.github.com/forking/). If you want to contribute code this way, please reference a JIRA ticket as well covering the specific issue you are addressing.
* Watch for upcoming articles on Spring by [subscribing](https://spring.io/blog) to spring.io.

Before we accept a non-trivial patch or pull request we will need you to [sign the Contributor License Agreement](https://cla.pivotal.io/sign/spring). Signing the contributor’s agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. If you forget to do so, you'll be reminded when you submit a pull request. Active contributors might be asked to join the core team, and given the ability to merge pull requests.
@@ -1,14 +0,0 @@
FROM openjdk:11-jdk

RUN apt-get update && apt-get install -y apt-transport-https

RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4

RUN echo "deb https://repo.mongodb.org/apt/debian stretch/mongodb-org/4.0 main" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list

RUN apt-get update

RUN apt-get install -y mongodb-org=4.0.3 mongodb-org-server=4.0.3 mongodb-org-shell=4.0.3 mongodb-org-mongos=4.0.3 mongodb-org-tools=4.0.3

RUN apt-get clean \
  && rm -rf /var/lib/apt/lists/*
@@ -1,14 +0,0 @@
FROM openjdk:8-jdk

RUN apt-get update && apt-get install -y apt-transport-https

RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4

RUN echo "deb https://repo.mongodb.org/apt/debian stretch/mongodb-org/4.0 main" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list

RUN apt-get update

RUN apt-get install -y mongodb-org=4.0.3 mongodb-org-server=4.0.3 mongodb-org-shell=4.0.3 mongodb-org-mongos=4.0.3 mongodb-org-tools=4.0.3

RUN apt-get clean \
  && rm -rf /var/lib/apt/lists/*
@@ -1,39 +0,0 @@
== Running CI tasks locally

Since Concourse is built on top of Docker, it's easy to:

* Debug what went wrong on your local machine.
* Test out a tweak to your `test.sh` script before sending it out.
* Experiment against a new image before submitting your pull request.

All of these use cases are great reasons to essentially run what Concourse does on your local machine.

IMPORTANT: To do this you must have Docker installed on your machine.

1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-8-jdk-with-mongodb /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+
Next, run the `test.sh` script from inside the container:
+
2. `PROFILE=none spring-data-mongodb-github/ci/test.sh`

Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs.

If you need to test the `build.sh` script, do this:

1. `mkdir /tmp/spring-data-mongodb-artifactory`
2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-8-jdk-with-mongodb /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github` and the temporary
artifactory output directory at `spring-data-mongodb-artifactory`.
+
Next, run the `build.sh` script from inside the container:
+
3. `spring-data-mongodb-github/ci/build.sh`

IMPORTANT: `build.sh` doesn't actually push to Artifactory so don't worry about accidentally deploying anything.
It just deploys to a local folder. That way, the `artifactory-resource` later in the pipeline can pick up these artifacts
and deliver them to artifactory.

NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
15 ci/build.sh
@@ -1,15 +0,0 @@
#!/bin/bash

set -euo pipefail

[[ -d $PWD/maven && ! -d $HOME/.m2 ]] && ln -s $PWD/maven $HOME/.m2

spring_data_mongodb_artifactory=$(pwd)/spring-data-mongodb-artifactory

rm -rf $HOME/.m2/repository/org/springframework/data 2> /dev/null || :

cd spring-data-mongodb-github

./mvnw deploy \
  -Dmaven.test.skip=true \
  -DaltDeploymentRepository=distribution::default::file://${spring_data_mongodb_artifactory} \
19 ci/build.yml
@@ -1,19 +0,0 @@
---
platform: linux

image_resource:
  type: docker-image
  source:
    repository: springci/spring-data-8-jdk-with-mongodb

inputs:
- name: spring-data-mongodb-github

outputs:
- name: spring-data-mongodb-artifactory

caches:
- path: maven

run:
  path: spring-data-mongodb-github/ci/build.sh
14 ci/openjdk8-mongodb-4.0/Dockerfile (Normal file)
@@ -0,0 +1,14 @@
FROM adoptopenjdk/openjdk8:latest

RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2

RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4

RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list

RUN apt-get update

RUN apt-get install -y mongodb-org=4.0.9 mongodb-org-server=4.0.9 mongodb-org-shell=4.0.9 mongodb-org-mongos=4.0.9 mongodb-org-tools=4.0.9

RUN apt-get clean \
  && rm -rf /var/lib/apt/lists/*
14 ci/openjdk8-mongodb-4.1/Dockerfile (Normal file)
@@ -0,0 +1,14 @@
FROM adoptopenjdk/openjdk8:latest

RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2

RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 4B7C549A058F8B6B

RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.1 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.1.list

RUN apt-get update

RUN apt-get install -y mongodb-org-unstable=4.1.13 mongodb-org-unstable-server=4.1.13 mongodb-org-unstable-shell=4.1.13 mongodb-org-unstable-mongos=4.1.13 mongodb-org-unstable-tools=4.1.13

RUN apt-get clean \
  && rm -rf /var/lib/apt/lists/*
14 ci/test.sh
@@ -1,14 +0,0 @@
#!/bin/bash

set -euo pipefail

mkdir -p /data/db
mongod &

[[ -d $PWD/maven && ! -d $HOME/.m2 ]] && ln -s $PWD/maven $HOME/.m2

rm -rf $HOME/.m2/repository/org/springframework/data/mongodb 2> /dev/null || :

cd spring-data-mongodb-github

./mvnw clean dependency:list test -P${PROFILE} -Dsort
16 ci/test.yml
@@ -1,16 +0,0 @@
---
platform: linux

image_resource:
  type: docker-image
  source:
    repository: springci/spring-data-8-jdk-with-mongodb

inputs:
- name: spring-data-mongodb-github

caches:
- path: maven

run:
  path: spring-data-mongodb-github/ci/test.sh
45 pom.xml
@@ -5,7 +5,7 @@

	<groupId>org.springframework.data</groupId>
	<artifactId>spring-data-mongodb-parent</artifactId>
	<version>2.2.0.M3</version>
	<version>2.1.14.RELEASE</version>
	<packaging>pom</packaging>

	<name>Spring Data MongoDB</name>

@@ -15,20 +15,21 @@
	<parent>
		<groupId>org.springframework.data.build</groupId>
		<artifactId>spring-data-parent</artifactId>
		<version>2.2.0.M3</version>
		<version>2.1.14.RELEASE</version>
	</parent>

	<modules>
		<module>spring-data-mongodb</module>
		<module>spring-data-mongodb-cross-store</module>
		<module>spring-data-mongodb-distribution</module>
	</modules>

	<properties>
		<project.type>multi</project.type>
		<dist.id>spring-data-mongodb</dist.id>
		<springdata.commons>2.2.0.M3</springdata.commons>
		<mongo>3.10.2</mongo>
		<mongo.reactivestreams>1.11.0</mongo.reactivestreams>
		<springdata.commons>2.1.14.RELEASE</springdata.commons>
		<mongo>3.8.2</mongo>
		<mongo.reactivestreams>1.9.2</mongo.reactivestreams>
		<jmh.version>1.19</jmh.version>
	</properties>

@@ -113,28 +114,34 @@
	</developers>

	<profiles>
		<profile>
			<id>benchmarks</id>
			<modules>
				<module>spring-data-mongodb</module>
				<module>spring-data-mongodb-cross-store</module>
				<module>spring-data-mongodb-distribution</module>
				<module>spring-data-mongodb-benchmarks</module>
			</modules>
		</profile>

		<profile>
			<id>release</id>
			<id>distribute</id>
			<build>
				<plugins>
					<plugin>
						<groupId>org.jfrog.buildinfo</groupId>
						<artifactId>artifactory-maven-plugin</artifactId>
						<inherited>false</inherited>
						<groupId>org.asciidoctor</groupId>
						<artifactId>asciidoctor-maven-plugin</artifactId>
						<configuration>
							<attributes>
								<mongo-reactivestreams>${mongo.reactivestreams}</mongo-reactivestreams>
								<reactor>${reactor}</reactor>
							</attributes>
						</configuration>
					</plugin>
				</plugins>
			</build>
		</profile>

		<profile>
			<id>benchmarks</id>
			<modules>
				<module>spring-data-mongodb</module>
				<module>spring-data-mongodb-distribution</module>
				<module>spring-data-mongodb-benchmarks</module>
			</modules>
		</profile>
	</profiles>

	<dependencies>

@@ -148,8 +155,8 @@

	<repositories>
		<repository>
			<id>spring-libs-milestone</id>
			<url>https://repo.spring.io/libs-milestone</url>
			<id>spring-libs-release</id>
			<url>https://repo.spring.io/libs-release</url>
		</repository>
	</repositories>

@@ -7,7 +7,7 @@
	<parent>
		<groupId>org.springframework.data</groupId>
		<artifactId>spring-data-mongodb-parent</artifactId>
		<version>2.2.0.M3</version>
		<version>2.1.14.RELEASE</version>
		<relativePath>../pom.xml</relativePath>
	</parent>

@@ -87,7 +87,6 @@
	<plugin>
		<artifactId>maven-surefire-plugin</artifactId>
		<configuration>
			<useSystemClassLoader>false</useSystemClassLoader>
			<testSourceDirectory>${project.build.sourceDirectory}</testSourceDirectory>
			<testClassesDirectory>${project.build.outputDirectory}</testClassesDirectory>
			<excludes>

7 spring-data-mongodb-cross-store/aop.xml (Normal file)
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<aspectj>
	<aspects>
		<aspect name="org.springframework.beans.factory.aspectj.AnnotationBeanConfigurerAspect" />
		<aspect name="org.springframework.data.mongodb.crossstore.MongoDocumentBacking" />
	</aspects>
</aspectj>
148 spring-data-mongodb-cross-store/pom.xml (Normal file)
@@ -0,0 +1,148 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/maven-v4_0_0.xsd">

	<modelVersion>4.0.0</modelVersion>

	<parent>
		<groupId>org.springframework.data</groupId>
		<artifactId>spring-data-mongodb-parent</artifactId>
		<version>2.1.14.RELEASE</version>
		<relativePath>../pom.xml</relativePath>
	</parent>

	<artifactId>spring-data-mongodb-cross-store</artifactId>
	<name>Spring Data MongoDB - Cross-Store Support</name>

	<properties>
		<jpa>2.1.1</jpa>
		<hibernate>5.2.1.Final</hibernate>
		<java-module-name>spring.data.mongodb.cross.store</java-module-name>
		<project.root>${basedir}/..</project.root>
	</properties>

	<dependencies>

		<!-- Spring -->
		<dependency>
			<groupId>org.springframework</groupId>
			<artifactId>spring-beans</artifactId>
			<exclusions>
				<exclusion>
					<groupId>commons-logging</groupId>
					<artifactId>commons-logging</artifactId>
				</exclusion>
			</exclusions>
		</dependency>
		<dependency>
			<groupId>org.springframework</groupId>
			<artifactId>spring-tx</artifactId>
		</dependency>
		<dependency>
			<groupId>org.springframework</groupId>
			<artifactId>spring-aspects</artifactId>
		</dependency>
		<dependency>
			<groupId>org.springframework</groupId>
			<artifactId>spring-orm</artifactId>
		</dependency>

		<!-- Spring Data -->
		<dependency>
			<groupId>org.springframework.data</groupId>
			<artifactId>spring-data-mongodb</artifactId>
			<version>2.1.14.RELEASE</version>
		</dependency>

		<!-- reactive -->
		<dependency>
			<groupId>io.projectreactor</groupId>
			<artifactId>reactor-core</artifactId>
			<optional>true</optional>
		</dependency>

		<dependency>
			<groupId>org.aspectj</groupId>
			<artifactId>aspectjrt</artifactId>
			<version>${aspectj}</version>
		</dependency>

		<!-- JPA -->
		<dependency>
			<groupId>org.eclipse.persistence</groupId>
			<artifactId>javax.persistence</artifactId>
			<version>${jpa}</version>
			<optional>true</optional>
		</dependency>

		<!-- For Tests -->
		<dependency>
			<groupId>org.hibernate</groupId>
			<artifactId>hibernate-entitymanager</artifactId>
			<version>${hibernate}</version>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>hsqldb</groupId>
			<artifactId>hsqldb</artifactId>
			<version>1.8.0.10</version>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>javax.validation</groupId>
			<artifactId>validation-api</artifactId>
			<version>${validation}</version>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.hibernate</groupId>
			<artifactId>hibernate-validator</artifactId>
			<version>5.2.4.Final</version>
			<scope>test</scope>
		</dependency>

	</dependencies>

	<build>
		<plugins>
			<plugin>
				<groupId>org.codehaus.mojo</groupId>
				<artifactId>aspectj-maven-plugin</artifactId>
				<version>1.6</version>
				<dependencies>
					<dependency>
						<groupId>org.aspectj</groupId>
						<artifactId>aspectjrt</artifactId>
						<version>${aspectj}</version>
					</dependency>
					<dependency>
						<groupId>org.aspectj</groupId>
						<artifactId>aspectjtools</artifactId>
						<version>${aspectj}</version>
					</dependency>
				</dependencies>
				<executions>
					<execution>
						<goals>
							<goal>compile</goal>
							<goal>test-compile</goal>
						</goals>
					</execution>
				</executions>
				<configuration>
					<outxml>true</outxml>
					<aspectLibraries>
						<aspectLibrary>
							<groupId>org.springframework</groupId>
							<artifactId>spring-aspects</artifactId>
						</aspectLibrary>
					</aspectLibraries>
					<complianceLevel>${source.level}</complianceLevel>
					<source>${source.level}</source>
					<target>${source.level}</target>
					<xmlConfigured>aop.xml</xmlConfigured>
				</configuration>
			</plugin>
		</plugins>
	</build>

</project>
@@ -1,5 +1,5 @@
/*
 * Copyright 2019 the original author or authors.
 * Copyright 2011-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -13,24 +13,16 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.util.json;
package org.springframework.data.mongodb.crossstore;

import org.springframework.lang.Nullable;
import org.springframework.data.crossstore.ChangeSetBacked;

/**
 * A value provider to retrieve bindable values by their parameter index.
 *
 * @author Christoph Strobl
 * @since 2.2
 * @author Thomas Risberg
 * @author Oliver Gierke
 * @deprecated will be removed without replacement.
 */
@FunctionalInterface
public interface ValueProvider {
@Deprecated
public interface DocumentBacked extends ChangeSetBacked {

	/**
	 * @param index parameter index to use.
	 * @return can be {@literal null}.
	 * @throws RuntimeException if the requested element does not exist.
	 */
	@Nullable
	Object getBindableValue(int index);
}
@@ -0,0 +1,214 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import javax.persistence.EntityManagerFactory;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.DataAccessResourceFailureException;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.data.crossstore.ChangeSet;
|
||||
import org.springframework.data.crossstore.ChangeSetBacked;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister;
|
||||
import org.springframework.data.mongodb.core.CollectionCallback;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.model.Filters;
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
|
||||
/**
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @author Alex Vengrovsk
|
||||
* @author Mark Paluch
|
||||
* @deprecated will be removed without replacement.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
|
||||
private static final String ENTITY_CLASS = "_entity_class";
|
||||
private static final String ENTITY_ID = "_entity_id";
|
||||
private static final String ENTITY_FIELD_NAME = "_entity_field_name";
|
||||
private static final String ENTITY_FIELD_CLASS = "_entity_field_class";
|
||||
|
||||
private final Logger log = LoggerFactory.getLogger(getClass());
|
||||
|
||||
private MongoTemplate mongoTemplate;
|
||||
private EntityManagerFactory entityManagerFactory;
|
||||
|
||||
public void setMongoTemplate(MongoTemplate mongoTemplate) {
|
||||
this.mongoTemplate = mongoTemplate;
|
||||
}
|
||||
|
||||
public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) {
|
||||
this.entityManagerFactory = entityManagerFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentState(java.lang.Class, java.lang.Object, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public void getPersistentState(Class<? extends ChangeSetBacked> entityClass, Object id, final ChangeSet changeSet)
|
||||
throws DataAccessException, NotFoundException {
|
||||
|
||||
if (id == null) {
|
||||
log.debug("Unable to load MongoDB data for null id");
|
||||
return;
|
||||
}
|
||||
|
||||
String collName = getCollectionNameForEntity(entityClass);
|
||||
|
||||
final Document dbk = new Document();
|
||||
dbk.put(ENTITY_ID, id);
|
||||
dbk.put(ENTITY_CLASS, entityClass.getName());
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Loading MongoDB data for {}", dbk);
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
|
||||
for (Document dbo : collection.find(dbk)) {
|
||||
String key = (String) dbo.get(ENTITY_FIELD_NAME);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Processing key: {}", key);
|
||||
}
|
||||
if (!changeSet.getValues().containsKey(key)) {
|
||||
String className = (String) dbo.get(ENTITY_FIELD_CLASS);
|
||||
if (className == null) {
|
||||
throw new DataIntegrityViolationException(
|
||||
"Unble to convert property " + key + ": Invalid metadata, " + ENTITY_FIELD_CLASS + " not available");
|
||||
}
|
||||
Class<?> clazz = ClassUtils.resolveClassName(className, ClassUtils.getDefaultClassLoader());
|
||||
Object value = mongoTemplate.getConverter().read(clazz, dbo);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Adding to ChangeSet: {}", key);
|
||||
}
|
||||
changeSet.set(key, value);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("getPersistentId called on {}", entity);
|
||||
}
|
||||
if (entityManagerFactory == null) {
|
||||
throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null");
|
||||
}
|
||||
|
||||
return entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#persistState(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public Object persistState(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
|
||||
if (cs == null) {
|
||||
log.debug("Flush: changeset was null, nothing to flush.");
|
||||
return 0L;
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: changeset: {}", cs.getValues());
|
||||
}
|
||||
|
||||
String collName = getCollectionNameForEntity(entity.getClass());
|
||||
if (mongoTemplate.getCollection(collName) == null) {
|
||||
mongoTemplate.createCollection(collName);
|
||||
}
|
||||
|
||||
for (String key : cs.getValues().keySet()) {
|
||||
if (key != null && !key.startsWith("_") && !key.equals(ChangeSetPersister.ID_KEY)) {
|
||||
Object value = cs.getValues().get(key);
|
||||
final Document dbQuery = new Document();
|
||||
dbQuery.put(ENTITY_ID, getPersistentId(entity, cs));
|
||||
dbQuery.put(ENTITY_CLASS, entity.getClass().getName());
|
||||
dbQuery.put(ENTITY_FIELD_NAME, key);
|
||||
final Document dbId = mongoTemplate.execute(collName, new CollectionCallback<Document>() {
|
||||
public Document doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
Document id = collection.find(dbQuery).first();
|
||||
return id;
|
||||
}
|
||||
});
|
||||
|
||||
if (value == null) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: removing: {}", dbQuery);
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
DeleteResult dr = collection.deleteMany(dbQuery);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
final Document dbDoc = new Document();
|
||||
dbDoc.putAll(dbQuery);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: saving: {}", dbQuery);
|
||||
}
|
||||
mongoTemplate.getConverter().write(value, dbDoc);
|
||||
dbDoc.put(ENTITY_FIELD_CLASS, value.getClass().getName());
|
||||
if (dbId != null) {
|
||||
dbDoc.put("_id", dbId.get("_id"));
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
|
||||
if (dbId != null) {
|
||||
collection.replaceOne(Filters.eq("_id", dbId.get("_id")), dbDoc);
|
||||
} else {
|
||||
|
||||
if (dbDoc.containsKey("_id") && dbDoc.get("_id") == null) {
|
||||
dbDoc.remove("_id");
|
||||
}
|
||||
collection.insertOne(dbDoc);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0L;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the collection the given entity type shall be persisted to.
|
||||
*
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return the name of the collection the given entity type is mapped to.
|
||||
*/
|
||||
private String getCollectionNameForEntity(Class<? extends ChangeSetBacked> entityClass) {
|
||||
return mongoTemplate.getCollectionName(entityClass);
|
||||
}
|
||||
}
|
||||
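To make the persistState() flow above concrete, the following is a minimal, illustrative sketch of the per-field document the persister writes. The literal field names are inferred from the ENTITY_* constants and from the "_entity_id" key checked by the integration test further down in this diff; treat them, and the Person/Resume values, as assumptions.

import org.bson.Document;

// Illustrative sketch only: the shape of one backing document written by persistState()
// for a single @RelatedDocument field (Person#resume). Field names are inferred, not
// copied from the constants, so verify them against MongoChangeSetPersister.
class CrossStoreDocumentShapeSketch {

	Document backingDocumentForResume() {

		Document dbDoc = new Document();
		dbDoc.put("_entity_id", 1L); // JPA identifier of the owning entity
		dbDoc.put("_entity_class", "org.springframework.data.mongodb.crossstore.test.Person");
		dbDoc.put("_entity_field_name", "resume"); // name of the @RelatedDocument field
		dbDoc.put("_entity_field_class", "org.springframework.data.mongodb.crossstore.test.Resume");
		// persistState() additionally writes the converted Resume properties into the same
		// document via mongoTemplate.getConverter().write(value, dbDoc) before inserting or
		// replacing it, keyed by the query built from the three fields above.
		return dbDoc;
	}
}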
@@ -0,0 +1,272 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.Transient;
|
||||
import javax.persistence.Entity;
|
||||
|
||||
import org.aspectj.lang.JoinPoint;
|
||||
import org.aspectj.lang.reflect.FieldSignature;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.crossstore.RelatedDocument;
|
||||
import org.springframework.data.mongodb.crossstore.DocumentBacked;
|
||||
import org.springframework.data.crossstore.ChangeSetBackedTransactionSynchronization;
|
||||
import org.springframework.data.crossstore.ChangeSet;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister.NotFoundException;
|
||||
import org.springframework.data.crossstore.HashMapChangeSet;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||
|
||||
/**
|
||||
* Aspect to turn an object annotated with @Document into a persistent document using Mongo.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @deprecated will be removed without replacement.
|
||||
*/
|
||||
@Deprecated
|
||||
public aspect MongoDocumentBacking {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoDocumentBacking.class);
|
||||
|
||||
// Aspect shared config
|
||||
private ChangeSetPersister<Object> changeSetPersister;
|
||||
|
||||
public void setChangeSetPersister(ChangeSetPersister<Object> changeSetPersister) {
|
||||
this.changeSetPersister = changeSetPersister;
|
||||
}
|
||||
|
||||
// ITD to introduce N state to Annotated objects
|
||||
declare parents : (@Entity *) implements DocumentBacked;
|
||||
|
||||
// The annotated fields that will be persisted in MongoDB rather than with JPA
|
||||
declare @field: @RelatedDocument * (@Entity+ *).*:@Transient;
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Advise user-defined constructors of ChangeSetBacked objects to create a new
|
||||
// backing ChangeSet
|
||||
// -------------------------------------------------------------------------
|
||||
pointcut arbitraryUserConstructorOfChangeSetBackedObject(DocumentBacked entity) :
|
||||
execution((DocumentBacked+).new(..)) &&
|
||||
!execution((DocumentBacked+).new(ChangeSet)) &&
|
||||
this(entity);
|
||||
|
||||
pointcut finderConstructorOfChangeSetBackedObject(DocumentBacked entity, ChangeSet cs) :
|
||||
execution((DocumentBacked+).new(ChangeSet)) &&
|
||||
this(entity) &&
|
||||
args(cs);
|
||||
|
||||
protected pointcut entityFieldGet(DocumentBacked entity) :
|
||||
get(@RelatedDocument * DocumentBacked+.*) &&
|
||||
this(entity) &&
|
||||
!get(* DocumentBacked.*);
|
||||
|
||||
protected pointcut entityFieldSet(DocumentBacked entity, Object newVal) :
|
||||
set(@RelatedDocument * DocumentBacked+.*) &&
|
||||
this(entity) &&
|
||||
args(newVal) &&
|
||||
!set(* DocumentBacked.*);
|
||||
|
||||
// intercept EntityManager.merge calls
|
||||
public pointcut entityManagerMerge(EntityManager em, Object entity) :
|
||||
call(* EntityManager.merge(Object)) &&
|
||||
target(em) &&
|
||||
args(entity);
|
||||
|
||||
// intercept EntityManager.remove calls
|
||||
// public pointcut entityManagerRemove(EntityManager em, Object entity) :
|
||||
// call(* EntityManager.remove(Object)) &&
|
||||
// target(em) &&
|
||||
// args(entity);
|
||||
|
||||
// move changeSet from detached entity to the newly merged persistent object
|
||||
Object around(EntityManager em, Object entity) : entityManagerMerge(em, entity) {
|
||||
Object mergedEntity = proceed(em, entity);
|
||||
if (entity instanceof DocumentBacked && mergedEntity instanceof DocumentBacked) {
|
||||
((DocumentBacked) mergedEntity).changeSet = ((DocumentBacked) entity).getChangeSet();
|
||||
}
|
||||
return mergedEntity;
|
||||
}
|
||||
|
||||
// clear changeSet from removed entity
|
||||
// Object around(EntityManager em, Object entity) : entityManagerRemove(em, entity) {
|
||||
// if (entity instanceof DocumentBacked) {
|
||||
// removeChangeSetValues((DocumentBacked)entity);
|
||||
// }
|
||||
// return proceed(em, entity);
|
||||
// }
|
||||
|
||||
private static void removeChangeSetValues(DocumentBacked entity) {
|
||||
LOGGER.debug("Removing all change-set values for " + entity);
|
||||
ChangeSet nulledCs = new HashMapChangeSet();
|
||||
DocumentBacked documentEntity = (DocumentBacked) entity;
|
||||
@SuppressWarnings("unchecked")
|
||||
ChangeSetPersister<Object> changeSetPersister = (ChangeSetPersister<Object>) documentEntity.itdChangeSetPersister;
|
||||
try {
|
||||
changeSetPersister.getPersistentState(documentEntity.getClass(), documentEntity.get_persistent_id(),
|
||||
documentEntity.getChangeSet());
|
||||
} catch (DataAccessException e) {
|
||||
} catch (NotFoundException e) {
|
||||
}
|
||||
for (String key : entity.getChangeSet().getValues().keySet()) {
|
||||
nulledCs.set(key, null);
|
||||
}
|
||||
entity.setChangeSet(nulledCs);
|
||||
}
|
||||
|
||||
before(DocumentBacked entity) : arbitraryUserConstructorOfChangeSetBackedObject(entity) {
|
||||
LOGGER.debug("User-defined constructor called on DocumentBacked object of class " + entity.getClass());
|
||||
// Populate all ITD fields
|
||||
entity.setChangeSet(new HashMapChangeSet());
|
||||
entity.itdChangeSetPersister = changeSetPersister;
|
||||
entity.itdTransactionSynchronization = new ChangeSetBackedTransactionSynchronization(changeSetPersister, entity);
|
||||
// registerTransactionSynchronization(entity);
|
||||
}
|
||||
|
||||
private static void registerTransactionSynchronization(DocumentBacked entity) {
|
||||
if (TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
if (!TransactionSynchronizationManager.getSynchronizations().contains(entity.itdTransactionSynchronization)) {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Adding transaction synchronization for " + entity);
|
||||
}
|
||||
TransactionSynchronizationManager.registerSynchronization(entity.itdTransactionSynchronization);
|
||||
} else {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Transaction synchronization already active for " + entity);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Transaction synchronization is not active for " + entity);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// ChangeSet-related mixins
|
||||
// -------------------------------------------------------------------------
|
||||
// Introduced field
|
||||
@Transient
|
||||
private ChangeSet DocumentBacked.changeSet;
|
||||
|
||||
@Transient
|
||||
private ChangeSetPersister<?> DocumentBacked.itdChangeSetPersister;
|
||||
|
||||
@Transient
|
||||
private ChangeSetBackedTransactionSynchronization DocumentBacked.itdTransactionSynchronization;
|
||||
|
||||
public void DocumentBacked.setChangeSet(ChangeSet cs) {
|
||||
this.changeSet = cs;
|
||||
}
|
||||
|
||||
public ChangeSet DocumentBacked.getChangeSet() {
|
||||
return changeSet;
|
||||
}
|
||||
|
||||
// Flush the entity state to the persistent store
|
||||
public void DocumentBacked.flush() {
|
||||
Object id = itdChangeSetPersister.getPersistentId(this, this.changeSet);
|
||||
itdChangeSetPersister.persistState(this, this.changeSet);
|
||||
}
|
||||
|
||||
public Object DocumentBacked.get_persistent_id() {
|
||||
return itdChangeSetPersister.getPersistentId(this, this.changeSet);
|
||||
}
|
||||
|
||||
// lifecycle methods
|
||||
@javax.persistence.PostPersist
|
||||
public void DocumentBacked.itdPostPersist() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PrePersist: " + this.getClass().getName());
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PreUpdate
|
||||
public void DocumentBacked.itdPreUpdate() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PreUpdate: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PostUpdate
|
||||
public void DocumentBacked.itdPostUpdate() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PostUpdate: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PostRemove
|
||||
public void DocumentBacked.itdPostRemove() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PostRemove: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
removeChangeSetValues(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PostLoad
|
||||
public void DocumentBacked.itdPostLoad() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PostLoad: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* delegates field reads to the state accessors instance
|
||||
*/
|
||||
Object around(DocumentBacked entity): entityFieldGet(entity) {
|
||||
Field f = field(thisJoinPoint);
|
||||
String propName = f.getName();
|
||||
LOGGER.trace("GET " + f + " -> ChangeSet value property [" + propName + "] using: " + entity.getChangeSet());
|
||||
if (entity.getChangeSet().getValues().get(propName) == null) {
|
||||
try {
|
||||
this.changeSetPersister
|
||||
.getPersistentState(entity.getClass(), entity.get_persistent_id(), entity.getChangeSet());
|
||||
} catch (NotFoundException e) {
|
||||
}
|
||||
}
|
||||
Object fValue = entity.getChangeSet().getValues().get(propName);
|
||||
if (fValue != null) {
|
||||
return fValue;
|
||||
}
|
||||
return proceed(entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* delegates field writes to the state accessors instance
|
||||
*/
|
||||
Object around(DocumentBacked entity, Object newVal) : entityFieldSet(entity, newVal) {
|
||||
Field f = field(thisJoinPoint);
|
||||
String propName = f.getName();
|
||||
LOGGER.trace("SET " + f + " -> ChangeSet number value property [" + propName + "] with value=[" + newVal + "]");
|
||||
entity.getChangeSet().set(propName, newVal);
|
||||
return proceed(entity, newVal);
|
||||
}
|
||||
|
||||
Field field(JoinPoint joinPoint) {
|
||||
FieldSignature fieldSignature = (FieldSignature) joinPoint.getSignature();
|
||||
return fieldSignature.getField();
|
||||
}
|
||||
}
|
||||
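As a usage sketch of the weaving above: with this aspect compiled into the domain classes and the XML configuration shown later in this diff, calling code works only with the JPA EntityManager, while the ChangeSet plumbing stays invisible. Person and Resume are the test fixtures defined further down; the transaction handling here is assumed to be provided by the surrounding configuration.

import javax.persistence.EntityManager;

import org.springframework.data.mongodb.crossstore.test.Person;
import org.springframework.data.mongodb.crossstore.test.Resume;

// Hedged usage sketch, not part of the diff: shows how the advice above kicks in.
class CrossStoreUsageSketch {

	void saveMixedEntity(EntityManager entityManager) {

		Person person = new Person("Thomas", 20); // constructor advice initialises the ChangeSet
		person.setId(1L);

		Resume resume = new Resume(); // @Document type, stored in MongoDB
		resume.addJob("VMware, Developer, 2007-");
		person.setResume(resume); // field-set advice records the value in the ChangeSet

		// @PostPersist registers a ChangeSetBackedTransactionSynchronization that flushes the
		// ChangeSet to MongoDB when the surrounding transaction commits.
		entityManager.persist(person);
	}
}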
@@ -0,0 +1,31 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* @author Thomas Risberg
|
||||
* @deprecated will be removed without replacement.
|
||||
*/
|
||||
@Deprecated
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target({ ElementType.FIELD })
|
||||
public @interface RelatedDocument {
|
||||
}
|
||||
@@ -0,0 +1,5 @@
/**
 * Infrastructure for Spring Data's MongoDB cross store support.
 */
package org.springframework.data.mongodb.crossstore;

@@ -0,0 +1,195 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.PersistenceContext;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.junit.After;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.crossstore.test.Address;
|
||||
import org.springframework.data.mongodb.crossstore.test.Person;
|
||||
import org.springframework.data.mongodb.crossstore.test.Resume;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import org.springframework.transaction.support.TransactionCallback;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
/**
|
||||
* Integration tests for MongoDB cross-store persistence (mainly {@link MongoChangeSetPersister}).
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:/META-INF/spring/applicationContext.xml")
|
||||
public class CrossStoreMongoTests {
|
||||
|
||||
@Autowired MongoTemplate mongoTemplate;
|
||||
|
||||
@PersistenceContext EntityManager entityManager;
|
||||
|
||||
@Autowired PlatformTransactionManager transactionManager;
|
||||
TransactionTemplate txTemplate;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
txTemplate = new TransactionTemplate(transactionManager);
|
||||
|
||||
clearData(Person.class);
|
||||
|
||||
Address address = new Address(12, "MAin St.", "Boston", "MA", "02101");
|
||||
|
||||
Resume resume = new Resume();
|
||||
resume.addEducation("Skanstulls High School, 1975");
|
||||
resume.addEducation("Univ. of Stockholm, 1980");
|
||||
resume.addJob("DiMark, DBA, 1990-2000");
|
||||
resume.addJob("VMware, Developer, 2007-");
|
||||
|
||||
final Person person = new Person("Thomas", 20);
|
||||
person.setAddress(address);
|
||||
person.setResume(resume);
|
||||
person.setId(1L);
|
||||
|
||||
txTemplate.execute(new TransactionCallback<Void>() {
|
||||
public Void doInTransaction(TransactionStatus status) {
|
||||
entityManager.persist(person);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
txTemplate.execute(new TransactionCallback<Void>() {
|
||||
public Void doInTransaction(TransactionStatus status) {
|
||||
entityManager.remove(entityManager.find(Person.class, 1L));
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void clearData(Class<?> domainType) {
|
||||
|
||||
String collectionName = mongoTemplate.getCollectionName(domainType);
|
||||
mongoTemplate.dropCollection(collectionName);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Transactional
|
||||
public void testReadJpaToMongoEntityRelationship() {
|
||||
|
||||
Person found = entityManager.find(Person.class, 1L);
|
||||
Assert.assertNotNull(found);
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found.getResume());
|
||||
Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-", found.getResume().getJobs());
|
||||
}
|
||||
|
||||
@Test
|
||||
@Transactional
|
||||
public void testUpdatedJpaToMongoEntityRelationship() {
|
||||
|
||||
Person found = entityManager.find(Person.class, 1L);
|
||||
found.setAge(44);
|
||||
found.getResume().addJob("SpringDeveloper.com, Consultant, 2005-2006");
|
||||
|
||||
entityManager.merge(found);
|
||||
|
||||
Assert.assertNotNull(found);
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found.getResume());
|
||||
Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-" + "; "
|
||||
+ "SpringDeveloper.com, Consultant, 2005-2006", found.getResume().getJobs());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMergeJpaEntityWithMongoDocument() {
|
||||
|
||||
final Person detached = entityManager.find(Person.class, 1L);
|
||||
entityManager.detach(detached);
|
||||
detached.getResume().addJob("TargetRx, Developer, 2000-2005");
|
||||
|
||||
Person merged = txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
Person result = entityManager.merge(detached);
|
||||
entityManager.flush();
|
||||
return result;
|
||||
}
|
||||
});
|
||||
|
||||
Assert.assertTrue(detached.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
Assert.assertTrue(merged.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
final Person updated = entityManager.find(Person.class, 1L);
|
||||
Assert.assertTrue(updated.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRemoveJpaEntityWithMongoDocument() {
|
||||
|
||||
txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
Person p2 = new Person("Thomas", 20);
|
||||
Resume r2 = new Resume();
|
||||
r2.addEducation("Skanstulls High School, 1975");
|
||||
r2.addJob("DiMark, DBA, 1990-2000");
|
||||
p2.setResume(r2);
|
||||
p2.setId(2L);
|
||||
entityManager.persist(p2);
|
||||
Person p3 = new Person("Thomas", 20);
|
||||
Resume r3 = new Resume();
|
||||
r3.addEducation("Univ. of Stockholm, 1980");
|
||||
r3.addJob("VMware, Developer, 2007-");
|
||||
p3.setResume(r3);
|
||||
p3.setId(3L);
|
||||
entityManager.persist(p3);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
final Person found2 = entityManager.find(Person.class, 2L);
|
||||
entityManager.remove(found2);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
boolean weFound3 = false;
|
||||
|
||||
for (Document dbo : this.mongoTemplate.getCollection(mongoTemplate.getCollectionName(Person.class)).find()) {
|
||||
Assert.assertTrue(!dbo.get("_entity_id").equals(2L));
|
||||
if (dbo.get("_entity_id").equals(3L)) {
|
||||
weFound3 = true;
|
||||
}
|
||||
}
|
||||
Assert.assertTrue(weFound3);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
public class Address {
|
||||
|
||||
private Integer streetNumber;
|
||||
private String streetName;
|
||||
private String city;
|
||||
private String state;
|
||||
private String zip;
|
||||
|
||||
public Address(Integer streetNumber, String streetName, String city, String state, String zip) {
|
||||
super();
|
||||
this.streetNumber = streetNumber;
|
||||
this.streetName = streetName;
|
||||
this.city = city;
|
||||
this.state = state;
|
||||
this.zip = zip;
|
||||
}
|
||||
|
||||
public Integer getStreetNumber() {
|
||||
return streetNumber;
|
||||
}
|
||||
|
||||
public void setStreetNumber(Integer streetNumber) {
|
||||
this.streetNumber = streetNumber;
|
||||
}
|
||||
|
||||
public String getStreetName() {
|
||||
return streetName;
|
||||
}
|
||||
|
||||
public void setStreetName(String streetName) {
|
||||
this.streetName = streetName;
|
||||
}
|
||||
|
||||
public String getCity() {
|
||||
return city;
|
||||
}
|
||||
|
||||
public void setCity(String city) {
|
||||
this.city = city;
|
||||
}
|
||||
|
||||
public String getState() {
|
||||
return state;
|
||||
}
|
||||
|
||||
public void setState(String state) {
|
||||
this.state = state;
|
||||
}
|
||||
|
||||
public String getZip() {
|
||||
return zip;
|
||||
}
|
||||
|
||||
public void setZip(String zip) {
|
||||
this.zip = zip;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,102 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.Id;
|
||||
|
||||
import org.springframework.data.mongodb.crossstore.RelatedDocument;
|
||||
|
||||
@Entity
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
Long id;
|
||||
|
||||
private String name;
|
||||
|
||||
private int age;
|
||||
|
||||
private java.util.Date birthDate;
|
||||
|
||||
@RelatedDocument
|
||||
private Address address;
|
||||
|
||||
@RelatedDocument
|
||||
private Resume resume;
|
||||
|
||||
public Person() {
|
||||
}
|
||||
|
||||
public Person(String name, int age) {
|
||||
this.name = name;
|
||||
this.age = age;
|
||||
this.birthDate = new java.util.Date();
|
||||
}
|
||||
|
||||
public void birthday() {
|
||||
++age;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Long id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public int getAge() {
|
||||
return age;
|
||||
}
|
||||
|
||||
public void setAge(int age) {
|
||||
this.age = age;
|
||||
}
|
||||
|
||||
public java.util.Date getBirthDate() {
|
||||
return birthDate;
|
||||
}
|
||||
|
||||
public void setBirthDate(java.util.Date birthDate) {
|
||||
this.birthDate = birthDate;
|
||||
}
|
||||
|
||||
public Resume getResume() {
|
||||
return resume;
|
||||
}
|
||||
|
||||
public void setResume(Resume resume) {
|
||||
this.resume = resume;
|
||||
}
|
||||
|
||||
public Address getAddress() {
|
||||
return address;
|
||||
}
|
||||
|
||||
public void setAddress(Address address) {
|
||||
this.address = address;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,63 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
|
||||
@Document
|
||||
public class Resume {
|
||||
|
||||
private static final Log LOGGER = LogFactory.getLog(Resume.class);
|
||||
|
||||
@Id
|
||||
private ObjectId id;
|
||||
|
||||
private String education = "";
|
||||
|
||||
private String jobs = "";
|
||||
|
||||
public String getId() {
|
||||
return id.toString();
|
||||
}
|
||||
|
||||
public String getEducation() {
|
||||
return education;
|
||||
}
|
||||
|
||||
public void addEducation(String education) {
|
||||
LOGGER.debug("Adding education " + education);
|
||||
this.education = this.education + (this.education.length() > 0 ? "; " : "") + education;
|
||||
}
|
||||
|
||||
public String getJobs() {
|
||||
return jobs;
|
||||
}
|
||||
|
||||
public void addJob(String job) {
|
||||
LOGGER.debug("Adding job " + job);
|
||||
this.jobs = this.jobs + (this.jobs.length() > 0 ? "; " : "") + job;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Resume [education=" + education + ", jobs=" + jobs + "]";
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<persistence xmlns="http://java.sun.com/xml/ns/persistence" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
version="2.0"
|
||||
xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_2_0.xsd">
|
||||
<persistence-unit name="test" transaction-type="RESOURCE_LOCAL">
|
||||
<provider>org.hibernate.ejb.HibernatePersistence</provider>
|
||||
<class>org.springframework.data.mongodb.crossstore.test.Person</class>
|
||||
<properties>
|
||||
<property name="hibernate.dialect" value="org.hibernate.dialect.HSQLDialect"/>
|
||||
<!--value='create' to build a new database on each run; value='update' to modify an existing database; value='create-drop' means the same as 'create' but also drops tables when Hibernate closes; value='validate' makes no changes to the database-->
|
||||
<property name="hibernate.hbm2ddl.auto" value="update"/>
|
||||
<property name="hibernate.ejb.naming_strategy" value="org.hibernate.cfg.ImprovedNamingStrategy"/>
|
||||
</properties>
|
||||
</persistence-unit>
|
||||
</persistence>
|
||||
@@ -0,0 +1,72 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:tx="http://www.springframework.org/schema/tx"
|
||||
xmlns:jdbc="http://www.springframework.org/schema/jdbc"
|
||||
xmlns:context="http://www.springframework.org/schema/context"
|
||||
xmlns:mongo="http://www.springframework.org/schema/data/mongo"
|
||||
xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd
|
||||
http://www.springframework.org/schema/jdbc https://www.springframework.org/schema/jdbc/spring-jdbc-3.0.xsd
|
||||
http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-3.0.xsd
|
||||
http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx-3.0.xsd
|
||||
http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context-3.0.xsd">
|
||||
|
||||
<context:spring-configured/>
|
||||
|
||||
<context:component-scan base-package="org.springframework.persistence.mongodb.test">
|
||||
<context:exclude-filter expression="org.springframework.stereotype.Controller" type="annotation"/>
|
||||
</context:component-scan>
|
||||
|
||||
<mongo:mapping-converter/>
|
||||
|
||||
<!-- Mongo config -->
|
||||
<bean id="mongoClient" class="org.springframework.data.mongodb.core.MongoClientFactoryBean">
|
||||
<property name="host" value="localhost"/>
|
||||
<property name="port" value="27017"/>
|
||||
</bean>
|
||||
|
||||
<bean id="mongoDbFactory" class="org.springframework.data.mongodb.core.SimpleMongoDbFactory">
|
||||
<constructor-arg name="mongoClient" ref="mongoClient"/>
|
||||
<constructor-arg name="databaseName" value="database"/>
|
||||
</bean>
|
||||
|
||||
<bean id="mongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
<constructor-arg name="mongoDbFactory" ref="mongoDbFactory"/>
|
||||
<constructor-arg name="mongoConverter" ref="mappingConverter"/>
|
||||
</bean>
|
||||
|
||||
<bean class="org.springframework.data.mongodb.core.MongoExceptionTranslator"/>
|
||||
|
||||
<!-- Mongo aspect config -->
|
||||
<bean class="org.springframework.data.mongodb.crossstore.MongoDocumentBacking"
|
||||
factory-method="aspectOf">
|
||||
<property name="changeSetPersister" ref="mongoChangeSetPersister"/>
|
||||
</bean>
|
||||
<bean id="mongoChangeSetPersister"
|
||||
class="org.springframework.data.mongodb.crossstore.MongoChangeSetPersister">
|
||||
<property name="mongoTemplate" ref="mongoTemplate"/>
|
||||
<property name="entityManagerFactory" ref="entityManagerFactory"/>
|
||||
</bean>
|
||||
|
||||
<jdbc:embedded-database id="dataSource" type="HSQL">
|
||||
</jdbc:embedded-database>
|
||||
|
||||
<bean id="transactionManager" class="org.springframework.orm.jpa.JpaTransactionManager">
|
||||
<property name="entityManagerFactory" ref="entityManagerFactory"/>
|
||||
</bean>
|
||||
|
||||
<tx:annotation-driven mode="aspectj" transaction-manager="transactionManager"/>
|
||||
|
||||
<bean class="org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean" id="entityManagerFactory">
|
||||
<property name="persistenceUnitName" value="test"/>
|
||||
<property name="dataSource" ref="dataSource"/>
|
||||
<property name="jpaVendorAdapter">
|
||||
<bean class="org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter">
|
||||
<property name="showSql" value="true"/>
|
||||
<property name="generateDdl" value="true"/>
|
||||
<property name="databasePlatform" value="org.hibernate.dialect.HSQLDialect"/>
|
||||
</bean>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
</beans>
|
||||
@@ -0,0 +1,18 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<configuration>
|
||||
|
||||
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
|
||||
<encoder>
|
||||
<pattern>%d %5p %40.40c:%4L - %m%n</pattern>
|
||||
</encoder>
|
||||
</appender>
|
||||
|
||||
<!--
|
||||
<logger name="org.springframework" level="debug" />
|
||||
-->
|
||||
|
||||
<root level="error">
|
||||
<appender-ref ref="console" />
|
||||
</root>
|
||||
|
||||
</configuration>
|
||||
@@ -1,6 +1,5 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
@@ -14,7 +13,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.2.0.M3</version>
|
||||
<version>2.1.14.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -29,22 +28,11 @@
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-assembly-plugin</artifactId>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>wagon-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.asciidoctor</groupId>
|
||||
<artifactId>asciidoctor-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<attributes>
|
||||
<mongo-reactivestreams>${mongo.reactivestreams}</mongo-reactivestreams>
|
||||
<reactor>${reactor}</reactor>
|
||||
</attributes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
|
||||
</build>
|
||||
|
||||
</project>
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.2.0.M3</version>
|
||||
<version>2.1.14.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -83,14 +83,14 @@
|
||||
|
||||
<!-- reactive -->
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-reactivestreams</artifactId>
|
||||
<version>${mongo.reactivestreams}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-async</artifactId>
|
||||
<version>${mongo}</version>
|
||||
<optional>true</optional>
|
||||
@@ -107,7 +107,7 @@
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.projectreactor</groupId>
|
||||
<groupId>io.projectreactor</groupId>
|
||||
<artifactId>reactor-core</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
@@ -119,14 +119,14 @@
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<artifactId>rxjava</artifactId>
|
||||
<version>${rxjava}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<artifactId>rxjava-reactive-streams</artifactId>
|
||||
<version>${rxjava-reactive-streams}</version>
|
||||
<optional>true</optional>
|
||||
@@ -264,27 +264,20 @@
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-stdlib</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-reflect</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlinx</groupId>
|
||||
<artifactId>kotlinx-coroutines-core</artifactId>
|
||||
<version>${kotlin-coroutines}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlinx</groupId>
|
||||
<artifactId>kotlinx-coroutines-reactor</artifactId>
|
||||
<version>${kotlin-coroutines}</version>
|
||||
<optional>true</optional>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-test</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -329,7 +322,6 @@
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<configuration>
|
||||
<useSystemClassLoader>false</useSystemClassLoader>
|
||||
<useFile>false</useFile>
|
||||
<includes>
|
||||
<include>**/*Tests.java</include>
|
||||
|
||||
@@ -0,0 +1,69 @@
|
||||
/*
|
||||
* Copyright 2010-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.springframework.dao.DataAccessResourceFailureException;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* Exception being thrown in case we cannot connect to a MongoDB instance.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class CannotGetMongoDbConnectionException extends DataAccessResourceFailureException {
|
||||
|
||||
private final UserCredentials credentials;
|
||||
private final @Nullable String database;
|
||||
|
||||
private static final long serialVersionUID = 1172099106475265589L;
|
||||
|
||||
public CannotGetMongoDbConnectionException(String msg, Throwable cause) {
|
||||
super(msg, cause);
|
||||
this.database = null;
|
||||
this.credentials = UserCredentials.NO_CREDENTIALS;
|
||||
}
|
||||
|
||||
public CannotGetMongoDbConnectionException(String msg) {
|
||||
this(msg, null, UserCredentials.NO_CREDENTIALS);
|
||||
}
|
||||
|
||||
public CannotGetMongoDbConnectionException(String msg, @Nullable String database, UserCredentials credentials) {
|
||||
super(msg);
|
||||
this.database = database;
|
||||
this.credentials = credentials;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link UserCredentials} that were used when trying to connect to the MongoDB instance.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public UserCredentials getCredentials() {
|
||||
return this.credentials;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the database trying to be accessed.
|
||||
*
|
||||
* @return the database name; can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public String getDatabase() {
|
||||
return database;
|
||||
}
|
||||
}
|
||||
@@ -51,6 +51,7 @@ import org.springframework.core.type.filter.AssignableTypeFilter;
|
||||
import org.springframework.core.type.filter.TypeFilter;
|
||||
import org.springframework.data.annotation.Persistent;
|
||||
import org.springframework.data.config.BeanComponentDefinitionBuilder;
|
||||
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
|
||||
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
@@ -101,6 +102,8 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
BeanDefinition conversionsDefinition = getCustomConversions(element, parserContext);
|
||||
String ctxRef = potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, id);
|
||||
|
||||
createIsNewStrategyFactoryBeanDefinition(ctxRef, parserContext, element);
|
||||
|
||||
// Need a reference to a Mongo instance
|
||||
String dbFactoryRef = element.getAttribute("db-factory-ref");
|
||||
if (!StringUtils.hasText(dbFactoryRef)) {
|
||||
@@ -345,6 +348,20 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
return null;
|
||||
}
|
||||
|
||||
public static String createIsNewStrategyFactoryBeanDefinition(String mappingContextRef, ParserContext context,
|
||||
Element element) {
|
||||
|
||||
BeanDefinitionBuilder mappingContextStrategyFactoryBuilder = BeanDefinitionBuilder
|
||||
.rootBeanDefinition(MappingContextIsNewStrategyFactory.class);
|
||||
mappingContextStrategyFactoryBuilder.addConstructorArgReference(mappingContextRef);
|
||||
|
||||
BeanComponentDefinitionBuilder builder = new BeanComponentDefinitionBuilder(element, context);
|
||||
context.registerBeanComponent(
|
||||
builder.getComponent(mappingContextStrategyFactoryBuilder, IS_NEW_STRATEGY_FACTORY_BEAN_NAME));
|
||||
|
||||
return IS_NEW_STRATEGY_FACTORY_BEAN_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypeFilter} that returns {@literal false} in case any of the given delegates matches.
|
||||
*
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
@@ -27,12 +28,17 @@ import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.type.filter.AnnotationTypeFilter;
|
||||
import org.springframework.data.annotation.Persistent;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
|
||||
import org.springframework.data.mapping.context.PersistentEntities;
|
||||
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.support.CachingIsNewStrategyFactory;
|
||||
import org.springframework.data.support.IsNewStrategyFactory;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
@@ -81,11 +87,23 @@ public abstract class MongoConfigurationSupport {
|
||||
mappingContext.setInitialEntitySet(getInitialEntitySet());
|
||||
mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());
|
||||
mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
|
||||
mappingContext.setAutoIndexCreation(autoIndexCreation());
|
||||
|
||||
return mappingContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link MappingContextIsNewStrategyFactory} wrapped into a {@link CachingIsNewStrategyFactory}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
* @throws ClassNotFoundException
|
||||
*/
|
||||
@Bean
|
||||
public IsNewStrategyFactory isNewStrategyFactory() throws ClassNotFoundException {
|
||||
|
||||
return new CachingIsNewStrategyFactory(new MappingContextIsNewStrategyFactory(
|
||||
new PersistentEntities(Arrays.<MappingContext<?, ?>> asList(new MappingContext[] { mongoMappingContext() }))));
|
||||
}
|
||||
|
||||
/**
|
||||
* Register custom {@link Converter}s in a {@link CustomConversions} object if required. These
|
||||
* {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and
|
||||
@@ -172,16 +190,4 @@ public abstract class MongoConfigurationSupport {
|
||||
return abbreviateFieldNames() ? new CamelCaseAbbreviatingFieldNamingStrategy()
|
||||
: PropertyNameFieldNamingStrategy.INSTANCE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure whether to automatically create indices for domain types by deriving the
|
||||
* {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not.
|
||||
*
|
||||
* @return {@literal true} by default. <br />
|
||||
* <strong>INFO</strong>: As of 3.x the default will be set to {@literal false}.
|
||||
* @since 2.2
|
||||
*/
|
||||
protected boolean autoIndexCreation() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,7 +17,6 @@ package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.lang.reflect.Method;
|
||||
import java.net.URLDecoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
@@ -27,7 +26,6 @@ import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.MongoCredential;
|
||||
@@ -80,23 +78,12 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
verifyUserNamePresent(userNameAndPassword);
|
||||
credentials.add(MongoCredential.createGSSAPICredential(userNameAndPassword[0]));
|
||||
} else if ("MONGODB-CR".equals(authMechanism)) {
|
||||
} else if (MongoCredential.MONGODB_CR_MECHANISM.equals(authMechanism)) {
|
||||
|
||||
verifyUsernameAndPasswordPresent(userNameAndPassword);
|
||||
verifyDatabasePresent(database);
|
||||
|
||||
Method createCRCredentialMethod = ReflectionUtils.findMethod(MongoCredential.class,
|
||||
"createMongoCRCredential", String.class, String.class, char[].class);
|
||||
|
||||
if (createCRCredentialMethod == null) {
|
||||
throw new IllegalArgumentException("MONGODB-CR is no longer supported.");
|
||||
}
|
||||
|
||||
MongoCredential credential = MongoCredential.class
|
||||
.cast(ReflectionUtils.invokeMethod(createCRCredentialMethod, null, userNameAndPassword[0], database,
|
||||
userNameAndPassword[1].toCharArray()));
|
||||
credentials.add(credential);
|
||||
|
||||
credentials.add(MongoCredential.createMongoCRCredential(userNameAndPassword[0], database,
|
||||
userNameAndPassword[1].toCharArray()));
|
||||
} else if (MongoCredential.MONGODB_X509_MECHANISM.equals(authMechanism)) {
|
||||
|
||||
verifyUserNamePresent(userNameAndPassword);
|
||||
|
||||
@@ -31,7 +31,6 @@ import com.mongodb.bulk.BulkWriteResult;
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
* @author Minsu Kim
|
||||
* @since 1.9
|
||||
*/
|
||||
public interface BulkOperations {
|
||||
@@ -136,29 +135,6 @@ public interface BulkOperations {
|
||||
*/
|
||||
BulkOperations remove(List<Query> removes);
|
||||
|
||||
/**
|
||||
* Add a single replace operation to the bulk operation.
|
||||
*
|
||||
* @param query Update criteria.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default BulkOperations replaceOne(Query query, Object replacement) {
|
||||
return replaceOne(query, replacement, FindAndReplaceOptions.empty());
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a single replace operation to the bulk operation.
|
||||
*
|
||||
* @param query Update criteria.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options);
|
||||
|
||||
/**
|
||||
* Execute all bulk operations using the default write concern.
|
||||
*
|
||||
|
||||
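For context on the interface shown above, here is a hedged usage sketch of the bulk API that exists on both sides of this comparison; replaceOne(Query, Object, ...) is the 2.2-side addition removed here. The collection name and field values are illustrative.

import java.util.Arrays;

import org.bson.Document;
import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.bulk.BulkWriteResult;

// Illustrative sketch only: queueing a couple of writes and flushing them in one bulk call.
class BulkOperationsSketch {

	BulkWriteResult bulkWrite(MongoTemplate mongoTemplate) {

		BulkOperations ops = mongoTemplate.bulkOps(BulkMode.UNORDERED, "person");

		ops.insert(Arrays.asList(new Document("name", "Thomas"), new Document("name", "Oliver")));
		ops.updateOne(Query.query(Criteria.where("name").is("Thomas")), Update.update("age", 44));

		return ops.execute(); // sends all queued models as a single bulk write
	}
}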
@@ -20,7 +20,6 @@ import lombok.EqualsAndHashCode;
|
||||
import java.time.Instant;
|
||||
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
|
||||
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
@@ -85,19 +84,8 @@ public class ChangeStreamEvent<T> {
|
||||
@Nullable
|
||||
public Instant getTimestamp() {
|
||||
|
||||
return getBsonTimestamp() != null ? converter.getConversionService().convert(raw.getClusterTime(), Instant.class)
|
||||
: null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ChangeStreamDocument#getClusterTime() cluster time}.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
@Nullable
|
||||
public BsonTimestamp getBsonTimestamp() {
|
||||
return raw != null ? raw.getClusterTime() : null;
|
||||
return raw != null && raw.getClusterTime() != null
|
||||
? converter.getConversionService().convert(raw.getClusterTime(), Instant.class) : null;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -21,15 +21,12 @@ import java.time.Instant;
|
||||
import java.util.Arrays;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.client.model.changestream.ChangeStreamDocument;
|
||||
import com.mongodb.client.model.changestream.FullDocument;
|
||||
@@ -50,7 +47,7 @@ public class ChangeStreamOptions {
|
||||
private @Nullable BsonValue resumeToken;
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private @Nullable Instant resumeTimestamp;
|
||||
|
||||
protected ChangeStreamOptions() {}
|
||||
|
||||
@@ -86,15 +83,7 @@ public class ChangeStreamOptions {
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<Instant> getResumeTimestamp() {
|
||||
return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, Instant.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
* @since 2.2
|
||||
*/
|
||||
public Optional<BsonTimestamp> getResumeBsonTimestamp() {
|
||||
return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, BsonTimestamp.class));
|
||||
return Optional.ofNullable(resumeTimestamp);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -114,29 +103,6 @@ public class ChangeStreamOptions {
|
||||
return new ChangeStreamOptionsBuilder();
|
||||
}
|
||||
|
||||
private static <T> T asTimestampOfType(Object timestamp, Class<T> targetType) {
|
||||
return targetType.cast(doGetTimestamp(timestamp, targetType));
|
||||
}
|
||||
|
||||
private static <T> Object doGetTimestamp(Object timestamp, Class<T> targetType) {
|
||||
|
||||
if (ClassUtils.isAssignableValue(targetType, timestamp)) {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
if (timestamp instanceof Instant) {
|
||||
return new BsonTimestamp((int) ((Instant) timestamp).getEpochSecond(), 0);
|
||||
}
|
||||
|
||||
if (timestamp instanceof BsonTimestamp) {
|
||||
return Instant.ofEpochSecond(((BsonTimestamp) timestamp).getTime());
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(
|
||||
"o_O that should actually not happen. The timestamp should be an Instant or a BsonTimestamp but was "
|
||||
+ ObjectUtils.nullSafeClassName(timestamp));
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for creating {@link ChangeStreamOptions}.
|
||||
*
|
||||
@@ -149,7 +115,7 @@ public class ChangeStreamOptions {
|
||||
private @Nullable BsonValue resumeToken;
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private @Nullable Instant resumeTimestamp;
|
||||
|
||||
private ChangeStreamOptionsBuilder() {}
|
||||
|
||||
@@ -258,21 +224,6 @@ public class ChangeStreamOptions {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the cluster time to resume from.
|
||||
*
|
||||
* @param resumeTimestamp must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAt(BsonTimestamp resumeTimestamp) {
|
||||
|
||||
Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!");
|
||||
|
||||
this.resumeTimestamp = resumeTimestamp;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the built {@link ChangeStreamOptions}
|
||||
*/
|
||||
|
||||
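A hedged sketch of the builder shown above: resuming a change stream from a point in time via resumeAt(Instant), which both sides of this comparison support, while the BsonTimestamp overload and getResumeBsonTimestamp() are the 2.2-side additions removed by this change. The five-minute offset is illustrative.

import java.time.Instant;
import java.time.temporal.ChronoUnit;

import org.springframework.data.mongodb.core.ChangeStreamOptions;

// Illustrative sketch only: building resume options for a change stream.
class ChangeStreamOptionsSketch {

	ChangeStreamOptions resumeFromFiveMinutesAgo() {

		return ChangeStreamOptions.builder()
				.resumeAt(Instant.now().minus(5, ChronoUnit.MINUTES)) // cluster time to resume from
				.returnFullDocumentOnUpdate() // ask MongoDB to look up the full document on updates
				.build();
	}
}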
@@ -25,7 +25,7 @@ import com.mongodb.client.FindIterable;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
interface CursorPreparer {
|
||||
public interface CursorPreparer {
|
||||
|
||||
/**
|
||||
* Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
|
||||
|
||||
@@ -26,6 +26,7 @@ import java.util.stream.Collectors;

import org.bson.Document;
import org.bson.conversions.Bson;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
@@ -37,8 +38,18 @@ import org.springframework.data.util.Pair;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

import com.mongodb.BulkWriteException;
import com.mongodb.WriteConcern;
import com.mongodb.client.model.*;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.DeleteManyModel;
import com.mongodb.client.model.DeleteOneModel;
import com.mongodb.client.model.DeleteOptions;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.UpdateManyModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.WriteModel;

/**
* Default implementation for {@link BulkOperations}.
@@ -47,7 +58,7 @@ import com.mongodb.client.model.*;
* @author Oliver Gierke
* @author Christoph Strobl
* @author Mark Paluch
* @author Minsu Kim
* @author Michail Nikolaev
* @since 1.9
*/
class DefaultBulkOperations implements BulkOperations {
@@ -112,7 +123,16 @@ class DefaultBulkOperations implements BulkOperations {

Assert.notNull(document, "Document must not be null!");

models.add(new InsertOneModel<>(getMappedObject(document)));
if (document instanceof Document) {

models.add(new InsertOneModel<>((Document) document));
return this;
}

Document sink = new Document();
mongoOperations.getConverter().write(document, sink);

models.add(new InsertOneModel<>(sink));

return this;
}
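Usage-wise, the hunk above means a raw `Document` is added to the bulk as-is, while any other object is first written through the configured converter. An illustrative sketch (the `Person` type and `template` instance are hypothetical, not part of the diff):

```java
BulkOperations bulk = template.bulkOps(BulkOperations.BulkMode.UNORDERED, Person.class);
bulk.insert(new Document("firstName", "Ada")); // added as-is
bulk.insert(new Person("Ada", "Lovelace"));    // converted via the MongoConverter first
bulk.execute();
```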
@@ -247,27 +267,6 @@ class DefaultBulkOperations implements BulkOperations {
return this;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#replaceOne(org.springframework.data.mongodb.core.query.Query, java.lang.Object, org.springframework.data.mongodb.core.FindAndReplaceOptions)
*/
@Override
public BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) {

Assert.notNull(query, "Query must not be null!");
Assert.notNull(replacement, "Replacement must not be null!");
Assert.notNull(options, "Options must not be null!");

ReplaceOptions replaceOptions = new ReplaceOptions();
replaceOptions.upsert(options.isUpsert());
query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation);

models.add(
new ReplaceOneModel<>(getMappedQuery(query.getQueryObject()), getMappedObject(replacement), replaceOptions));

return this;
}
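For the `replaceOne` variant above, a short usage sketch (hypothetical `Person`/`bulk` values; `FindAndReplaceOptions.options().upsert()` is the option object that gets translated into the driver's `ReplaceOptions`):

```java
void replaceAda(BulkOperations bulk, Person replacement) {
	bulk.replaceOne(Query.query(Criteria.where("firstName").is("Ada")), replacement,
			FindAndReplaceOptions.options().upsert());
	bulk.execute();
}
```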
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.BulkOperations#executeBulk()
@@ -278,6 +277,10 @@ class DefaultBulkOperations implements BulkOperations {
try {

return mongoOperations.execute(collectionName, collection -> {

if (defaultWriteConcern != null) {
collection = collection.withWriteConcern(defaultWriteConcern);
}
return collection.bulkWrite(models.stream().map(this::mapWriteModel).collect(Collectors.toList()), bulkOptions);
});
} finally {
@@ -355,17 +358,6 @@ class DefaultBulkOperations implements BulkOperations {
return bulkOperationContext.getQueryMapper().getMappedObject(query, bulkOperationContext.getEntity());
}

private Document getMappedObject(Object source) {

if (source instanceof Document) {
return (Document) source;
}

Document sink = new Document();
mongoOperations.getConverter().write(source, sink);
return sink;
}

private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {

BulkWriteOptions options = new BulkWriteOptions();
@@ -38,15 +38,17 @@ import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;

import com.mongodb.util.JSONParseException;

/**
* Common operations performed on an entity in the context of its mapping metadata.
*
* @author Oliver Gierke
* @author Mark Paluch
* @author Christoph Strobl
* @since 2.1
* @see MongoTemplate
* @see ReactiveMongoTemplate
@@ -114,17 +116,6 @@ class EntityOperations {
return context.getRequiredPersistentEntity(entityClass).getCollection();
}

/**
* Returns the collection name to be used for the given entity.
*
* @param obj can be {@literal null}.
* @return
*/
@Nullable
public String determineEntityCollectionName(@Nullable Object obj) {
return null == obj ? null : determineCollectionName(obj.getClass());
}

public Query getByIdInQuery(Collection<?> entities) {

MultiValueMap<String, Object> byIds = new LinkedMultiValueMap<>();
@@ -164,15 +155,8 @@ class EntityOperations {

try {
return Document.parse(source);
} catch (org.bson.json.JsonParseException o_O) {
} catch (JSONParseException | org.bson.json.JsonParseException o_O) {
throw new MappingException("Could not parse given String to save into a JSON document!", o_O);
} catch (RuntimeException o_O) {

// legacy 3.x exception
if (ClassUtils.matchesTypeName(o_O.getClass(), "JSONParseException")) {
throw new MappingException("Could not parse given String to save into a JSON document!", o_O);
}
throw o_O;
}
}
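The hunk above is the interesting part of this file: one side catches the legacy `com.mongodb.util.JSONParseException` directly, the other avoids the compile-time dependency and matches the legacy exception by type name at runtime. A hedged, self-contained sketch of that pattern:

```java
import org.bson.Document;
import org.springframework.data.mapping.MappingException;
import org.springframework.util.ClassUtils;

class JsonParsing {

	static Document parseSafely(String source) {

		try {
			return Document.parse(source);
		} catch (org.bson.json.JsonParseException o_O) {
			throw new MappingException("Could not parse given String to save into a JSON document!", o_O);
		} catch (RuntimeException o_O) {

			// legacy 3.x driver exception, matched by name to avoid the compile-time dependency
			if (ClassUtils.matchesTypeName(o_O.getClass(), "JSONParseException")) {
				throw new MappingException("Could not parse given String to save into a JSON document!", o_O);
			}
			throw o_O;
		}
	}
}
```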
@@ -205,16 +189,6 @@ class EntityOperations {
*/
Query getByIdQuery();

/**
* Returns the {@link Query} to remove an entity by its {@literal id} and if applicable {@literal version}.
*
* @return the {@link Query} to use for removing the entity. Never {@literal null}.
* @since 2.2
*/
default Query getRemoveByQuery() {
return isVersionedEntity() ? getQueryForVersion() : getByIdQuery();
}

/**
* Returns the {@link Query} to find the entity in its current version.
*
@@ -245,11 +219,9 @@ class EntityOperations {
}

/**
* Returns the value of the version if the entity {@link #isVersionedEntity() has a version property}.
* Returns the value of the version if the entity has a version property, {@literal null} otherwise.
*
* @return the entity version. Can be {@literal null}.
* @throws IllegalStateException if the entity does not define a {@literal version} property. Make sure to check
* {@link #isVersionedEntity()}.
* @return
*/
@Nullable
Object getVersion();
@@ -305,8 +277,8 @@ class EntityOperations {
/**
* Returns the current version value if the entity has a version property.
*
* @return the current version or {@literal null} in case it's uninitialized.
* @throws IllegalStateException if the entity does not define a {@literal version} property.
* @return the current version or {@literal null} in case it's uninitialized or the entity doesn't expose a version
* property.
*/
@Nullable
Number getVersion();
@@ -508,10 +480,10 @@ class EntityOperations {
public Query getQueryForVersion() {

MongoPersistentProperty idProperty = entity.getRequiredIdProperty();
MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty();
MongoPersistentProperty property = entity.getRequiredVersionProperty();

return new Query(Criteria.where(idProperty.getName()).is(getId())//
.and(versionProperty.getName()).is(getVersion()));
.and(property.getName()).is(getVersion()));
}

/*
@@ -632,22 +604,19 @@ class EntityOperations {
public T populateIdIfNecessary(@Nullable Object id) {

if (id == null) {
return null;
return propertyAccessor.getBean();
}

T bean = propertyAccessor.getBean();
MongoPersistentProperty idProperty = entity.getIdProperty();

if (idProperty == null) {
return bean;
return propertyAccessor.getBean();
}

if (identifierAccessor.getIdentifier() != null) {
return bean;
return propertyAccessor.getBean();
}

propertyAccessor.setProperty(idProperty, id);

return propertyAccessor.getBean();
}
@@ -21,8 +21,9 @@ import com.mongodb.reactivestreams.client.FindPublisher;
* Simple callback interface to allow customization of a {@link FindPublisher}.
*
* @author Mark Paluch
* @author Konstantin Volivach
*/
interface FindPublisherPreparer {
public interface FindPublisherPreparer {

/**
* Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
@@ -92,7 +92,7 @@ public class MappedDocument {
* mapped to the specific domain type.
*
* @author Christoph Strobl
* @since 2.2
* @since 2.1.4
*/
class MappedUpdate implements UpdateDefinition {

@@ -137,14 +137,5 @@ public class MappedDocument {
public Boolean isIsolated() {
return delegate.isIsolated();
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#getArrayFilters()
*/
@Override
public List<ArrayFilter> getArrayFilters() {
return delegate.getArrayFilters();
}
}
}
@@ -1,210 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder;
|
||||
import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain
|
||||
* domain type meta information which considers {@link org.springframework.data.mongodb.core.mapping.Field field names}
|
||||
* and {@link org.springframework.data.mongodb.core.convert.MongoCustomConversions custom conversions}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
*/
|
||||
class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
|
||||
private final MongoConverter converter;
|
||||
private final MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link MappingMongoJsonSchemaCreator}.
|
||||
*
|
||||
* @param converter must not be {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
MappingMongoJsonSchemaCreator(MongoConverter converter) {
|
||||
|
||||
Assert.notNull(converter, "Converter must not be null!");
|
||||
this.converter = converter;
|
||||
this.mappingContext = (MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty>) converter
|
||||
.getMappingContext();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.data.mongodb.core.MongoJsonSchemaCreator#createSchemaFor(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public MongoJsonSchema createSchemaFor(Class<?> type) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(type);
|
||||
MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder();
|
||||
|
||||
List<JsonSchemaProperty> schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity);
|
||||
schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0]));
|
||||
|
||||
return schemaBuilder.build();
|
||||
|
||||
}
|
||||
|
||||
private List<JsonSchemaProperty> computePropertiesForEntity(List<MongoPersistentProperty> path,
|
||||
MongoPersistentEntity<?> entity) {
|
||||
|
||||
List<JsonSchemaProperty> schemaProperties = new ArrayList<>();
|
||||
|
||||
for (MongoPersistentProperty nested : entity) {
|
||||
|
||||
List<MongoPersistentProperty> currentPath = new ArrayList<>(path);
|
||||
|
||||
if (path.contains(nested)) { // cycle guard
|
||||
schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)),
|
||||
Object.class, false));
|
||||
break;
|
||||
}
|
||||
|
||||
currentPath.add(nested);
|
||||
schemaProperties.add(computeSchemaForProperty(currentPath));
|
||||
}
|
||||
|
||||
return schemaProperties;
|
||||
}
|
||||
|
||||
private JsonSchemaProperty computeSchemaForProperty(List<MongoPersistentProperty> path) {
|
||||
|
||||
MongoPersistentProperty property = CollectionUtils.lastElement(path);
|
||||
|
||||
boolean required = isRequiredProperty(property);
|
||||
Class<?> rawTargetType = computeTargetType(property); // target type before conversion
|
||||
Class<?> targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type
|
||||
|
||||
if (property.isEntity() && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) {
|
||||
return createObjectSchemaPropertyForEntity(path, property, required);
|
||||
}
|
||||
|
||||
String fieldName = computePropertyFieldName(property);
|
||||
|
||||
if (property.isCollectionLike()) {
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
} else if (property.isMap()) {
|
||||
return createSchemaProperty(fieldName, Type.objectType(), required);
|
||||
} else if (ClassUtils.isAssignable(Enum.class, targetType)) {
|
||||
return createEnumSchemaProperty(fieldName, targetType, required);
|
||||
}
|
||||
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createObjectSchemaPropertyForEntity(List<MongoPersistentProperty> path,
|
||||
MongoPersistentProperty property, boolean required) {
|
||||
|
||||
ObjectJsonSchemaProperty target = JsonSchemaProperty.object(property.getName());
|
||||
List<JsonSchemaProperty> nestedProperties = computePropertiesForEntity(path,
|
||||
mappingContext.getRequiredPersistentEntity(property));
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(
|
||||
target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required);
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createEnumSchemaProperty(String fieldName, Class<?> targetType, boolean required) {
|
||||
|
||||
List<Object> possibleValues = new ArrayList<>();
|
||||
|
||||
for (Object enumValue : EnumSet.allOf((Class) targetType)) {
|
||||
possibleValues.add(converter.convertToMongoType(enumValue));
|
||||
}
|
||||
|
||||
targetType = possibleValues.isEmpty() ? targetType : possibleValues.iterator().next().getClass();
|
||||
return createSchemaProperty(fieldName, targetType, required, possibleValues);
|
||||
}
|
||||
|
||||
JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required) {
|
||||
return createSchemaProperty(fieldName, type, required, Collections.emptyList());
|
||||
}
|
||||
|
||||
JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required,
|
||||
Collection<?> possibleValues) {
|
||||
|
||||
TypedJsonSchemaObject schemaObject = type instanceof Type ? JsonSchemaObject.of(Type.class.cast(type))
|
||||
: JsonSchemaObject.of(Class.class.cast(type));
|
||||
|
||||
if (!CollectionUtils.isEmpty(possibleValues)) {
|
||||
schemaObject = schemaObject.possibleValues(possibleValues);
|
||||
}
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required);
|
||||
}
|
||||
|
||||
private String computePropertyFieldName(PersistentProperty property) {
|
||||
|
||||
return property instanceof MongoPersistentProperty ? ((MongoPersistentProperty) property).getFieldName()
|
||||
: property.getName();
|
||||
}
|
||||
|
||||
private boolean isRequiredProperty(PersistentProperty property) {
|
||||
return property.getType().isPrimitive();
|
||||
}
|
||||
|
||||
private Class<?> computeTargetType(PersistentProperty<?> property) {
|
||||
|
||||
if (!(property instanceof MongoPersistentProperty)) {
|
||||
return property.getType();
|
||||
}
|
||||
|
||||
MongoPersistentProperty mongoProperty = (MongoPersistentProperty) property;
|
||||
if (!mongoProperty.isIdProperty()) {
|
||||
return mongoProperty.getFieldType();
|
||||
}
|
||||
|
||||
if (mongoProperty.hasExplicitWriteTarget()) {
|
||||
return mongoProperty.getRequiredAnnotation(Field.class).targetType().getJavaClass();
|
||||
}
|
||||
|
||||
return mongoProperty.getFieldType() != mongoProperty.getActualType() ? Object.class : mongoProperty.getFieldType();
|
||||
}
|
||||
|
||||
static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) {
|
||||
|
||||
if (!required) {
|
||||
return property;
|
||||
}
|
||||
|
||||
return JsonSchemaProperty.required(property);
|
||||
}
|
||||
}
|
||||
@@ -41,8 +41,7 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
|
||||
private static final MongoClientOptions DEFAULT_MONGO_OPTIONS = MongoClientOptions.builder().build();
|
||||
|
||||
// TODO: Mongo Driver 4 - use application name instead of description if not available
|
||||
private @Nullable String description = DEFAULT_MONGO_OPTIONS.getApplicationName();
|
||||
private @Nullable String description = DEFAULT_MONGO_OPTIONS.getDescription();
|
||||
private int minConnectionsPerHost = DEFAULT_MONGO_OPTIONS.getMinConnectionsPerHost();
|
||||
private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.getConnectionsPerHost();
|
||||
private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS
|
||||
@@ -52,8 +51,6 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
private int maxConnectionLifeTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionLifeTime();
|
||||
private int connectTimeout = DEFAULT_MONGO_OPTIONS.getConnectTimeout();
|
||||
private int socketTimeout = DEFAULT_MONGO_OPTIONS.getSocketTimeout();
|
||||
|
||||
// TODO: Mongo Driver 4 - check if available
|
||||
private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.isSocketKeepAlive();
|
||||
private @Nullable ReadPreference readPreference = DEFAULT_MONGO_OPTIONS.getReadPreference();
|
||||
private DBDecoderFactory dbDecoderFactory = DEFAULT_MONGO_OPTIONS.getDbDecoderFactory();
|
||||
@@ -61,8 +58,6 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
private @Nullable WriteConcern writeConcern = DEFAULT_MONGO_OPTIONS.getWriteConcern();
|
||||
private @Nullable SocketFactory socketFactory = DEFAULT_MONGO_OPTIONS.getSocketFactory();
|
||||
private boolean cursorFinalizerEnabled = DEFAULT_MONGO_OPTIONS.isCursorFinalizerEnabled();
|
||||
|
||||
// TODO: Mongo Driver 4 - remove this option
|
||||
private boolean alwaysUseMBeans = DEFAULT_MONGO_OPTIONS.isAlwaysUseMBeans();
|
||||
private int heartbeatFrequency = DEFAULT_MONGO_OPTIONS.getHeartbeatFrequency();
|
||||
private int minHeartbeatFrequency = DEFAULT_MONGO_OPTIONS.getMinHeartbeatFrequency();
|
||||
@@ -79,7 +74,6 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
*
|
||||
* @param description
|
||||
*/
|
||||
// TODO: Mongo Driver 4 - deprecate that one and add application name
|
||||
public void setDescription(@Nullable String description) {
|
||||
this.description = description;
|
||||
}
|
||||
@@ -241,7 +235,7 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
}
|
||||
|
||||
/**
|
||||
* This controls if the driver should use an SSL connection. Defaults to {@literal false}.
* This controls if the driver should use an SSL connection. Defaults to {@literal false}.
|
||||
*
|
||||
* @param ssl
|
||||
*/
|
||||
@@ -291,7 +285,7 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
.cursorFinalizerEnabled(cursorFinalizerEnabled) //
|
||||
.dbDecoderFactory(dbDecoderFactory) //
|
||||
.dbEncoderFactory(dbEncoderFactory) //
|
||||
.applicationName(description) // TODO: Mongo Driver 4 - use application name if description not available
|
||||
.description(description) //
|
||||
.heartbeatConnectTimeout(heartbeatConnectTimeout) //
|
||||
.heartbeatFrequency(heartbeatFrequency) //
|
||||
.heartbeatSocketTimeout(heartbeatSocketTimeout) //
|
||||
@@ -303,9 +297,8 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
.readPreference(readPreference) //
|
||||
.requiredReplicaSetName(requiredReplicaSetName) //
|
||||
.serverSelectionTimeout(serverSelectionTimeout) //
|
||||
.sslEnabled(ssl) //
|
||||
.socketFactory(socketFactoryToUse) // TODO: Mongo Driver 4 - remove if not available
|
||||
.socketKeepAlive(socketKeepAlive) // TODO: Mongo Driver 4 - remove if not available
|
||||
.socketFactory(socketFactoryToUse) //
|
||||
.socketKeepAlive(socketKeepAlive) //
|
||||
.socketTimeout(socketTimeout) //
|
||||
.threadsAllowedToBlockForConnectionMultiplier(threadsAllowedToBlockForConnectionMultiplier) //
|
||||
.writeConcern(writeConcern).build();
|
||||
|
||||
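A brief, hedged configuration sketch for the factory bean touched above (only `setDescription` appears in the hunks; `setSsl` and the `AbstractFactoryBean` lifecycle calls are assumed):

```java
MongoClientOptions buildOptions() throws Exception {
	MongoClientOptionsFactoryBean factory = new MongoClientOptionsFactoryBean();
	factory.setDescription("my-application"); // reused as application name in newer drivers, per the TODOs above
	factory.setSsl(true);                     // assumed setter, matching the "@param ssl" Javadoc
	factory.afterPropertiesSet();
	return factory.getObject();
}
```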
@@ -1,75 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link MongoJsonSchemaCreator} extracts the {@link MongoJsonSchema} for a given {@link Class} by applying the
|
||||
* following mapping rules.
|
||||
* <p>
|
||||
* <strong>Required Properties</strong>
|
||||
* <ul>
|
||||
* <li>Properties of primitive type</li>
|
||||
* </ul>
|
||||
* <strong>Ignored Properties</strong>
|
||||
* <ul>
|
||||
* <li>All properties annotated with {@link org.springframework.data.annotation.Transient}</li>
|
||||
* </ul>
|
||||
* <strong>Property Type Mapping</strong>
|
||||
* <ul>
|
||||
* <li>{@link java.lang.Object} -> {@code type : 'object'}</li>
|
||||
* <li>{@link java.util.Arrays} -> {@code type : 'array'}</li>
|
||||
* <li>{@link java.util.Collection} -> {@code type : 'array'}</li>
|
||||
* <li>{@link java.util.Map} -> {@code type : 'object'}</li>
|
||||
* <li>{@link java.lang.Enum} -> {@code type : 'string', enum : [the enum values]}</li>
|
||||
* <li>Simple Types -> {@code type : 'the corresponding bson type' }</li>
|
||||
* <li>Domain Types -> {@code type : 'object', properties : {the types properties} }</li>
|
||||
* </ul>
|
||||
* <br />
|
||||
* {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into
|
||||
* {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more
|
||||
* specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation.
|
||||
* </p>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public interface MongoJsonSchemaCreator {
|
||||
|
||||
/**
|
||||
* Create the {@link MongoJsonSchema} for the given {@link Class type}.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
MongoJsonSchema createSchemaFor(Class<?> type);
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given
|
||||
* {@link MongoConverter}.
|
||||
*
|
||||
* @param mongoConverter must not be {@literal null}.
|
||||
* @return new instance of {@link MongoJsonSchemaCreator}.
|
||||
*/
|
||||
static MongoJsonSchemaCreator create(MongoConverter mongoConverter) {
|
||||
|
||||
Assert.notNull(mongoConverter, "MongoConverter must not be null!");
|
||||
return new MappingMongoJsonSchemaCreator(mongoConverter);
|
||||
}
|
||||
}
|
||||
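A usage sketch for the interface above (illustrative; the `Person` domain class and the available `MongoConverter` are hypothetical):

```java
org.bson.Document schemaFor(org.springframework.data.mongodb.core.convert.MongoConverter converter) {
	MongoJsonSchema schema = MongoJsonSchemaCreator.create(converter).createSchemaFor(Person.class);
	return schema.toDocument(); // e.g. to inspect or register as a collection validator
}
```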
@@ -57,6 +57,10 @@ import com.mongodb.client.result.UpdateResult;
|
||||
* Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but
|
||||
* a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK
|
||||
* proxy).
|
||||
* <p />
|
||||
* <strong>NOTE:</strong> Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
|
||||
* specific documentation to learn more about <a href="https://docs.mongodb.com/manual/core/transactions/">Multi
|
||||
* Document Transactions</a>.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @author Mark Pollack
|
||||
@@ -289,12 +293,15 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
Set<String> getCollectionNames();
|
||||
|
||||
/**
|
||||
* Get a collection by name, creating it if it doesn't exist.
|
||||
* Get a {@link MongoCollection} by its name. The returned collection may not exist yet (except in local memory) and
|
||||
* is created on first interaction with the server. Collections can be explicitly created via
|
||||
* {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class)
|
||||
* exists} first.
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection. Must not be {@literal null}.
|
||||
* @return an existing collection or a newly created one.
|
||||
* @return an existing collection or one created on first server interaction.
|
||||
*/
|
||||
MongoCollection<Document> getCollection(String collectionName);
|
||||
|
||||
@@ -1094,11 +1101,6 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1110,11 +1112,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
|
||||
* must solely consist of document field references as we lack type information to map potential property references
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
@@ -1125,11 +1123,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* class to map the given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1144,7 +1138,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
@@ -1206,7 +1200,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
@@ -1224,7 +1218,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
@@ -1238,7 +1232,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document.
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1254,6 +1250,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1266,8 +1265,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document.
|
||||
*
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
@@ -1280,7 +1281,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the collection of the entity class that matches the query document with
|
||||
* the provided update document.
|
||||
* the provided update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1296,6 +1299,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1309,6 +1315,8 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
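To make the repeated NOTE concrete: a sorted `Query` does not influence which document `updateFirst`/`upsert` touches, because `db.collection.updateOne` ignores sorting, whereas `findAndModify` honours it. A hedged sketch (the `Ticket` type and `template` instance are hypothetical):

```java
Query oldestOpen = Query.query(Criteria.where("state").is("OPEN")).with(Sort.by("createdAt"));

// sort is ignored here; MongoTemplate logs a warning for sorted queries
template.updateFirst(oldestOpen, Update.update("state", "CLOSED"), Ticket.class);

// sort is applied here
template.findAndModify(oldestOpen, Update.update("state", "CLOSED"), Ticket.class);
```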
@@ -1363,10 +1371,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
UpdateResult updateMulti(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Remove the given object from the collection by {@literal id} and (if applicable) its
|
||||
* {@link org.springframework.data.annotation.Version}. <br />
|
||||
* Use {@link DeleteResult#getDeletedCount()} for insight whether an {@link DeleteResult#wasAcknowledged()
|
||||
* acknowledged} remove operation was successful or not.
|
||||
* Remove the given object from the collection by id.
|
||||
*
|
||||
* @param object must not be {@literal null}.
|
||||
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
|
||||
@@ -1374,10 +1379,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
DeleteResult remove(Object object);
|
||||
|
||||
/**
|
||||
* Removes the given object from the given collection by {@literal id} and (if applicable) its
|
||||
* {@link org.springframework.data.annotation.Version}. <br />
|
||||
* Use {@link DeleteResult#getDeletedCount()} for insight whether an {@link DeleteResult#wasAcknowledged()
|
||||
* acknowledged} remove operation was successful or not.
|
||||
* Removes the given object from the given collection.
|
||||
*
|
||||
* @param object must not be {@literal null}.
|
||||
* @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty.
|
||||
|
||||
@@ -68,16 +68,7 @@ import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.Fields;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.JsonSchemaMapper;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper;
|
||||
import org.springframework.data.mongodb.core.convert.MongoWriter;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.convert.*;
|
||||
import org.springframework.data.mongodb.core.index.IndexOperations;
|
||||
import org.springframework.data.mongodb.core.index.IndexOperationsProvider;
|
||||
import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher;
|
||||
@@ -104,7 +95,6 @@ import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
@@ -256,14 +246,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
mappingContext = this.mongoConverter.getMappingContext();
|
||||
// We create indexes based on mapping events
|
||||
if (mappingContext instanceof MongoMappingContext) {
|
||||
|
||||
MongoMappingContext mappingContext = (MongoMappingContext) this.mappingContext;
|
||||
|
||||
if (mappingContext.isAutoIndexCreation()) {
|
||||
|
||||
indexCreator = new MongoPersistentEntityIndexCreator(mappingContext, this);
|
||||
eventPublisher = new MongoMappingEventPublisher(indexCreator);
|
||||
mappingContext.setApplicationEventPublisher(eventPublisher);
|
||||
indexCreator = new MongoPersistentEntityIndexCreator((MongoMappingContext) mappingContext, this);
|
||||
eventPublisher = new MongoMappingEventPublisher(indexCreator);
|
||||
if (mappingContext instanceof ApplicationEventPublisherAware) {
|
||||
((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -390,9 +376,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#executeAsStream(org.springframework.data.mongodb.core.query.Query, java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T> CloseableIterator<T> stream(final Query query, final Class<T> entityType) {
|
||||
|
||||
return stream(query, entityType, operations.determineCollectionName(entityType));
|
||||
public <T> CloseableIterator<T> stream(Query query, Class<T> entityType) {
|
||||
return stream(query, entityType, getCollectionName(entityType));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -400,11 +385,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#stream(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public <T> CloseableIterator<T> stream(final Query query, final Class<T> entityType, final String collectionName) {
|
||||
public <T> CloseableIterator<T> stream(Query query, Class<T> entityType, String collectionName) {
|
||||
return doStream(query, entityType, collectionName, entityType);
|
||||
}
|
||||
|
||||
protected <T> CloseableIterator<T> doStream(final Query query, final Class<?> entityType, final String collectionName,
|
||||
protected <T> CloseableIterator<T> doStream(Query query, final Class<?> entityType, String collectionName,
|
||||
Class<T> returnType) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
@@ -418,7 +403,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
public CloseableIterator<T> doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = mappingContext.getRequiredPersistentEntity(entityType);
|
||||
MongoPersistentEntity<?> persistentEntity = mappingContext.getPersistentEntity(entityType);
|
||||
|
||||
Document mappedFields = getMappedFieldsObject(query.getFieldsObject(), persistentEntity, returnType);
|
||||
Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), persistentEntity);
|
||||
@@ -542,7 +527,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
public <T> T execute(Class<?> entityClass, CollectionCallback<T> callback) {
|
||||
|
||||
Assert.notNull(entityClass, "EntityClass must not be null!");
|
||||
return execute(operations.determineCollectionName(entityClass), callback);
|
||||
return execute(getCollectionName(entityClass), callback);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -613,7 +598,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
@Nullable CollectionOptions collectionOptions) {
|
||||
|
||||
Assert.notNull(entityClass, "EntityClass must not be null!");
|
||||
return doCreateCollection(operations.determineCollectionName(entityClass),
|
||||
return doCreateCollection(getCollectionName(entityClass),
|
||||
convertToDocument(collectionOptions, entityClass));
|
||||
}
|
||||
|
||||
@@ -659,7 +644,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#getCollection(java.lang.Class)
|
||||
*/
|
||||
public <T> boolean collectionExists(Class<T> entityClass) {
|
||||
return collectionExists(operations.determineCollectionName(entityClass));
|
||||
return collectionExists(getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -688,7 +673,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#dropCollection(java.lang.Class)
|
||||
*/
|
||||
public <T> void dropCollection(Class<T> entityClass) {
|
||||
dropCollection(operations.determineCollectionName(entityClass));
|
||||
dropCollection(getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -724,7 +709,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#indexOps(java.lang.Class)
|
||||
*/
|
||||
public IndexOperations indexOps(Class<?> entityClass) {
|
||||
return new DefaultIndexOperations(this, operations.determineCollectionName(entityClass), entityClass);
|
||||
return new DefaultIndexOperations(this, getCollectionName(entityClass), entityClass);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -740,7 +725,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#bulkOps(org.springframework.data.mongodb.core.BulkMode, java.lang.Class)
|
||||
*/
|
||||
public BulkOperations bulkOps(BulkMode bulkMode, Class<?> entityClass) {
|
||||
return bulkOps(bulkMode, entityClass, operations.determineCollectionName(entityClass));
|
||||
return bulkOps(bulkMode, entityClass, getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -775,7 +760,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
@Nullable
|
||||
@Override
|
||||
public <T> T findOne(Query query, Class<T> entityClass) {
|
||||
return findOne(query, entityClass, operations.determineCollectionName(entityClass));
|
||||
return findOne(query, entityClass, getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@@ -797,7 +782,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
@Override
|
||||
public boolean exists(Query query, Class<?> entityClass) {
|
||||
return exists(query, entityClass, operations.determineCollectionName(entityClass));
|
||||
return exists(query, entityClass, getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -827,7 +812,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
*/
|
||||
@Override
|
||||
public <T> List<T> find(Query query, Class<T> entityClass) {
|
||||
return find(query, entityClass, operations.determineCollectionName(entityClass));
|
||||
return find(query, entityClass, getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -848,7 +833,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
@Nullable
|
||||
@Override
|
||||
public <T> T findById(Object id, Class<T> entityClass) {
|
||||
return findById(id, entityClass, operations.determineCollectionName(entityClass));
|
||||
return findById(id, entityClass, getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@@ -870,7 +855,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
*/
|
||||
@Override
|
||||
public <T> List<T> findDistinct(Query query, String field, Class<?> entityClass, Class<T> resultClass) {
|
||||
return findDistinct(query, field, operations.determineCollectionName(entityClass), entityClass, resultClass);
|
||||
return findDistinct(query, field, getCollectionName(entityClass), entityClass, resultClass);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -953,7 +938,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
@Override
|
||||
public <T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass) {
|
||||
return geoNear(near, entityClass, operations.determineCollectionName(entityClass));
|
||||
return geoNear(near, entityClass, getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -976,11 +961,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Assert.notNull(returnType, "ReturnType must not be null!");
|
||||
|
||||
String collection = StringUtils.hasText(collectionName) ? collectionName
|
||||
: operations.determineCollectionName(domainType);
|
||||
: getCollectionName(domainType);
|
||||
Document nearDocument = near.toDocument();
|
||||
|
||||
Document command = new Document("geoNear", collection);
|
||||
command.putAll(queryMapper.getMappedObject(nearDocument, Optional.empty()));
|
||||
command.putAll(nearDocument);
|
||||
|
||||
if (nearDocument.containsKey("query")) {
|
||||
Document query = (Document) nearDocument.get("query");
|
||||
@@ -1030,7 +1015,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
@Override
|
||||
public <T> T findAndModify(Query query, Update update, Class<T> entityClass) {
|
||||
return findAndModify(query, update, new FindAndModifyOptions(), entityClass,
|
||||
operations.determineCollectionName(entityClass));
|
||||
getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@@ -1042,7 +1027,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
@Nullable
|
||||
@Override
|
||||
public <T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass) {
|
||||
return findAndModify(query, update, options, entityClass, operations.determineCollectionName(entityClass));
|
||||
return findAndModify(query, update, options, entityClass, getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@@ -1106,7 +1091,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
@Nullable
|
||||
@Override
|
||||
public <T> T findAndRemove(Query query, Class<T> entityClass) {
|
||||
return findAndRemove(query, entityClass, operations.determineCollectionName(entityClass));
|
||||
return findAndRemove(query, entityClass, getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@@ -1125,7 +1110,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
public long count(Query query, Class<?> entityClass) {
|
||||
|
||||
Assert.notNull(entityClass, "Entity class must not be null!");
|
||||
return count(query, entityClass, operations.determineCollectionName(entityClass));
|
||||
return count(query, entityClass, getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -1145,11 +1130,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
CountOptions options = new CountOptions();
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
|
||||
|
||||
if (query.getLimit() > 0) {
|
||||
options.limit(query.getLimit());
|
||||
}
|
||||
if (query.getSkip() > 0) {
|
||||
options.skip((int) query.getSkip());
|
||||
if (StringUtils.hasText(query.getHint())) {
|
||||
options.hint(Document.parse(query.getHint()));
}

Document document = queryMapper.getMappedObject(query.getQueryObject(),
@@ -1181,7 +1163,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
Assert.notNull(objectToSave, "ObjectToSave must not be null!");

ensureNotIterable(objectToSave);
return insert(objectToSave, operations.determineEntityCollectionName(objectToSave));
return insert(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave)));
}

/*
@@ -1276,7 +1258,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

Assert.notNull(batchToSave, "BatchToSave must not be null!");

return (Collection<T>) doInsertBatch(operations.determineCollectionName(entityClass), batchToSave,
return (Collection<T>) doInsertBatch(getCollectionName(entityClass), batchToSave,
this.mongoConverter);
}

@@ -1310,9 +1292,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
continue;
}

MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(element.getClass());

String collection = entity.getCollection();
String collection = getCollectionName(ClassUtils.getUserClass(element));
List<T> collectionElements = elementsByCollection.get(collection);

if (null == collectionElements) {
@@ -1376,7 +1356,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
public <T> T save(T objectToSave) {

Assert.notNull(objectToSave, "Object to save must not be null!");
return save(objectToSave, operations.determineEntityCollectionName(objectToSave));
return save(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave)));
}

@Override
@@ -1532,7 +1512,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

@Override
public UpdateResult upsert(Query query, Update update, Class<?> entityClass) {
return doUpdate(operations.determineCollectionName(entityClass), query, update, entityClass, true, false);
return doUpdate(getCollectionName(entityClass), query, update, entityClass, true, false);
}

@Override
@@ -1550,7 +1530,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

@Override
public UpdateResult updateFirst(Query query, Update update, Class<?> entityClass) {
return doUpdate(operations.determineCollectionName(entityClass), query, update, entityClass, false, false);
return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, false);
}

@Override
@@ -1568,7 +1548,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

@Override
public UpdateResult updateMulti(Query query, Update update, Class<?> entityClass) {
return doUpdate(operations.determineCollectionName(entityClass), query, update, entityClass, false, true);
return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, true);
}

@Override
@@ -1584,13 +1564,24 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
return doUpdate(collectionName, query, update, entityClass, false, true);
}

protected UpdateResult doUpdate(final String collectionName, final Query query, final UpdateDefinition update,
protected UpdateResult doUpdate(final String collectionName, final Query query, final Update update,
@Nullable final Class<?> entityClass, final boolean upsert, final boolean multi) {
return doUpdate(collectionName, query, (UpdateDefinition) update, entityClass, upsert, multi);
}

private UpdateResult doUpdate(final String collectionName, final Query query, final UpdateDefinition update,
@Nullable final Class<?> entityClass, final boolean upsert, final boolean multi) {

Assert.notNull(collectionName, "CollectionName must not be null!");
Assert.notNull(query, "Query must not be null!");
Assert.notNull(update, "Update must not be null!");

if (query.isSorted() && LOGGER.isWarnEnabled()) {

LOGGER.warn("{} does not support sort ('{}'). Please use findAndModify() instead.",
upsert ? "Upsert" : "UpdateFirst", serializeToJsonSafely(query.getSortObject()));
}

return execute(collectionName, new CollectionCallback<UpdateResult>() {
public UpdateResult doInCollection(MongoCollection<Document> collection)
throws MongoException, DataAccessException {
@@ -1602,11 +1593,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
UpdateOptions opts = new UpdateOptions();
opts.upsert(upsert);

if (update.hasArrayFilters()) {
opts.arrayFilters(
update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()));
}

Document queryObj = new Document();

if (query != null) {
@@ -1615,8 +1601,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
query.getCollation().map(Collation::toMongoCollation).ifPresent(opts::collation);
}

Document updateObj = update instanceof MappedUpdate ? update.getUpdateObject()
: updateMapper.getMappedObject(update.getUpdateObject(), entity);
Document updateObj = update instanceof MappedUpdate ? update.getUpdateObject() : updateMapper.getMappedObject(update.getUpdateObject(), entity);

if (multi && update.isIsolated() && !queryObj.containsKey("$isolated")) {
queryObj.put("$isolated", 1);
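In the hunks above, doUpdate only logs a warning when the incoming Query carries a sort, because db.collection.updateOne ignores sort order. A minimal sketch of the practical difference, assuming a hypothetical Person document class and an injected MongoTemplate (neither is part of the diff):

```java
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

class SortedUpdateSketch {

    static class Person { String lastname; int age; String status; }

    void updateYoungestDoe(MongoTemplate template) {

        Query query = new Query(Criteria.where("lastname").is("Doe")).with(Sort.by("age"));
        Update update = new Update().set("status", "ACTIVE");

        // The sort is not forwarded to updateOne; the template only logs the warning shown above.
        template.updateFirst(query, update, Person.class);

        // findAndModify applies the sort, so the youngest matching "Doe" is the one modified.
        template.findAndModify(query, update, Person.class);
    }
}
```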
@@ -1651,8 +1636,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
});
}

private void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity<?> persistentEntity,
UpdateDefinition update) {
private void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity<?> persistentEntity, UpdateDefinition update) {

if (persistentEntity != null && persistentEntity.hasVersionProperty()) {
String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName();
@@ -1667,7 +1651,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

Assert.notNull(object, "Object must not be null!");

return remove(object, operations.determineCollectionName(object.getClass()));
Query query = operations.forEntity(object).getByIdQuery();

return remove(query, object.getClass());
}

@Override
@@ -1676,7 +1662,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
Assert.notNull(object, "Object must not be null!");
Assert.hasText(collectionName, "Collection name must not be null or empty!");

Query query = operations.forEntity(object).getRemoveByQuery();
Query query = operations.forEntity(object).getByIdQuery();

return doRemove(collectionName, query, object.getClass(), false);
}
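The hunks above derive the delete query for remove(Object) from the entity's id via operations.forEntity(object).getByIdQuery(). A short sketch of the resulting behaviour, assuming a hypothetical Person entity (not part of the diff):

```java
import com.mongodb.client.result.DeleteResult;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.MongoTemplate;

class RemoveByIdSketch {

    static class Person {
        @Id String id;
        String lastname;
    }

    DeleteResult removeOne(MongoTemplate template, Person person) {
        // remove(Object) builds a by-id query, so only the document whose _id matches
        // person.id is deleted, independent of the entity's other field values.
        return template.remove(person);
    }
}
```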
@@ -1688,7 +1674,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

@Override
public DeleteResult remove(Query query, Class<?> entityClass) {
return remove(query, entityClass, operations.determineCollectionName(entityClass));
return remove(query, entityClass, getCollectionName(entityClass));
}

@Override
@@ -1759,7 +1745,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

@Override
public <T> List<T> findAll(Class<T> entityClass) {
return findAll(entityClass, operations.determineCollectionName(entityClass));
return findAll(entityClass, getCollectionName(entityClass));
}

@Override
@@ -1956,7 +1942,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
*/
@Override
public <O> AggregationResults<O> aggregate(TypedAggregation<?> aggregation, Class<O> outputType) {
return aggregate(aggregation, operations.determineCollectionName(aggregation.getInputType()), outputType);
return aggregate(aggregation, getCollectionName(aggregation.getInputType()), outputType);
}

/* (non-Javadoc)
@@ -1979,7 +1965,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
@Override
public <O> AggregationResults<O> aggregate(Aggregation aggregation, Class<?> inputType, Class<O> outputType) {

return aggregate(aggregation, operations.determineCollectionName(inputType), outputType,
return aggregate(aggregation, getCollectionName(inputType), outputType,
new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper));
}

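The aggregate overloads above resolve the collection name from the aggregation's input type. A brief sketch, assuming hypothetical Person and PersonSummary classes and an injected MongoTemplate:

```java
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.query.Criteria;

class TypedAggregationSketch {

    static class Person { String lastname; int age; }
    static class PersonSummary { String lastname; long total; }

    AggregationResults<PersonSummary> summarize(MongoTemplate template) {

        // The input type (Person) travels with the TypedAggregation, so the template can resolve
        // the collection name without an explicit collection argument.
        TypedAggregation<Person> aggregation = Aggregation.newAggregation(Person.class,
                Aggregation.match(Criteria.where("age").gte(18)),
                Aggregation.group("lastname").count().as("total"));

        return template.aggregate(aggregation, PersonSummary.class);
    }
}
```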
@@ -2010,7 +1996,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
*/
@Override
public <O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, Class<O> outputType) {
return aggregateStream(aggregation, operations.determineCollectionName(aggregation.getInputType()), outputType);
return aggregateStream(aggregation, getCollectionName(aggregation.getInputType()), outputType);
}

/* (non-Javadoc)
@@ -2019,7 +2005,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
@Override
public <O> CloseableIterator<O> aggregateStream(Aggregation aggregation, Class<?> inputType, Class<O> outputType) {

return aggregateStream(aggregation, operations.determineCollectionName(inputType), outputType,
return aggregateStream(aggregation, getCollectionName(inputType), outputType,
new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper));
}

@@ -2045,7 +2031,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
*/
@Override
public <T> List<T> findAllAndRemove(Query query, Class<T> entityClass) {
return findAllAndRemove(query, entityClass, operations.determineCollectionName(entityClass));
return findAllAndRemove(query, entityClass, getCollectionName(entityClass));
}

/* (non-Javadoc)
@@ -2442,7 +2428,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
<S, T> List<T> doFind(String collectionName, Document query, Document fields, Class<S> sourceClass,
Class<T> targetClass, CursorPreparer preparer) {

MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(sourceClass);
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(sourceClass);

Document mappedFields = getMappedFieldsObject(fields, entity, targetClass);
Document mappedQuery = queryMapper.getMappedObject(query, entity);
@@ -2571,9 +2557,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
collectionName);
}

return executeFindOneInternal(
new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate,
update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()), options),
return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options),
new ReadDocumentCallback<>(readerToUse, entityClass, collectionName), collectionName);
}

@@ -2779,7 +2763,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
return queryMapper.getMappedSort(query.getSortObject(), mappingContext.getPersistentEntity(type));
}

private Document getMappedFieldsObject(Document fields, MongoPersistentEntity<?> entity, Class<?> targetType) {
private Document getMappedFieldsObject(Document fields, @Nullable MongoPersistentEntity<?> entity,
Class<?> targetType) {

if (entity == null) {
return fields;
}

Document projectedFields = propertyOperations.computeFieldsForProjection(projectionFactory, fields,
entity.getType(), targetType);
@@ -2930,16 +2919,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
private final Document fields;
private final Document sort;
private final Document update;
private final List<Document> arrayFilters;
private final FindAndModifyOptions options;

public FindAndModifyCallback(Document query, Document fields, Document sort, Document update,
List<Document> arrayFilters, FindAndModifyOptions options) {
FindAndModifyOptions options) {
this.query = query;
this.fields = fields;
this.sort = sort;
this.update = update;
this.arrayFilters = arrayFilters;
this.options = options;
}

@@ -2957,10 +2944,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

options.getCollation().map(Collation::toMongoCollation).ifPresent(opts::collation);

if (!arrayFilters.isEmpty()) {
opts.arrayFilters(arrayFilters);
}

return collection.findOneAndUpdate(query, update, opts);
}
}

@@ -66,8 +66,12 @@ class PropertyOperations {
projectionInformation.getInputProperties().forEach(it -> projectedFields.append(it.getName(), 1));
}
} else {
mappingContext.getRequiredPersistentEntity(targetType).doWithProperties(
(SimplePropertyHandler) persistentProperty -> projectedFields.append(persistentProperty.getName(), 1));

MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(targetType);
if (entity != null) {
entity.doWithProperties(
(SimplePropertyHandler) persistentProperty -> projectedFields.append(persistentProperty.getName(), 1));
}
}

return projectedFields;

@@ -116,11 +116,11 @@ class ReactiveAggregationOperationSupport implements ReactiveAggregationOperatio
TypedAggregation<?> typedAggregation = (TypedAggregation<?>) aggregation;

if (typedAggregation.getInputType() != null) {
return template.determineCollectionName(typedAggregation.getInputType());
return template.getCollectionName(typedAggregation.getInputType());
}
}

return template.determineCollectionName(domainType);
return template.getCollectionName(domainType);
}
}
}

@@ -238,7 +238,7 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
}

private String getCollectionName() {
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
}

private String asString() {

@@ -96,7 +96,7 @@ class ReactiveInsertOperationSupport implements ReactiveInsertOperation {
}

private String getCollectionName() {
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
}
}
}

@@ -171,7 +171,7 @@ class ReactiveMapReduceOperationSupport implements ReactiveMapReduceOperation {
}

private String getCollectionName() {
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
}
}
}

@@ -56,6 +56,10 @@ import com.mongodb.reactivestreams.client.MongoCollection;
* Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability
* (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using
* {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}.
* <p />
* <strong>NOTE:</strong> Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
* specific documentation to learn more about <a href="https://docs.mongodb.com/manual/core/transactions/">Multi
* Document Transactions</a>.
*
* @author Mark Paluch
* @author Christoph Strobl
@@ -277,12 +281,15 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
Flux<String> getCollectionNames();

/**
* Get a collection by name, creating it if it doesn't exist.
* Get a {@link MongoCollection} by name. The returned collection may not exists yet (except in local memory) and is
* created on first interaction with the server. Collections can be explicitly created via
* {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class)
* exists} first.
* <p/>
* Translate any exceptions as necessary.
*
* @param collectionName name of the collection.
* @return an existing collection or a newly created one.
* @return an existing collection or one created on first server interaction.
*/
MongoCollection<Document> getCollection(String collectionName);

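The reworded Javadoc above no longer promises that getCollection creates the collection; it is materialized on first server interaction. A sketch of checking or creating explicitly, assuming a hypothetical Person entity mapped to a "person" collection (both names are illustrative, not from the diff):

```java
import com.mongodb.reactivestreams.client.MongoCollection;
import org.bson.Document;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import reactor.core.publisher.Mono;

class GetCollectionSketch {

    static class Person {}

    Mono<MongoCollection<Document>> obtain(ReactiveMongoOperations operations) {
        // getCollection(String) returns immediately; the collection only materializes on the
        // server when first written to, so check for existence or create it explicitly.
        return operations.collectionExists(Person.class)
                .flatMap(exists -> exists
                        ? Mono.just(operations.getCollection("person"))
                        : operations.createCollection(Person.class));
    }
}
```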
@@ -880,11 +887,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {

/**
* Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
* <br />
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
* influence on the resulting number of documents found as those values are passed on to the server and potentially
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
* count all matches.
*
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
* {@literal null}.
@@ -896,11 +898,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
/**
* Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
* must solely consist of document field references as we lack type information to map potential property references
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. <br />
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
* influence on the resulting number of documents found as those values are passed on to the server and potentially
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
* count all matches.
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
*
* @param query the {@link Query} class that specifies the criteria used to find documents.
* @param collectionName must not be {@literal null} or empty.
@@ -911,11 +909,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {

/**
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
* class to map the given {@link Query}. <br />
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
* influence on the resulting number of documents found as those values are passed on to the server and potentially
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
* count all matches.
* class to map the given {@link Query}.
*
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
* {@literal null}.
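The NOTE in the count Javadoc hunks above explains that skip and limit on the Query are passed to the server and bound the count. A sketch of the difference, assuming a hypothetical Person class and illustrative criteria:

```java
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

class CountSketch {

    static class Person {}

    Flux<Long> counts(ReactiveMongoOperations operations) {

        // skip/limit are passed to the server's count, so a paged query counts at most one page.
        Query paged = new Query(Criteria.where("status").is("ACTIVE")).skip(20).limit(10);
        Mono<Long> pageCount = operations.count(paged, Person.class);

        // A query without skip/limit counts every matching document.
        Query unpaged = new Query(Criteria.where("status").is("ACTIVE"));
        return pageCount.concatWith(operations.count(unpaged, Person.class));
    }
}
```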
@@ -930,7 +924,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* <p/>
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
* <p/>
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
@@ -990,7 +984,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* <p/>
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
* <p/>
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
@@ -1038,7 +1032,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <p/>
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
@@ -1056,7 +1050,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <p/>
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
@@ -1075,7 +1069,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <p/>
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
@@ -1093,7 +1087,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <p/>
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
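The Javadoc hunks above correct the "If you/your object has an Id property" wording; the behaviour they describe is that a String id is populated from the generated ObjectId. A sketch, assuming a hypothetical Person type (not part of the diff):

```java
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import reactor.core.publisher.Mono;

class InsertIdSketch {

    static class Person {
        @Id String id;      // populated from the generated ObjectId on insert
        String name;
        Person(String name) { this.name = name; }
        String getId() { return id; }
    }

    Mono<String> insertAndReadId(ReactiveMongoOperations operations) {
        // Once the insert completes, the generated ObjectId has been written back to the String id.
        return operations.insert(new Person("Alice")).map(Person::getId);
    }
}
```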
@@ -1107,7 +1101,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {

/**
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
* combining the query document and the update document.
* combining the query document and the update document. <br />
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
* Use {@link #findAndModify(Query, Update, Class)} instead.
*
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
* {@literal null}.
@@ -1123,6 +1119,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* combining the query document and the update document. <br />
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
* domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support.
* <br />
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
*
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
* {@literal null}.
@@ -1135,7 +1134,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {

/**
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
* combining the query document and the update document.
* combining the query document and the update document. <br />
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
*
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
* {@literal null}.
@@ -1149,7 +1150,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {

/**
* Updates the first object that is found in the collection of the entity class that matches the query document with
* the provided update document.
* the provided update document. <br />
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
* Use {@link #findAndModify(Query, Update, Class)} instead.
*
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
@@ -1165,6 +1168,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* the provided updated document. <br />
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
* domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support.
* <br />
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
*
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
@@ -1178,6 +1184,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
/**
* Updates the first object that is found in the specified collection that matches the query document criteria with
* the provided updated document. <br />
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
*
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.

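The NOTE in the upsert/updateFirst Javadoc hunks above flags that these operations do not apply a Query sort. A reactive sketch mirroring that advice, assuming a hypothetical Person class and illustrative field names:

```java
import com.mongodb.client.result.UpdateResult;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import reactor.core.publisher.Mono;

class ReactiveSortedUpdateSketch {

    static class Person {}

    Mono<Person> updateYoungest(ReactiveMongoOperations operations) {

        Query query = new Query(Criteria.where("lastname").is("Doe")).with(Sort.by("age"));
        Update update = new Update().inc("visits", 1);

        // updateFirst/upsert issue db.collection.updateOne, which does not apply the sort.
        Mono<UpdateResult> anyMatch = operations.updateFirst(query, update, Person.class);

        // findAndModify honours the sort, so the youngest matching document is modified.
        return anyMatch.then(operations.findAndModify(query, update, Person.class));
    }
}
```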
@@ -31,6 +31,7 @@ import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.bson.BsonTimestamp;
import org.bson.BsonValue;
import org.bson.Document;
import org.bson.codecs.Codec;
@@ -93,7 +94,6 @@ import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
import org.springframework.data.mongodb.core.validation.Validator;
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
import org.springframework.data.util.Optionals;
@@ -234,15 +234,12 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
if (this.mappingContext instanceof MongoMappingContext) {

MongoMappingContext mongoMappingContext = (MongoMappingContext) this.mappingContext;
this.indexCreator = new ReactiveMongoPersistentEntityIndexCreator(mongoMappingContext, this::indexOps);
this.eventPublisher = new MongoMappingEventPublisher(this.indexCreatorListener);

if (mongoMappingContext.isAutoIndexCreation()) {
this.indexCreator = new ReactiveMongoPersistentEntityIndexCreator(mongoMappingContext, this::indexOps);
this.eventPublisher = new MongoMappingEventPublisher(this.indexCreatorListener);

mongoMappingContext.setApplicationEventPublisher(this.eventPublisher);
this.mappingContext.getPersistentEntities()
.forEach(entity -> onCheckForIndexes(entity, subscriptionExceptionHandler));
}
mongoMappingContext.setApplicationEventPublisher(this.eventPublisher);
this.mappingContext.getPersistentEntities()
.forEach(entity -> onCheckForIndexes(entity, subscriptionExceptionHandler));
}
}

@@ -378,12 +375,11 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.Class)
*/
public ReactiveIndexOperations indexOps(Class<?> entityClass) {
return new DefaultReactiveIndexOperations(this, determineCollectionName(entityClass), this.queryMapper,
entityClass);
return new DefaultReactiveIndexOperations(this, getCollectionName(entityClass), this.queryMapper, entityClass);
}

public String getCollectionName(Class<?> entityClass) {
return this.determineCollectionName(entityClass);
return operations.determineCollectionName(entityClass);
}

/*
@@ -423,7 +419,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*/
@Override
public <T> Flux<T> execute(Class<?> entityClass, ReactiveCollectionCallback<T> action) {
return createFlux(determineCollectionName(entityClass), action);
return createFlux(getCollectionName(entityClass), action);
}

/*
@@ -609,7 +605,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class)
*/
public <T> Mono<MongoCollection<Document>> createCollection(Class<T> entityClass) {
return createCollection(determineCollectionName(entityClass));
return createCollection(getCollectionName(entityClass));
}

/*
@@ -618,8 +614,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*/
public <T> Mono<MongoCollection<Document>> createCollection(Class<T> entityClass,
@Nullable CollectionOptions collectionOptions) {
return doCreateCollection(determineCollectionName(entityClass),
convertToCreateCollectionOptions(collectionOptions, entityClass));
return doCreateCollection(getCollectionName(entityClass), convertToCreateCollectionOptions(collectionOptions, entityClass));
}

/*
@@ -652,7 +647,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#collectionExists(java.lang.Class)
*/
public <T> Mono<Boolean> collectionExists(Class<T> entityClass) {
return collectionExists(determineCollectionName(entityClass));
return collectionExists(getCollectionName(entityClass));
}

/*
@@ -671,7 +666,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#dropCollection(java.lang.Class)
*/
public <T> Mono<Void> dropCollection(Class<T> entityClass) {
return dropCollection(determineCollectionName(entityClass));
return dropCollection(getCollectionName(entityClass));
}

/*
@@ -708,7 +703,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class)
*/
public <T> Mono<T> findOne(Query query, Class<T> entityClass) {
return findOne(query, entityClass, determineCollectionName(entityClass));
return findOne(query, entityClass, getCollectionName(entityClass));
}

/*
@@ -731,7 +726,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#exists(org.springframework.data.mongodb.core.query.Query, java.lang.Class)
*/
public Mono<Boolean> exists(Query query, Class<?> entityClass) {
return exists(query, entityClass, determineCollectionName(entityClass));
return exists(query, entityClass, getCollectionName(entityClass));
}

/*
@@ -774,7 +769,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#find(org.springframework.data.mongodb.core.query.Query, java.lang.Class)
*/
public <T> Flux<T> find(Query query, Class<T> entityClass) {
return find(query, entityClass, determineCollectionName(entityClass));
return find(query, entityClass, getCollectionName(entityClass));
}

/*
@@ -796,7 +791,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findById(java.lang.Object, java.lang.Class)
*/
public <T> Mono<T> findById(Object id, Class<T> entityClass) {
return findById(id, entityClass, determineCollectionName(entityClass));
return findById(id, entityClass, getCollectionName(entityClass));
}

/*
@@ -815,7 +810,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findDistinct(org.springframework.data.mongodb.core.query.Query, java.lang.String, java.lang.Class, java.lang.Class)
*/
public <T> Flux<T> findDistinct(Query query, String field, Class<?> entityClass, Class<T> resultClass) {
return findDistinct(query, field, determineCollectionName(entityClass), entityClass, resultClass);
return findDistinct(query, field, getCollectionName(entityClass), entityClass, resultClass);
}

/*
@@ -910,7 +905,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*/
@Override
public <O> Flux<O> aggregate(TypedAggregation<?> aggregation, Class<O> outputType) {
return aggregate(aggregation, determineCollectionName(aggregation.getInputType()), outputType);
return aggregate(aggregation, getCollectionName(aggregation.getInputType()), outputType);
}

/*
@@ -920,7 +915,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
@Override
public <O> Flux<O> aggregate(Aggregation aggregation, Class<?> inputType, Class<O> outputType) {

return aggregate(aggregation, determineCollectionName(inputType), outputType,
return aggregate(aggregation, getCollectionName(inputType), outputType,
new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper));
}

@@ -986,7 +981,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*/
@Override
public <T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass) {
return geoNear(near, entityClass, determineCollectionName(entityClass));
return geoNear(near, entityClass, getCollectionName(entityClass));
}

/*
@@ -1010,7 +1005,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
throw new InvalidDataAccessApiUsageException("Entity class must not be null!");
}

String collection = StringUtils.hasText(collectionName) ? collectionName : determineCollectionName(entityClass);
String collection = StringUtils.hasText(collectionName) ? collectionName : getCollectionName(entityClass);
Document nearDocument = near.toDocument();

Document command = new Document("geoNear", collection);
@@ -1046,7 +1041,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class)
*/
public <T> Mono<T> findAndModify(Query query, Update update, Class<T> entityClass) {
return findAndModify(query, update, new FindAndModifyOptions(), entityClass, determineCollectionName(entityClass));
return findAndModify(query, update, new FindAndModifyOptions(), entityClass, getCollectionName(entityClass));
}

/*
@@ -1062,7 +1057,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, org.springframework.data.mongodb.core.FindAndModifyOptions, java.lang.Class)
*/
public <T> Mono<T> findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass) {
return findAndModify(query, update, options, entityClass, determineCollectionName(entityClass));
return findAndModify(query, update, options, entityClass, getCollectionName(entityClass));
}

/*
@@ -1121,7 +1116,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class)
*/
public <T> Mono<T> findAndRemove(Query query, Class<T> entityClass) {
return findAndRemove(query, entityClass, determineCollectionName(entityClass));
return findAndRemove(query, entityClass, getCollectionName(entityClass));
}

/*
@@ -1142,7 +1137,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

Assert.notNull(entityClass, "Entity class must not be null!");

return count(query, entityClass, determineCollectionName(entityClass));
return count(query, entityClass, getCollectionName(entityClass));
}

/*
@@ -1171,13 +1166,9 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
CountOptions options = new CountOptions();
if (query != null) {
query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);

if (query.getLimit() > 0) {
options.limit(query.getLimit());
}
if (query.getSkip() > 0) {
options.skip((int) query.getSkip());
}
}
if (StringUtils.hasText(query.getHint())) {
options.hint(Document.parse(query.getHint()));
}

if (LOGGER.isDebugEnabled()) {
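The count hunk above translates the Query's skip, limit and hint into the driver's CountOptions. A rough, hand-written equivalent (the concrete values and the hint document are illustrative only):

```java
import com.mongodb.client.model.CountOptions;
import org.bson.Document;

class CountOptionsSketch {

    // Roughly what the count hunk above builds from the Query: skip, limit and hint are
    // translated into the driver's CountOptions before the count command is issued.
    CountOptions fromQuery() {
        return new CountOptions()
                .skip(20)
                .limit(10)
                .hint(Document.parse("{ \"status\": 1 }"));
    }
}
```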
@@ -1206,7 +1197,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*/
@Override
public <T> Flux<T> insertAll(Mono<? extends Collection<? extends T>> batchToSave, Class<?> entityClass) {
return insertAll(batchToSave, determineCollectionName(entityClass));
return insertAll(batchToSave, getCollectionName(entityClass));
}

/*
@@ -1230,7 +1221,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
Assert.notNull(objectToSave, "Object to insert must not be null!");

ensureNotIterable(objectToSave);
return insert(objectToSave, determineEntityCollectionName(objectToSave));
return insert(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave)));
}

/*
@@ -1276,7 +1267,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.util.Collection, java.lang.Class)
*/
public <T> Flux<T> insert(Collection<? extends T> batchToSave, Class<?> entityClass) {
return doInsertBatch(determineCollectionName(entityClass), batchToSave, this.mongoConverter);
return doInsertBatch(getCollectionName(entityClass), batchToSave, this.mongoConverter);
}

/*
@@ -1310,9 +1301,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

listToSave.forEach(element -> {

MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(element.getClass());

String collection = entity.getCollection();
String collection = getCollectionName(element.getClass());
List<T> collectionElements = elementsByCollection.computeIfAbsent(collection, k -> new ArrayList<>());

collectionElements.add(element);
@@ -1392,7 +1381,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
public <T> Mono<T> save(T objectToSave) {

Assert.notNull(objectToSave, "Object to save must not be null!");
return save(objectToSave, determineEntityCollectionName(objectToSave));
return save(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave)));
}

/*
@@ -1556,7 +1545,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class)
*/
public Mono<UpdateResult> upsert(Query query, Update update, Class<?> entityClass) {
return doUpdate(determineCollectionName(entityClass), query, update, entityClass, true, false);
return doUpdate(getCollectionName(entityClass), query, update, entityClass, true, false);
}

/*
@@ -1580,7 +1569,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class)
*/
public Mono<UpdateResult> updateFirst(Query query, Update update, Class<?> entityClass) {
return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, false);
return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, false);
}

/*
@@ -1604,7 +1593,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class)
*/
public Mono<UpdateResult> updateMulti(Query query, Update update, Class<?> entityClass) {
return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, true);
return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, true);
}

/*
@@ -1623,8 +1612,19 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
return doUpdate(collectionName, query, update, entityClass, false, true);
}

protected Mono<UpdateResult> doUpdate(String collectionName, Query query, @Nullable UpdateDefinition update,
protected Mono<UpdateResult> doUpdate(String collectionName, Query query, @Nullable Update update,
@Nullable Class<?> entityClass, boolean upsert, boolean multi) {
return doUpdate(collectionName, query, (UpdateDefinition) update, entityClass, upsert, multi);
}

private Mono<UpdateResult> doUpdate(String collectionName, Query query, @Nullable UpdateDefinition update,
@Nullable Class<?> entityClass, boolean upsert, boolean multi) {

if (query.isSorted() && LOGGER.isWarnEnabled()) {

LOGGER.warn("{} does not support sort ('{}'). Please use findAndModify() instead.",
upsert ? "Upsert" : "UpdateFirst", serializeToJsonSafely(query.getSortObject()));
}

MongoPersistentEntity<?> entity = entityClass == null ? null : getPersistentEntity(entityClass);

@@ -1649,11 +1649,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
UpdateOptions updateOptions = new UpdateOptions().upsert(upsert);
query.getCollation().map(Collation::toMongoCollation).ifPresent(updateOptions::collation);

if (update.hasArrayFilters()) {
updateOptions.arrayFilters(update.getArrayFilters().stream().map(ArrayFilter::asDocument)
.map(it -> queryMapper.getMappedObject(it, entity)).collect(Collectors.toList()));
}

if (!UpdateMapper.isUpdateObject(updateObj)) {

ReplaceOptions replaceOptions = new ReplaceOptions();
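The arrayFilters lines in the hunk above map each declared filter to a Document before handing it to the driver. A rough, hand-written equivalent using the driver's UpdateOptions directly (the filter document and values are illustrative only, not taken from the diff):

```java
import java.util.Collections;

import com.mongodb.client.model.UpdateOptions;
import org.bson.Document;

class ArrayFilterSketch {

    // Roughly what the hunk above does with the declared array filters: each filter is rendered
    // as a Document and passed to the driver's UpdateOptions before updateOne/updateMany runs.
    UpdateOptions withArrayFilter() {
        return new UpdateOptions()
                .upsert(false)
                .arrayFilters(Collections.singletonList(
                        Document.parse("{ \"elem.qty\": { \"$gte\": 100 } }")));
    }
}
```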
@@ -1702,7 +1697,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
return false;
}

return document.containsKey(persistentEntity.getRequiredVersionProperty().getFieldName());
return document.containsKey(persistentEntity.getRequiredIdProperty().getFieldName());
}

/*
@@ -1731,7 +1726,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

Assert.notNull(object, "Object must not be null!");

return remove(operations.forEntity(object).getRemoveByQuery(), object.getClass());
return remove(operations.forEntity(object).getByIdQuery(), object.getClass());
}

/*
@@ -1743,7 +1738,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
Assert.notNull(object, "Object must not be null!");
Assert.hasText(collectionName, "Collection name must not be null or empty!");

return doRemove(collectionName, operations.forEntity(object).getRemoveByQuery(), object.getClass());
return doRemove(collectionName, operations.forEntity(object).getByIdQuery(), object.getClass());
}

private void assertUpdateableIdIfNotSet(Object value) {
@@ -1780,7 +1775,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(org.springframework.data.mongodb.core.query.Query, java.lang.Class)
*/
public Mono<DeleteResult> remove(Query query, Class<?> entityClass) {
return remove(query, entityClass, determineCollectionName(entityClass));
return remove(query, entityClass, getCollectionName(entityClass));
}

/*
@@ -1801,14 +1796,15 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

Document queryObject = query.getQueryObject();
MongoPersistentEntity<?> entity = getPersistentEntity(entityClass);
Document removeQuery = queryMapper.getMappedObject(queryObject, entity);

return execute(collectionName, collection -> {

maybeEmitEvent(new BeforeDeleteEvent<>(removeQuery, entityClass, collectionName));
Document removeQuey = queryMapper.getMappedObject(queryObject, entity);

maybeEmitEvent(new BeforeDeleteEvent<>(removeQuey, entityClass, collectionName));

MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, entityClass,
null, removeQuery);
null, removeQuey);

DeleteOptions deleteOptions = new DeleteOptions();
query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation);
@@ -1818,13 +1814,13 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Remove using query: {} in collection: {}.",
new Object[] { serializeToJsonSafely(removeQuery), collectionName });
new Object[] { serializeToJsonSafely(removeQuey), collectionName });
}

if (query.getLimit() > 0 || query.getSkip() > 0) {

FindPublisher<Document> cursor = new QueryFindPublisherPreparer(query, entityClass)
.prepare(collection.find(removeQuery)) //
.prepare(collection.find(removeQuey)) //
.projection(MappedDocument.getIdOnlyProjection());

return Flux.from(cursor) //
@@ -1835,10 +1831,10 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
return collectionToUse.deleteMany(MappedDocument.getIdIn(val), deleteOptions);
});
} else {
return collectionToUse.deleteMany(removeQuery, deleteOptions);
return collectionToUse.deleteMany(removeQuey, deleteOptions);
}

}).doOnNext(it -> maybeEmitEvent(new AfterDeleteEvent<>(queryObject, entityClass, collectionName))) //
}).doOnNext(deleteResult -> maybeEmitEvent(new AfterDeleteEvent<>(queryObject, entityClass, collectionName)))
.next();
}

@@ -1847,7 +1843,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAll(java.lang.Class)
*/
public <T> Flux<T> findAll(Class<T> entityClass) {
return findAll(entityClass, determineCollectionName(entityClass));
return findAll(entityClass, getCollectionName(entityClass));
}

/*
@@ -1875,7 +1871,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*/
@Override
public <T> Flux<T> findAllAndRemove(Query query, Class<T> entityClass) {
return findAllAndRemove(query, entityClass, determineCollectionName(entityClass));
return findAllAndRemove(query, entityClass, getCollectionName(entityClass));
}

/*
@@ -1893,7 +1889,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*/
@Override
public <T> Flux<T> tail(Query query, Class<T> entityClass) {
return tail(query, entityClass, determineCollectionName(entityClass));
return tail(query, entityClass, getCollectionName(entityClass));
}

/*
@@ -1939,7 +1935,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::resumeAfter).orElse(publisher);
publisher = options.getCollation().map(Collation::toMongoCollation).map(publisher::collation).orElse(publisher);
publisher = options.getResumeBsonTimestamp().map(publisher::startAtOperationTime).orElse(publisher);
publisher = options.getResumeTimestamp().map(it -> new BsonTimestamp((int) it.getEpochSecond(), 0))
.map(publisher::startAtOperationTime).orElse(publisher);
publisher = publisher.fullDocument(options.getFullDocumentLookup().orElse(fullDocument));

return Flux.from(publisher).map(document -> new ChangeStreamEvent<>(document, targetType, getConverter()));
@@ -1975,8 +1972,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
public <T> Flux<T> mapReduce(Query filterQuery, Class<?> domainType, Class<T> resultType, String mapFunction,
String reduceFunction, MapReduceOptions options) {

return mapReduce(filterQuery, domainType, determineCollectionName(domainType), resultType, mapFunction,
reduceFunction, options);
return mapReduce(filterQuery, domainType, getCollectionName(domainType), resultType, mapFunction, reduceFunction,
options);
}

/*
@@ -2265,7 +2262,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
<S, T> Flux<T> doFind(String collectionName, Document query, Document fields, Class<S> sourceClass,
Class<T> targetClass, FindPublisherPreparer preparer) {

MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(sourceClass);
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(sourceClass);

Document mappedFields = getMappedFieldsObject(fields, entity, targetClass);
Document mappedQuery = queryMapper.getMappedObject(query, entity);
@@ -2279,7 +2276,12 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
new ProjectingReadCallback<>(mongoConverter, sourceClass, targetClass, collectionName), collectionName);
}

private Document getMappedFieldsObject(Document fields, MongoPersistentEntity<?> entity, Class<?> targetType) {
private Document getMappedFieldsObject(Document fields, @Nullable MongoPersistentEntity<?> entity,
Class<?> targetType) {

if (entity == null) {
return fields;
}

Document projectedFields = propertyOperations.computeFieldsForProjection(projectionFactory, fields,
entity.getType(), targetType);
@@ -2381,7 +2383,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
collectionName));
}

return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()), options),
return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options),
new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName);
});
}
@@ -2603,25 +2605,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
return type == null ? null : mappingContext.getPersistentEntity(type);
}

private <T> String determineEntityCollectionName(@Nullable T obj) {

if (null != obj) {
return determineCollectionName(obj.getClass());
}

return null;
}

String determineCollectionName(@Nullable Class<?> entityClass) {

if (entityClass == null) {
throw new InvalidDataAccessApiUsageException(
"No class parameter provided, entity collection can't be determined!");
}

return mappingContext.getRequiredPersistentEntity(entityClass).getCollection();
}

private static MappingMongoConverter getDefaultMongoConverter() {

MongoCustomConversions conversions = new MongoCustomConversions(Collections.emptyList());
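The hunk above replaces the local determineCollectionName/determineEntityCollectionName helpers with getCollectionName, which consults the mapping metadata. A sketch, assuming a hypothetical Person entity annotated with @Document("people") (names are illustrative, not from the diff):

```java
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.mapping.Document;

class CollectionNameSketch {

    @Document("people") // hypothetical mapped entity
    static class Person {
        @Id String id;
        String lastname;
    }

    String resolve(ReactiveMongoTemplate template) {
        // Resolution now goes through getCollectionName(Class), which consults the mapping
        // metadata rather than the removed determineCollectionName helpers.
        return template.getCollectionName(Person.class); // "people"
    }
}
```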
@@ -2765,7 +2748,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
private final Document fields;
|
||||
private final Document sort;
|
||||
private final Document update;
|
||||
private final List<Document> arrayFilters;
|
||||
private final FindAndModifyOptions options;
|
||||
|
||||
@Override
|
||||
@@ -2781,12 +2763,12 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
return collection.findOneAndDelete(query, findOneAndDeleteOptions);
|
||||
}
|
||||
|
||||
FindOneAndUpdateOptions findOneAndUpdateOptions = convertToFindOneAndUpdateOptions(options, fields, sort, arrayFilters);
|
||||
FindOneAndUpdateOptions findOneAndUpdateOptions = convertToFindOneAndUpdateOptions(options, fields, sort);
|
||||
return collection.findOneAndUpdate(query, update, findOneAndUpdateOptions);
|
||||
}
|
||||
|
||||
private static FindOneAndUpdateOptions convertToFindOneAndUpdateOptions(FindAndModifyOptions options, Document fields,
|
||||
Document sort, List<Document> arrayFilters) {
|
||||
private FindOneAndUpdateOptions convertToFindOneAndUpdateOptions(FindAndModifyOptions options, Document fields,
|
||||
Document sort) {
|
||||
|
||||
FindOneAndUpdateOptions result = new FindOneAndUpdateOptions();
|
||||
|
||||
@@ -2799,7 +2781,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
}
|
||||
|
||||
result = options.getCollation().map(Collation::toMongoCollation).map(result::collation).orElse(result);
|
||||
result.arrayFilters(arrayFilters);
|
||||
|
||||
return result;
|
||||
}
|
||||
@@ -3200,7 +3181,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
|
||||
// Double check type as Spring infrastructure does not consider nested generics
|
||||
if (entity instanceof MongoPersistentEntity) {
|
||||
|
||||
onCheckForIndexes((MongoPersistentEntity<?>) entity, subscriptionExceptionHandler);
|
||||
}
|
||||
}
|
||||
|
||||
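Note (illustrative aside, not part of the diff): the arrayFilters handling removed above hands the filters straight through to the driver's FindOneAndUpdateOptions. A hedged, standalone sketch of that driver call follows; the collection field "elem.grade" and the filter value are made-up examples, not taken from this change set.

import java.util.Collections;
import java.util.List;

import org.bson.Document;

import com.mongodb.client.model.FindOneAndUpdateOptions;
import com.mongodb.client.model.ReturnDocument;

class ArrayFilterOptionsSketch {

    public static void main(String[] args) {

        // Hypothetical filter: only array elements matched by $[elem] whose "grade" is >= 85 are updated.
        List<Document> arrayFilters = Collections
                .singletonList(new Document("elem.grade", new Document("$gte", 85)));

        // Mirrors the shape of convertToFindOneAndUpdateOptions(...) above; sort/projection omitted for brevity.
        FindOneAndUpdateOptions options = new FindOneAndUpdateOptions()
                .returnDocument(ReturnDocument.AFTER)
                .arrayFilters(arrayFilters);

        System.out.println(options.getArrayFilters());
    }
}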
@@ -112,7 +112,7 @@ class ReactiveRemoveOperationSupport implements ReactiveRemoveOperation {
}

private String getCollectionName() {
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
}

}

@@ -126,7 +126,7 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {
return template.findAndModify(query, update, findAndModifyOptions, targetType, collectionName);
}

/*
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingFindAndReplace#findAndReplace()
*/

@@ -172,7 +172,7 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {
findAndReplaceOptions, replacement, targetType);
}

/*
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithUpdate#replaceWith(java.lang.Object)
*/

@@ -185,7 +185,7 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {
findAndReplaceOptions, replacement, targetType);
}

/*
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.FindAndReplaceWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndReplaceOptions)
*/

@@ -216,7 +216,7 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {
}

private String getCollectionName() {
return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
}
}
}
@@ -496,7 +496,7 @@ public class ArrayOperators {
}

NestedDelegatingExpressionAggregationOperationContext nea = new NestedDelegatingExpressionAggregationOperationContext(
context);
context, Collections.singleton(as));
return ((AggregationExpression) condition).toDocument(nea);
}

@@ -69,6 +69,11 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
*/
@Override
public FieldReference getReference(Field field) {

if (field.isInternal()) {
return new DirectFieldReference(new ExposedField(field, true));
}

return getReference(field, field.getTarget());
}

@@ -43,4 +43,12 @@ public interface Field {
* @return
*/
boolean isAliased();

/**
* @return true if the field name references a local value such as {@code $$this}.
* @since 2.1.11
*/
default boolean isInternal() {
return false;
}
}

@@ -283,6 +283,11 @@ public final class Fields implements Iterable<Field> {
return !getName().equals(getTarget());
}

@Override
public boolean isInternal() {
return getRaw().endsWith("$$this") || getRaw().endsWith("$$value");
}

/**
* @return {@literal true} in case the field name starts with {@code $$}.
* @since 1.10

@@ -15,9 +15,12 @@
*/
package org.springframework.data.mongodb.core.aggregation;

import java.util.Collection;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;

import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExpressionFieldReference;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
import org.springframework.util.Assert;

/**

@@ -26,21 +29,25 @@ import org.springframework.util.Assert;
* variable.
*
* @author Christoph Strobl
* @author Mark Paluch
* @since 1.10
*/
class NestedDelegatingExpressionAggregationOperationContext implements AggregationOperationContext {

private final AggregationOperationContext delegate;
private final Collection<Field> inners;

/**
* Creates new {@link NestedDelegatingExpressionAggregationOperationContext}.
*
* @param referenceContext must not be {@literal null}.
*/
public NestedDelegatingExpressionAggregationOperationContext(AggregationOperationContext referenceContext) {
NestedDelegatingExpressionAggregationOperationContext(AggregationOperationContext referenceContext,
Collection<Field> inners) {

Assert.notNull(referenceContext, "Reference context must not be null!");
this.delegate = referenceContext;
this.inners = inners;
}

/*

@@ -58,7 +65,25 @@ class NestedDelegatingExpressionAggregationOperationContext implements Aggregati
*/
@Override
public FieldReference getReference(Field field) {
return new ExpressionFieldReference(delegate.getReference(field));

FieldReference reference = delegate.getReference(field);
return isInnerVariableReference(field) ? new ExpressionFieldReference(delegate.getReference(field)) : reference;
}

private boolean isInnerVariableReference(Field field) {

if (inners.isEmpty()) {
return false;
}

for (Field inner : inners) {
if (inner.getName().equals(field.getName())
|| (field.getTarget().contains(".") && field.getTarget().startsWith(inner.getName()))) {
return true;
}
}

return false;
}

/*

@@ -18,7 +18,9 @@ package org.springframework.data.mongodb.core.aggregation;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable;

@@ -185,7 +187,8 @@ public class VariableOperators {
map.putAll(context.getMappedObject(input));
map.put("as", itemVariableName);
map.put("in",
functionToApply.toDocument(new NestedDelegatingExpressionAggregationOperationContext(operationContext)));
functionToApply.toDocument(new NestedDelegatingExpressionAggregationOperationContext(operationContext,
Collections.singleton(Fields.field(itemVariableName)))));

return new Document("$map", map);
}

@@ -322,12 +325,14 @@ public class VariableOperators {

private Document getMappedVariable(ExpressionVariable var, AggregationOperationContext context) {

return new Document(var.variableName, var.expression instanceof AggregationExpression
? ((AggregationExpression) var.expression).toDocument(context) : var.expression);
return new Document(var.variableName,
var.expression instanceof AggregationExpression ? ((AggregationExpression) var.expression).toDocument(context)
: var.expression);
}

private Object getMappedIn(AggregationOperationContext context) {
return expression.toDocument(new NestedDelegatingExpressionAggregationOperationContext(context));
return expression.toDocument(new NestedDelegatingExpressionAggregationOperationContext(context,
this.vars.stream().map(var -> Fields.field(var.variableName)).collect(Collectors.toList())));
}

/**
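Note (illustrative aside, not part of the diff): the toDocument(...) code above assembles the well-known input/as/in shape of MongoDB's $map, where the "in" expression may refer to the item variable via $$<name>. A hedged sketch of the resulting Document, using made-up field and variable names ("quizzes", "grade"):

import java.util.Arrays;

import org.bson.Document;

class MapExpressionSketch {

    public static void main(String[] args) {

        // Shape produced for something like: map over "quizzes", as "grade", apply { $add: ["$$grade", 2] }.
        Document map = new Document();
        map.put("input", "$quizzes");
        map.put("as", "grade");
        map.put("in", new Document("$add", Arrays.asList("$$grade", 2)));

        System.out.println(new Document("$map", map).toJson());
    }
}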
@@ -15,16 +15,13 @@
*/
package org.springframework.data.mongodb.core.convert;

import java.util.Collections;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.UnaryOperator;

import org.bson.Document;
import org.bson.conversions.Bson;

import org.springframework.data.convert.CustomConversions;
import org.springframework.data.convert.DefaultTypeMapper;
import org.springframework.data.convert.SimpleTypeInformationMapper;
import org.springframework.data.convert.TypeAliasAccessor;

@@ -61,58 +58,21 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper<Bson> implements M

private final TypeAliasAccessor<Bson> accessor;
private final @Nullable String typeKey;
private UnaryOperator<Class<?>> writeTarget = UnaryOperator.identity();

/**
* Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code _class}.
*/
public DefaultMongoTypeMapper() {
this(DEFAULT_TYPE_KEY);
}

/**
* Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}.
*
* @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints.
*/
public DefaultMongoTypeMapper(@Nullable String typeKey) {
this(typeKey, Collections.singletonList(new SimpleTypeInformationMapper()));
this(typeKey, Arrays.asList(new SimpleTypeInformationMapper()));
}

/**
* Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}.
*
* @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints.
* @param mappingContext the mapping context.
*/
public DefaultMongoTypeMapper(@Nullable String typeKey,
MappingContext<? extends PersistentEntity<?, ?>, ?> mappingContext) {
this(typeKey, new DocumentTypeAliasAccessor(typeKey), mappingContext,
Collections.singletonList(new SimpleTypeInformationMapper()));
Arrays.asList(new SimpleTypeInformationMapper()));
}

/**
* Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. Uses
* {@link UnaryOperator} to apply {@link CustomConversions}.
*
* @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints.
* @param mappingContext the mapping context to look up types using type hints.
* @see MappingMongoConverter#getWriteTarget(Class)
*/
public DefaultMongoTypeMapper(@Nullable String typeKey,
MappingContext<? extends PersistentEntity<?, ?>, ?> mappingContext, UnaryOperator<Class<?>> writeTarget) {
this(typeKey, new DocumentTypeAliasAccessor(typeKey), mappingContext,
Collections.singletonList(new SimpleTypeInformationMapper()));
this.writeTarget = writeTarget;
}

/**
* Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. Uses
* {@link TypeInformationMapper} to map type hints.
*
* @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints.
* @param mappers
*/
public DefaultMongoTypeMapper(@Nullable String typeKey, List<? extends TypeInformationMapper> mappers) {
this(typeKey, new DocumentTypeAliasAccessor(typeKey), null, mappers);
}

@@ -160,15 +120,6 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper<Bson> implements M
accessor.writeTypeTo(result, new Document("$in", restrictedMappedTypes));
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.convert.MongoTypeMapper#getWriteTargetTypeFor(java.lang.Class)
*/
@Override
public Class<?> getWriteTargetTypeFor(Class<?> source) {
return writeTarget.apply(source);
}

/*
* (non-Javadoc)
* @see org.springframework.data.convert.DefaultTypeMapper#getFallbackTypeFor(java.lang.Object)
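Note (illustrative aside, not part of the diff): the constructors shown above take the name of the field used for type hints, and the Javadoc states that a null key disables them. A hedged usage sketch; the custom key "_type" is a made-up example:

import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper;
import org.springframework.data.mongodb.core.convert.MongoTypeMapper;

class TypeMapperSketch {

    public static void main(String[] args) {

        // Default behaviour: the fully-qualified class name is written to "_class".
        MongoTypeMapper defaultMapper = new DefaultMongoTypeMapper();

        // Custom type key via the (typeKey) constructor above; passing null would disable type hints entirely.
        MongoTypeMapper customKeyMapper = new DefaultMongoTypeMapper("_type");

        System.out.println(defaultMapper.isTypeKey("_class") + " " + customKeyMapper.isTypeKey("_type"));
    }
}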
@@ -15,33 +15,22 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.*;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.core.CollectionFactory;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.convert.EntityInstantiator;
|
||||
import org.springframework.data.convert.EntityInstantiators;
|
||||
import org.springframework.data.convert.TypeMapper;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
@@ -122,8 +111,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
this.dbRefResolver = dbRefResolver;
|
||||
this.mappingContext = mappingContext;
|
||||
this.typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext,
|
||||
this::getWriteTarget);
|
||||
this.typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext);
|
||||
this.idMapper = new QueryMapper(this);
|
||||
|
||||
this.spELContext = new SpELContext(DocumentPropertyAccessor.INSTANCE);
|
||||
@@ -223,11 +211,20 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return conversionService.convert(bson, rawType);
|
||||
}
|
||||
|
||||
if (DBObject.class.isAssignableFrom(rawType)) {
|
||||
if (Document.class.isAssignableFrom(rawType)) {
|
||||
return (S) bson;
|
||||
}
|
||||
|
||||
if (Document.class.isAssignableFrom(rawType)) {
|
||||
if (DBObject.class.isAssignableFrom(rawType)) {
|
||||
|
||||
if (bson instanceof DBObject) {
|
||||
return (S) bson;
|
||||
}
|
||||
|
||||
if (bson instanceof Document) {
|
||||
return (S) new BasicDBObject((Document) bson);
|
||||
}
|
||||
|
||||
return (S) bson;
|
||||
}
|
||||
|
||||
@@ -528,7 +525,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
if (idProperty != null && !dbObjectAccessor.hasValue(idProperty)) {
|
||||
|
||||
Object value = idMapper.convertId(accessor.getProperty(idProperty), idProperty.getFieldType());
|
||||
Object value = idMapper.convertId(accessor.getProperty(idProperty));
|
||||
|
||||
if (value != null) {
|
||||
dbObjectAccessor.put(idProperty, value);
|
||||
@@ -632,7 +629,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = valueType.isSubTypeOf(prop.getType())
|
||||
MongoPersistentEntity<?> entity = isSubTypeOf(obj.getClass(), prop.getType())
|
||||
? mappingContext.getRequiredPersistentEntity(obj.getClass())
|
||||
: mappingContext.getRequiredPersistentEntity(type);
|
||||
|
||||
@@ -671,10 +668,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
protected List<Object> createCollection(Collection<?> collection, MongoPersistentProperty property) {
|
||||
|
||||
if (!property.isDbReference()) {
|
||||
|
||||
if (property.hasExplicitWriteTarget()) {
|
||||
return writeCollectionInternal(collection, new TypeInformationWrapper<>(property), new ArrayList<>());
|
||||
}
|
||||
return writeCollectionInternal(collection, property.getTypeInformation(), new BasicDBList());
|
||||
}
|
||||
|
||||
@@ -754,8 +747,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Class<?> elementType = element == null ? null : element.getClass();
|
||||
|
||||
if (elementType == null || conversions.isSimpleType(elementType)) {
|
||||
collection.add(getPotentiallyConvertedSimpleWrite(element,
|
||||
componentType != null ? componentType.getType() : Object.class));
|
||||
collection.add(getPotentiallyConvertedSimpleWrite(element));
|
||||
} else if (element instanceof Collection || elementType.isArray()) {
|
||||
collection.add(writeCollectionInternal(asCollection(element), componentType, new BasicDBList()));
|
||||
} else {
|
||||
@@ -858,7 +850,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
return conversions.hasCustomWriteTarget(key.getClass(), String.class)
|
||||
? (String) getPotentiallyConvertedSimpleWrite(key, Object.class)
|
||||
? (String) getPotentiallyConvertedSimpleWrite(key)
|
||||
: key.toString();
|
||||
}
|
||||
|
||||
@@ -900,13 +892,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
* @param key must not be {@literal null}.
|
||||
*/
|
||||
private void writeSimpleInternal(Object value, Bson bson, String key) {
|
||||
addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value, Object.class));
|
||||
addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value));
|
||||
}
|
||||
|
||||
private void writeSimpleInternal(Object value, Bson bson, MongoPersistentProperty property) {
|
||||
DocumentAccessor accessor = new DocumentAccessor(bson);
|
||||
accessor.put(property, getPotentiallyConvertedSimpleWrite(value,
|
||||
property.hasExplicitWriteTarget() ? property.getFieldType() : Object.class));
|
||||
accessor.put(property, getPotentiallyConvertedSimpleWrite(value));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -917,19 +908,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nullable Class<?> typeHint) {
|
||||
private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value) {
|
||||
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (typeHint != null && Object.class != typeHint) {
|
||||
|
||||
if (conversionService.canConvert(value.getClass(), typeHint)) {
|
||||
value = conversionService.convert(value, typeHint);
|
||||
}
|
||||
}
|
||||
|
||||
Optional<Class<?>> customTarget = conversions.getCustomWriteTarget(value.getClass());
|
||||
|
||||
if (customTarget.isPresent()) {
|
||||
@@ -1002,8 +986,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
throw new MappingException("Cannot create a reference to an object with a NULL id.");
|
||||
}
|
||||
|
||||
return dbRefResolver.createDbRef(property == null ? null : property.getDBRef(), entity,
|
||||
idMapper.convertId(id, idProperty != null ? idProperty.getFieldType() : ObjectId.class));
|
||||
return dbRefResolver.createDbRef(property == null ? null : property.getDBRef(), entity, idMapper.convertId(id));
|
||||
}
|
||||
|
||||
throw new MappingException("No id property found on class " + entity.getType());
|
||||
@@ -1033,8 +1016,9 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Assert.notNull(targetType, "Target type must not be null!");
|
||||
Assert.notNull(path, "Object path must not be null!");
|
||||
|
||||
Class<?> collectionType = targetType.isSubTypeOf(Collection.class) //
|
||||
? targetType.getType() //
|
||||
Class<?> collectionType = targetType.getType();
|
||||
collectionType = isSubTypeOf(collectionType, Collection.class) //
|
||||
? collectionType //
|
||||
: List.class;
|
||||
|
||||
TypeInformation<?> componentType = targetType.getComponentType() != null //
|
||||
@@ -1228,8 +1212,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
if (conversions.isSimpleType(obj.getClass())) {
|
||||
// Doesn't need conversion
|
||||
return getPotentiallyConvertedSimpleWrite(obj,
|
||||
typeInformation != null ? typeInformation.getType() : Object.class);
|
||||
return getPotentiallyConvertedSimpleWrite(obj);
|
||||
}
|
||||
|
||||
if (obj instanceof List) {
|
||||
@@ -1609,7 +1592,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
* @param ref
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
Document readRef(DBRef ref) {
|
||||
return dbRefResolver.fetch(ref);
|
||||
}
|
||||
@@ -1625,16 +1607,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return dbRefResolver.bulkFetch(references);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the conversion target type if defined or return the {@literal source}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @return
|
||||
* @since 2.2
|
||||
*/
|
||||
public Class<?> getWriteTarget(Class<?> source) {
|
||||
return conversions.getCustomWriteTarget(source).orElse(source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MappingMongoConverter} using the given {@link MongoDbFactory} when loading {@link DBRef}.
|
||||
@@ -1683,6 +1655,17 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given type is a sub type of the given reference, i.e. assignable but not the exact same type.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param reference must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private static boolean isSubTypeOf(Class<?> type, Class<?> reference) {
|
||||
return !type.equals(reference) && reference.isAssignableFrom(type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Marker class used to indicate we have a non root document object here that might be used within an update - so we
|
||||
* need to preserve type hints for potential nested elements but need to remove it on top level.
|
||||
@@ -1703,91 +1686,4 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private static class TypeInformationWrapper<S> implements TypeInformation<S> {
|
||||
|
||||
private MongoPersistentProperty persistentProperty;
|
||||
private TypeInformation<?> delegate;
|
||||
|
||||
public TypeInformationWrapper(MongoPersistentProperty property) {
|
||||
|
||||
this.persistentProperty = property;
|
||||
this.delegate = property.getTypeInformation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<org.springframework.data.util.TypeInformation<?>> getParameterTypes(Constructor constructor) {
|
||||
return persistentProperty.getTypeInformation().getParameterTypes(constructor);
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.springframework.data.util.TypeInformation<?> getProperty(String property) {
|
||||
return delegate.getProperty(property);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isCollectionLike() {
|
||||
return delegate.isCollectionLike();
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.springframework.data.util.TypeInformation<?> getComponentType() {
|
||||
return ClassTypeInformation.from(persistentProperty.getFieldType());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isMap() {
|
||||
return delegate.isMap();
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.springframework.data.util.TypeInformation<?> getMapValueType() {
|
||||
return ClassTypeInformation.from(persistentProperty.getFieldType());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class getType() {
|
||||
return delegate.getType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClassTypeInformation<?> getRawTypeInformation() {
|
||||
return delegate.getRawTypeInformation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.springframework.data.util.TypeInformation<?> getActualType() {
|
||||
return delegate.getActualType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.springframework.data.util.TypeInformation<?> getReturnType(Method method) {
|
||||
return delegate.getReturnType(method);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<org.springframework.data.util.TypeInformation<?>> getParameterTypes(Method method) {
|
||||
return delegate.getParameterTypes(method);
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.springframework.data.util.TypeInformation<?> getSuperTypeInformation(Class superType) {
|
||||
return delegate.getSuperTypeInformation(superType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isAssignableFrom(org.springframework.data.util.TypeInformation target) {
|
||||
return delegate.isAssignableFrom(target);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<org.springframework.data.util.TypeInformation<?>> getTypeArguments() {
|
||||
return delegate.getTypeArguments();
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.springframework.data.util.TypeInformation specialize(ClassTypeInformation type) {
|
||||
return delegate.specialize(type);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,8 +18,6 @@ package org.springframework.data.mongodb.core.convert;
import org.bson.BsonValue;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.bson.types.ObjectId;
import org.springframework.core.convert.ConversionException;
import org.springframework.data.convert.EntityConverter;
import org.springframework.data.convert.EntityReader;
import org.springframework.data.convert.TypeMapper;

@@ -85,18 +83,7 @@ public interface MongoConverter

if (sourceDocument.containsKey("$ref") && sourceDocument.containsKey("$id")) {

Object id = sourceDocument.get("$id");
String collection = sourceDocument.getString("$ref");

MongoPersistentEntity<?> entity = getMappingContext().getPersistentEntity(targetType);
if (entity != null && entity.hasIdProperty()) {
id = convertId(id, entity.getIdProperty().getFieldType());
}

DBRef ref = sourceDocument.containsKey("$db") ? new DBRef(sourceDocument.getString("$db"), collection, id)
: new DBRef(collection, id);

sourceDocument = dbRefResolver.fetch(ref);
sourceDocument = dbRefResolver.fetch(new DBRef(sourceDocument.getString("$ref"), sourceDocument.get("$id")));
if (sourceDocument == null) {
return null;
}

@@ -115,38 +102,4 @@ public interface MongoConverter
}
return getConversionService().convert(source, targetType);
}

/**
* Converts the given raw id value into either {@link ObjectId} or {@link String}.
*
* @param id
* @param targetType
* @return {@literal null} if source {@literal id} is already {@literal null}.
* @since 2.2
*/
@Nullable
default Object convertId(@Nullable Object id, Class<?> targetType) {

if (id == null) {
return null;
}

if (ClassUtils.isAssignable(ObjectId.class, targetType)) {

if (id instanceof String) {

if (ObjectId.isValid(id.toString())) {
return new ObjectId(id.toString());
}
}
}

try {
return getConversionService().canConvert(id.getClass(), targetType)
? getConversionService().convert(id, targetType)
: convertToMongoType(id, null);
} catch (ConversionException o_O) {
return convertToMongoType(id, null);
}
}
}
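Note (illustrative aside, not part of the diff): the default convertId(...) removed above only promotes a String to an ObjectId when it is a syntactically valid one; everything else falls back to the ConversionService. A minimal hedged sketch of that check, with made-up id values and without the ConversionService fallback:

import org.bson.types.ObjectId;

class ConvertIdSketch {

    public static void main(String[] args) {

        // A valid 24-character hex string becomes an ObjectId, anything else is left untouched here.
        Object hexId = "5ca4a34af364a01fbb81f556";
        Object plainId = "order-4711";

        System.out.println(toIdValue(hexId));   // prints the ObjectId
        System.out.println(toIdValue(plainId)); // prints the unchanged String
    }

    // Mirrors the ObjectId.isValid(...) branch of the removed default method (simplified).
    static Object toIdValue(Object id) {
        return id instanceof String && ObjectId.isValid(id.toString()) ? new ObjectId(id.toString()) : id;
    }
}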
@@ -15,12 +15,9 @@
*/
package org.springframework.data.mongodb.core.convert;

import static org.springframework.data.convert.ConverterBuilder.*;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.time.Instant;
import java.util.ArrayList;

@@ -34,7 +31,6 @@ import org.bson.BsonTimestamp;
import org.bson.Document;
import org.bson.types.Binary;
import org.bson.types.Code;
import org.bson.types.Decimal128;
import org.bson.types.ObjectId;
import org.springframework.core.convert.ConversionFailedException;
import org.springframework.core.convert.TypeDescriptor;

@@ -76,9 +72,7 @@ abstract class MongoConverters {
List<Object> converters = new ArrayList<>();

converters.add(BigDecimalToStringConverter.INSTANCE);
converters.add(BigDecimalToDecimal128Converter.INSTANCE);
converters.add(StringToBigDecimalConverter.INSTANCE);
converters.add(Decimal128ToBigDecimalConverter.INSTANCE);
converters.add(BigIntegerToStringConverter.INSTANCE);
converters.add(StringToBigIntegerConverter.INSTANCE);
converters.add(URLToStringConverter.INSTANCE);

@@ -96,8 +90,6 @@ abstract class MongoConverters {
converters.add(BinaryToByteArrayConverter.INSTANCE);
converters.add(BsonTimestampToInstantConverter.INSTANCE);

converters.add(reading(String.class, URI.class, URI::create).andWriting(URI::toString));

return converters;
}

@@ -161,17 +153,6 @@ abstract class MongoConverters {
}
}

/**
* @since 2.2
*/
enum BigDecimalToDecimal128Converter implements Converter<BigDecimal, Decimal128> {
INSTANCE;

public Decimal128 convert(BigDecimal source) {
return source == null ? null : new Decimal128(source);
}
}

enum StringToBigDecimalConverter implements Converter<String, BigDecimal> {
INSTANCE;

@@ -180,17 +161,6 @@ abstract class MongoConverters {
}
}

/**
* @since 2.2
*/
enum Decimal128ToBigDecimalConverter implements Converter<Decimal128, BigDecimal> {
INSTANCE;

public BigDecimal convert(Decimal128 source) {
return source.bigDecimalValue();
}
}

enum BigIntegerToStringConverter implements Converter<BigInteger, String> {
INSTANCE;
@@ -46,15 +46,4 @@ public interface MongoTypeMapper extends TypeMapper<Bson> {
* @param restrictedTypes must not be {@literal null}
*/
void writeTypeRestrictions(Document result, Set<Class<?>> restrictedTypes);

/**
* Compute the target type for a given source considering {@link org.springframework.data.convert.CustomConversions}.
*
* @param source the source type.
* @return never {@literal null}.
* @since 2.2
*/
default Class<?> getWriteTargetTypeFor(Class<?> source) {
return source;
}
}
@@ -17,8 +17,10 @@ package org.springframework.data.mongodb.core.convert;

import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;

import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.util.Lazy;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;

@@ -46,14 +48,14 @@ class ObjectPath {
private final @Nullable ObjectPath parent;
private final @Nullable Object object;
private final @Nullable Object idValue;
private final String collection;
private final Lazy<String> collection;

private ObjectPath() {

this.parent = null;
this.object = null;
this.idValue = null;
this.collection = "";
this.collection = Lazy.empty();
}

/**

@@ -64,7 +66,7 @@ class ObjectPath {
* @param idValue
* @param collection
*/
private ObjectPath(ObjectPath parent, Object object, @Nullable Object idValue, String collection) {
private ObjectPath(ObjectPath parent, Object object, @Nullable Object idValue, Lazy<String> collection) {

this.parent = parent;
this.object = object;

@@ -85,7 +87,7 @@ class ObjectPath {
Assert.notNull(object, "Object must not be null!");
Assert.notNull(entity, "MongoPersistentEntity must not be null!");

return new ObjectPath(this, object, id, entity.getCollection());
return new ObjectPath(this, object, id, Lazy.of(entity::getCollection));
}

/**

@@ -175,7 +177,7 @@ class ObjectPath {
}

private String getCollection() {
return collection;
return collection.get();
}

/*
@@ -26,7 +26,6 @@ import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.bson.BsonValue;
|
||||
@@ -34,6 +33,7 @@ import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.types.ObjectId;
|
||||
|
||||
import org.springframework.core.convert.ConversionException;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.domain.Example;
|
||||
@@ -57,7 +57,6 @@ import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
@@ -132,10 +131,8 @@ public class QueryMapper {
|
||||
// TODO: remove one once QueryMapper can work with Query instances directly
|
||||
if (Query.isRestrictedTypeKey(key)) {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
Set<Class<?>> restrictedTypes = BsonUtils.get(query, key);
|
||||
this.converter.getTypeMapper().writeTypeRestrictions(result, restrictedTypes);
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -257,16 +254,7 @@ public class QueryMapper {
|
||||
*/
|
||||
protected Field createPropertyField(@Nullable MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
if (entity == null) {
|
||||
return new Field(key);
|
||||
}
|
||||
|
||||
if (Field.ID_KEY.equals(key)) {
|
||||
return new MetadataBackedField(key, entity, mappingContext, entity.getIdProperty());
|
||||
}
|
||||
|
||||
return new MetadataBackedField(key, entity, mappingContext);
|
||||
return entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -282,7 +270,7 @@ public class QueryMapper {
|
||||
if (keyword.isOrOrNor() || (keyword.hasIterableValue() && !keyword.isGeometry())) {
|
||||
|
||||
Iterable<?> conditions = keyword.getValue();
|
||||
List<Object> newConditions = new ArrayList<Object>();
|
||||
List<Object> newConditions = new ArrayList<>();
|
||||
|
||||
for (Object condition : conditions) {
|
||||
newConditions.add(isDocument(condition) ? getMappedObject((Document) condition, entity)
|
||||
@@ -293,11 +281,12 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
if (keyword.isSample()) {
|
||||
return exampleMapper.getMappedExample(keyword.<Example<?>> getValue(), entity);
|
||||
return exampleMapper.getMappedExample(keyword.getValue(), entity);
|
||||
}
|
||||
|
||||
if (keyword.isJsonSchema()) {
|
||||
return schemaMapper.mapSchema(new Document(keyword.getKey(), keyword.getValue()), entity.getType());
|
||||
return schemaMapper.mapSchema(new Document(keyword.getKey(), keyword.getValue()),
|
||||
entity != null ? entity.getType() : Object.class);
|
||||
}
|
||||
|
||||
return new Document(keyword.getKey(), convertSimpleOrDocument(keyword.getValue(), entity));
|
||||
@@ -318,6 +307,10 @@ public class QueryMapper {
|
||||
Object convertedValue = needsAssociationConversion ? convertAssociation(value, property)
|
||||
: getMappedValue(property.with(keyword.getKey()), value);
|
||||
|
||||
if (keyword.isSample() && convertedValue instanceof Document) {
|
||||
return (Document) convertedValue;
|
||||
}
|
||||
|
||||
return new Document(keyword.key, convertedValue);
|
||||
}
|
||||
|
||||
@@ -325,21 +318,14 @@ public class QueryMapper {
|
||||
* Returns the mapped value for the given source object assuming it's a value for the given
|
||||
* {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param documentField the key the value will be bound to eventually
|
||||
* @param value the source object to be mapped
|
||||
* @param property the property the value is a value for
|
||||
* @param newKey the key the value will be bound to eventually
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Object getMappedValue(Field documentField, Object value) {
|
||||
|
||||
if(documentField.getProperty() != null && documentField.getProperty().hasExplicitWriteTarget()) {
|
||||
if(conversionService.canConvert(value.getClass(), documentField.getProperty().getFieldType())) {
|
||||
value = conversionService.convert(value, documentField.getProperty().getFieldType());
|
||||
}
|
||||
}
|
||||
|
||||
if (documentField.isIdField() && !documentField.isAssociation()) {
|
||||
|
||||
if (isDBObject(value)) {
|
||||
@@ -348,13 +334,13 @@ public class QueryMapper {
|
||||
|
||||
if (valueDbo.containsField("$in") || valueDbo.containsField("$nin")) {
|
||||
String inKey = valueDbo.containsField("$in") ? "$in" : "$nin";
|
||||
List<Object> ids = new ArrayList<Object>();
|
||||
List<Object> ids = new ArrayList<>();
|
||||
for (Object id : (Iterable<?>) valueDbo.get(inKey)) {
|
||||
ids.add(convertId(id, getIdTypeForField(documentField)));
|
||||
ids.add(convertId(id));
|
||||
}
|
||||
resultDbo.put(inKey, ids);
|
||||
} else if (valueDbo.containsField("$ne")) {
|
||||
resultDbo.put("$ne", convertId(valueDbo.get("$ne"), getIdTypeForField(documentField)));
|
||||
resultDbo.put("$ne", convertId(valueDbo.get("$ne")));
|
||||
} else {
|
||||
return getMappedObject(resultDbo, Optional.empty());
|
||||
}
|
||||
@@ -367,20 +353,20 @@ public class QueryMapper {
|
||||
|
||||
if (valueDbo.containsKey("$in") || valueDbo.containsKey("$nin")) {
|
||||
String inKey = valueDbo.containsKey("$in") ? "$in" : "$nin";
|
||||
List<Object> ids = new ArrayList<Object>();
|
||||
List<Object> ids = new ArrayList<>();
|
||||
for (Object id : (Iterable<?>) valueDbo.get(inKey)) {
|
||||
ids.add(convertId(id, getIdTypeForField(documentField)));
|
||||
ids.add(convertId(id));
|
||||
}
|
||||
resultDbo.put(inKey, ids);
|
||||
} else if (valueDbo.containsKey("$ne")) {
|
||||
resultDbo.put("$ne", convertId(valueDbo.get("$ne"), getIdTypeForField(documentField)));
|
||||
resultDbo.put("$ne", convertId(valueDbo.get("$ne")));
|
||||
} else {
|
||||
return getMappedObject(resultDbo, Optional.empty());
|
||||
}
|
||||
return resultDbo;
|
||||
|
||||
} else {
|
||||
return convertId(value, getIdTypeForField(documentField));
|
||||
return convertId(value);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -395,14 +381,6 @@ public class QueryMapper {
|
||||
return convertSimpleOrDocument(value, documentField.getPropertyEntity());
|
||||
}
|
||||
|
||||
private boolean isIdField(Field documentField) {
|
||||
return documentField.getProperty() != null && documentField.getProperty().isIdProperty();
|
||||
}
|
||||
|
||||
private Class<?> getIdTypeForField(Field documentField) {
|
||||
return isIdField(documentField) ? documentField.getProperty().getFieldType() : ObjectId.class;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link Field} represents an association reference that together with the given value
|
||||
* requires conversion to a {@link org.springframework.data.mongodb.core.mapping.DBRef} object. We check whether the
|
||||
@@ -448,6 +426,10 @@ public class QueryMapper {
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Object convertSimpleOrDocument(Object source, @Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (source instanceof Example) {
|
||||
return exampleMapper.getMappedExample((Example) source, entity);
|
||||
}
|
||||
|
||||
if (source instanceof List) {
|
||||
return delegateConvertToMongoType(source, entity);
|
||||
}
|
||||
@@ -523,14 +505,7 @@ public class QueryMapper {
|
||||
if (source instanceof DBRef) {
|
||||
|
||||
DBRef ref = (DBRef) source;
|
||||
Object id = convertId(ref.getId(),
|
||||
property != null && property.isIdProperty() ? property.getFieldType() : ObjectId.class);
|
||||
|
||||
if (StringUtils.hasText(ref.getDatabaseName())) {
|
||||
return new DBRef(ref.getDatabaseName(), ref.getCollectionName(), id);
|
||||
} else {
|
||||
return new DBRef(ref.getCollectionName(), id);
|
||||
}
|
||||
return new DBRef(ref.getCollectionName(), convertId(ref.getId()));
|
||||
}
|
||||
|
||||
if (source instanceof Iterable) {
|
||||
@@ -611,24 +586,24 @@ public class QueryMapper {
|
||||
*
|
||||
* @param id
|
||||
* @return
|
||||
* @since 2.2
|
||||
*/
|
||||
@Nullable
|
||||
public Object convertId(@Nullable Object id) {
|
||||
return convertId(id, ObjectId.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the given raw id value into either {@link ObjectId} or {@link Class targetType}.
|
||||
*
|
||||
* @param id can be {@literal null}.
|
||||
* @param targetType
|
||||
* @return the converted {@literal id} or {@literal null} if the source was already {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
@Nullable
|
||||
public Object convertId(@Nullable Object id, Class<?> targetType) {
|
||||
return converter.convertId(id, targetType);
|
||||
if (id == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (id instanceof String) {
|
||||
return ObjectId.isValid(id.toString()) ? conversionService.convert(id, ObjectId.class) : id;
|
||||
}
|
||||
|
||||
try {
|
||||
return conversionService.canConvert(id.getClass(), ObjectId.class) ? conversionService.convert(id, ObjectId.class)
|
||||
: delegateConvertToMongoType(id, null);
|
||||
} catch (ConversionException o_O) {
|
||||
return delegateConvertToMongoType(id, null);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -776,8 +751,6 @@ public class QueryMapper {
|
||||
*/
|
||||
protected static class Field {
|
||||
|
||||
protected static final Pattern POSITIONAL_OPERATOR = Pattern.compile("\\$\\[.*\\]");
|
||||
|
||||
private static final String ID_KEY = "_id";
|
||||
|
||||
protected final String name;
|
||||
@@ -1046,8 +1019,7 @@ public class QueryMapper {
|
||||
@Nullable
|
||||
private PersistentPropertyPath<MongoPersistentProperty> getPath(String pathExpression) {
|
||||
|
||||
String rawPath = pathExpression.replaceAll("\\.\\d+", "") //
|
||||
.replaceAll(POSITIONAL_OPERATOR.pattern(), "");
|
||||
String rawPath = pathExpression.replaceAll("\\.\\d+", "");
|
||||
|
||||
PropertyPath path = forName(rawPath);
|
||||
if (path == null || isPathToJavaLangClassProperty(path)) {
|
||||
@@ -1093,6 +1065,11 @@ public class QueryMapper {
|
||||
private PropertyPath forName(String path) {
|
||||
|
||||
try {
|
||||
|
||||
if (entity.getPersistentProperty(path) != null) {
|
||||
return PropertyPath.from(Pattern.quote(path), entity.getTypeInformation());
|
||||
}
|
||||
|
||||
return PropertyPath.from(path, entity.getTypeInformation());
|
||||
} catch (PropertyReferenceException | InvalidPersistentPropertyPath e) {
|
||||
|
||||
@@ -1222,11 +1199,6 @@ public class QueryMapper {
|
||||
return true;
|
||||
}
|
||||
|
||||
Matcher matcher = POSITIONAL_OPERATOR.matcher(partial);
|
||||
if (matcher.find()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
Long.valueOf(partial);
|
||||
return true;
|
||||
|
||||
@@ -289,7 +289,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
public MetadataBackedUpdateField(MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
super(key.replaceAll("\\.\\$(\\[.*\\])?", ""), entity, mappingContext);
|
||||
super(key.replaceAll("\\.\\$", ""), entity, mappingContext);
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
/**
|
||||
* Interface definition for structures defined in GeoJSON ({@link http://geojson.org/}) format.
|
||||
* Interface definition for structures defined in GeoJSON ({@link https://geojson.org/}) format.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
@@ -27,7 +27,7 @@ public interface GeoJson<T extends Iterable<?>> {
|
||||
* String value representing the type of the {@link GeoJson} object.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#geojson-objects">http://geojson.org/geojson-spec.html#geojson-objects</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#geojson-objects">https://geojson.org/geojson-spec.html#geojson-objects</a>
|
||||
*/
|
||||
String getType();
|
||||
|
||||
@@ -36,7 +36,7 @@ public interface GeoJson<T extends Iterable<?>> {
|
||||
* determined by {@link #getType()} of geometry.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#geometry-objects">http://geojson.org/geojson-spec.html#geometry-objects</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#geometry-objects">https://geojson.org/geojson-spec.html#geometry-objects</a>
|
||||
*/
|
||||
T getCoordinates();
|
||||
}
|
||||
|
||||
@@ -27,7 +27,7 @@ import org.springframework.util.ObjectUtils;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#geometry-collection">http://geojson.org/geojson-spec.html#geometry-collection</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#geometry-collection">https://geojson.org/geojson-spec.html#geometry-collection</a>
|
||||
*/
|
||||
public class GeoJsonGeometryCollection implements GeoJson<Iterable<GeoJson<?>>> {
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ import org.springframework.data.geo.Point;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#linestring">http://geojson.org/geojson-spec.html#linestring</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#linestring">https://geojson.org/geojson-spec.html#linestring</a>
|
||||
*/
|
||||
public class GeoJsonLineString extends GeoJsonMultiPoint {
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@ import org.springframework.util.ObjectUtils;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#multilinestring">http://geojson.org/geojson-spec.html#multilinestring</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#multilinestring">https://geojson.org/geojson-spec.html#multilinestring</a>
|
||||
*/
|
||||
public class GeoJsonMultiLineString implements GeoJson<Iterable<GeoJsonLineString>> {
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ import org.springframework.util.ObjectUtils;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#multipoint">http://geojson.org/geojson-spec.html#multipoint</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#multipoint">https://geojson.org/geojson-spec.html#multipoint</a>
|
||||
*/
|
||||
public class GeoJsonMultiPoint implements GeoJson<Iterable<Point>> {
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ import org.springframework.data.geo.Point;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#point">http://geojson.org/geojson-spec.html#point</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#point">https://geojson.org/geojson-spec.html#point</a>
|
||||
*/
|
||||
public class GeoJsonPoint extends Point implements GeoJson<List<Double>> {
|
||||
|
||||
|
||||
@@ -32,7 +32,7 @@ import org.springframework.util.Assert;
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
* @see <a href="http://geojson.org/geojson-spec.html#polygon">http://geojson.org/geojson-spec.html#polygon</a>
|
||||
* @see <a href="https://geojson.org/geojson-spec.html#polygon">https://geojson.org/geojson-spec.html#polygon</a>
|
||||
*/
|
||||
public class GeoJsonPolygon extends Polygon implements GeoJson<List<GeoJsonLineString>> {
|
||||
|
||||
|
||||
@@ -36,29 +36,10 @@ import java.lang.annotation.Target;
|
||||
public @interface CompoundIndex {
|
||||
|
||||
/**
|
||||
* The actual index definition in JSON format or a {@link org.springframework.expression.spel.standard.SpelExpression
|
||||
* template expression} resolving to either a JSON String or a {@link org.bson.Document}. The keys of the JSON
|
||||
* document are the fields to be indexed, the values define the index direction (1 for ascending, -1 for descending).
|
||||
* <br />
|
||||
* The actual index definition in JSON format. The keys of the JSON document are the fields to be indexed, the values
|
||||
* define the index direction (1 for ascending, -1 for descending). <br />
|
||||
* If left empty on nested document, the whole document will be indexed.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
*
|
||||
* @Document
|
||||
* @CompoundIndex(def = "{'h1': 1, 'h2': 1}")
|
||||
* class JsonStringIndexDefinition {
|
||||
* String h1, h2;
|
||||
* }
|
||||
*
|
||||
* @Document
|
||||
* @CompoundIndex(def = "#{T(org.bson.Document).parse("{ 'h1': 1, 'h2': 1 }")}")
|
||||
* class ExpressionIndexDefinition {
|
||||
* String h1, h2;
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String def() default "";
|
||||
@@ -74,8 +55,7 @@ public @interface CompoundIndex {
|
||||
|
||||
/**
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/core/index-unique/">https://docs.mongodb.org/manual/core/index-unique/</a>
|
||||
* @see <a href="https://docs.mongodb.org/manual/core/index-unique/">https://docs.mongodb.org/manual/core/index-unique/</a>
|
||||
*/
|
||||
boolean unique() default false;
|
||||
|
||||
@@ -83,23 +63,20 @@ public @interface CompoundIndex {
|
||||
* If set to true index will skip over any document that is missing the indexed field.
|
||||
*
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/core/index-sparse/">https://docs.mongodb.org/manual/core/index-sparse/</a>
|
||||
* @see <a href="https://docs.mongodb.org/manual/core/index-sparse/">https://docs.mongodb.org/manual/core/index-sparse/</a>
|
||||
*/
|
||||
boolean sparse() default false;
|
||||
|
||||
/**
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping">https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping</a>
|
||||
* @see <a href="https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping">https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping</a>
|
||||
* @deprecated since 2.1. No longer supported by MongoDB as of server version 3.0.
|
||||
*/
|
||||
@Deprecated
|
||||
boolean dropDups() default false;
|
||||
|
||||
/**
|
||||
* Index name of the index to be created either as plain value or as
|
||||
* {@link org.springframework.expression.spel.standard.SpelExpression template expression}. <br />
|
||||
* The name of the index to be created. <br />
|
||||
* <br />
|
||||
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
|
||||
* provided name will be prefixed with the path leading to the entity. <br />
|
||||
@@ -154,8 +131,7 @@ public @interface CompoundIndex {
|
||||
* If {@literal true} the index will be created in the background.
|
||||
*
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/core/indexes/#background-construction">https://docs.mongodb.org/manual/core/indexes/#background-construction</a>
|
||||
* @see <a href="https://docs.mongodb.org/manual/core/indexes/#background-construction">https://docs.mongodb.org/manual/core/indexes/#background-construction</a>
|
||||
*/
|
||||
boolean background() default false;
|
||||
|
||||
|
||||
@@ -1,216 +0,0 @@
/*
 * Copyright 2012-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.index;

import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;

/**
 * Duration format styles.
 * <p/>
 * Fork of {@code org.springframework.boot.convert.DurationStyle}.
 *
 * @author Phillip Webb
 * @since 2.2
 */
enum DurationStyle {

	/**
	 * Simple formatting, for example '1s'.
	 */
	SIMPLE("^([\\+\\-]?\\d+)([a-zA-Z]{0,2})$") {

		@Override
		public Duration parse(String value, @Nullable ChronoUnit unit) {
			try {
				Matcher matcher = matcher(value);
				Assert.state(matcher.matches(), "Does not match simple duration pattern");
				String suffix = matcher.group(2);
				return (StringUtils.hasLength(suffix) ? Unit.fromSuffix(suffix) : Unit.fromChronoUnit(unit))
						.parse(matcher.group(1));
			} catch (Exception ex) {
				throw new IllegalArgumentException("'" + value + "' is not a valid simple duration", ex);
			}
		}
	},

	/**
	 * ISO-8601 formatting.
	 */
	ISO8601("^[\\+\\-]?P.*$") {

		@Override
		public Duration parse(String value, @Nullable ChronoUnit unit) {
			try {
				return Duration.parse(value);
			} catch (Exception ex) {
				throw new IllegalArgumentException("'" + value + "' is not a valid ISO-8601 duration", ex);
			}
		}
	};

	private final Pattern pattern;

	DurationStyle(String pattern) {
		this.pattern = Pattern.compile(pattern);
	}

	protected final boolean matches(String value) {
		return this.pattern.matcher(value).matches();
	}

	protected final Matcher matcher(String value) {
		return this.pattern.matcher(value);
	}

	/**
	 * Parse the given value to a duration.
	 *
	 * @param value the value to parse
	 * @return a duration
	 */
	public Duration parse(String value) {
		return parse(value, null);
	}

	/**
	 * Parse the given value to a duration.
	 *
	 * @param value the value to parse
	 * @param unit the duration unit to use if the value doesn't specify one ({@code null} will default to ms)
	 * @return a duration
	 */
	public abstract Duration parse(String value, @Nullable ChronoUnit unit);

	/**
	 * Detect the style then parse the value to return a duration.
	 *
	 * @param value the value to parse
	 * @return the parsed duration
	 * @throws IllegalStateException if the value is not a known style or cannot be parsed
	 */
	public static Duration detectAndParse(String value) {
		return detectAndParse(value, null);
	}

	/**
	 * Detect the style then parse the value to return a duration.
	 *
	 * @param value the value to parse
	 * @param unit the duration unit to use if the value doesn't specify one ({@code null} will default to ms)
	 * @return the parsed duration
	 * @throws IllegalStateException if the value is not a known style or cannot be parsed
	 */
	public static Duration detectAndParse(String value, @Nullable ChronoUnit unit) {
		return detect(value).parse(value, unit);
	}

	/**
	 * Detect the style from the given source value.
	 *
	 * @param value the source value
	 * @return the duration style
	 * @throws IllegalStateException if the value is not a known style
	 */
	public static DurationStyle detect(String value) {
		Assert.notNull(value, "Value must not be null");
		for (DurationStyle candidate : values()) {
			if (candidate.matches(value)) {
				return candidate;
			}
		}
		throw new IllegalArgumentException("'" + value + "' is not a valid duration");
	}

	/**
	 * Units that we support.
	 */
	enum Unit {

		/**
		 * Milliseconds.
		 */
		MILLIS(ChronoUnit.MILLIS, "ms", Duration::toMillis),

		/**
		 * Seconds.
		 */
		SECONDS(ChronoUnit.SECONDS, "s", Duration::getSeconds),

		/**
		 * Minutes.
		 */
		MINUTES(ChronoUnit.MINUTES, "m", Duration::toMinutes),

		/**
		 * Hours.
		 */
		HOURS(ChronoUnit.HOURS, "h", Duration::toHours),

		/**
		 * Days.
		 */
		DAYS(ChronoUnit.DAYS, "d", Duration::toDays);

		private final ChronoUnit chronoUnit;

		private final String suffix;

		private Function<Duration, Long> longValue;

		Unit(ChronoUnit chronoUnit, String suffix, Function<Duration, Long> toUnit) {
			this.chronoUnit = chronoUnit;
			this.suffix = suffix;
			this.longValue = toUnit;
		}

		public Duration parse(String value) {
			return Duration.of(Long.valueOf(value), this.chronoUnit);
		}

		public long longValue(Duration value) {
			return this.longValue.apply(value);
		}

		public static Unit fromChronoUnit(ChronoUnit chronoUnit) {
			if (chronoUnit == null) {
				return Unit.MILLIS;
			}
			for (Unit candidate : values()) {
				if (candidate.chronoUnit == chronoUnit) {
					return candidate;
				}
			}
			throw new IllegalArgumentException("Unknown unit " + chronoUnit);
		}

		public static Unit fromSuffix(String suffix) {
			for (Unit candidate : values()) {
				if (candidate.suffix.equalsIgnoreCase(suffix)) {
					return candidate;
				}
			}
			throw new IllegalArgumentException("Unknown unit '" + suffix + "'");
		}
	}
}
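For orientation, a minimal sketch of how the DurationStyle enum removed above parses values. This is illustrative only: the enum is package-private, so a real caller would sit in org.springframework.data.mongodb.core.index, and the demo class name below is hypothetical.

package org.springframework.data.mongodb.core.index;

import java.time.Duration;
import java.time.temporal.ChronoUnit;

// Hypothetical demo; mirrors the parse rules of the DurationStyle enum shown above.
class DurationStyleDemo {

	public static void main(String[] args) {

		Duration tenSeconds = DurationStyle.detectAndParse("10s");               // SIMPLE style, suffix "s"
		Duration oneDay = DurationStyle.detectAndParse("1d");                    // SIMPLE style, suffix "d"
		Duration twoDays = DurationStyle.detectAndParse("P2D");                  // ISO8601 style
		Duration halfSecond = DurationStyle.detectAndParse("500", null);         // no suffix, null unit -> milliseconds
		Duration fiveMinutes = DurationStyle.detectAndParse("5", ChronoUnit.MINUTES); // explicit fallback unit

		System.out.println(tenSeconds + " " + oneDay + " " + twoDays + " " + halfSecond + " " + fiveMinutes);
	}
}
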
@@ -34,8 +34,8 @@ import java.lang.annotation.Target;
public @interface GeoSpatialIndexed {

	/**
	 * Index name either as plain value or as {@link org.springframework.expression.spel.standard.SpelExpression template
	 * expression}. <br />
	 * Index name. <br />
	 * <br />
	 * The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
	 * provided name will be prefixed with the path leading to the entity. <br />
	 * <br />
@@ -52,7 +52,6 @@ public @interface GeoSpatialIndexed {
	 * @Document
	 * class Hybrid {
	 * 	@GeoSpatialIndexed(name="index") Point h1;
	 * 	@GeoSpatialIndexed(name="#{@myBean.indexName}") Point h2;
	 * }
	 *
	 * class Nested {
@@ -68,7 +67,6 @@ public @interface GeoSpatialIndexed {
	 * db.root.createIndex( { hybrid.h1: "2d" } , { name: "hybrid.index" } )
	 * db.root.createIndex( { nested.n1: "2d" } , { name: "nested.index" } )
	 * db.hybrid.createIndex( { h1: "2d" } , { name: "index" } )
	 * db.hybrid.createIndex( { h2: "2d"} , { name: the value myBean.getIndexName() returned } )
	 * </code>
	 * </pre>
	 *

@@ -15,7 +15,6 @@
 */
package org.springframework.data.mongodb.core.index;

import java.time.Duration;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
@@ -117,20 +116,6 @@ public class Index implements IndexDefinition {
		return expire(value, TimeUnit.SECONDS);
	}

	/**
	 * Specifies the TTL.
	 *
	 * @param timeout must not be {@literal null}.
	 * @return this.
	 * @throws IllegalArgumentException if given {@literal timeout} is {@literal null}.
	 * @since 2.2
	 */
	public Index expire(Duration timeout) {

		Assert.notNull(timeout, "Timeout must not be null!");
		return expire(timeout.getSeconds());
	}

	/**
	 * Specifies TTL with given {@link TimeUnit}.
	 *

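A short, hedged usage sketch of the Duration-based TTL overload removed in this hunk (2.2 side). The field name is made up; the fluent Index API otherwise matches the class shown above.

import java.time.Duration;

import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.index.Index;

// Sketch only: a TTL index that expires documents two hours after the indexed date field.
class TtlIndexExample {

	Index lastAccessedTtl() {
		// expire(Duration) delegates to expire(timeout.getSeconds()), i.e. second precision.
		return new Index().on("lastAccessed", Sort.Direction.ASC).expire(Duration.ofHours(2));
	}
}
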
@@ -25,6 +25,7 @@ import java.util.List;
import java.util.Optional;

import org.bson.Document;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
@@ -107,8 +108,8 @@ public class IndexInfo {
		boolean sparse = sourceDocument.containsKey("sparse") ? (Boolean) sourceDocument.get("sparse") : false;
		String language = sourceDocument.containsKey("default_language") ? (String) sourceDocument.get("default_language")
				: "";
		String partialFilter = sourceDocument.containsKey("partialFilterExpression")
				? ((Document) sourceDocument.get("partialFilterExpression")).toJson() : null;

		String partialFilter = extractPartialFilterString(sourceDocument);

		IndexInfo info = new IndexInfo(indexFields, name, unique, sparse, language);
		info.partialFilterExpression = partialFilter;
@@ -116,6 +117,21 @@ public class IndexInfo {
		return info;
	}

	/**
	 * @param sourceDocument
	 * @return the {@link String} representation of the partial filter {@link Document}.
	 * @since 2.1.11
	 */
	@Nullable
	private static String extractPartialFilterString(Document sourceDocument) {

		if (!sourceDocument.containsKey("partialFilterExpression")) {
			return null;
		}

		return BsonUtils.toJson(sourceDocument.get("partialFilterExpression", Document.class));
	}

	/**
	 * Returns the individual index fields of the index.
	 *

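An illustrative sketch of what the new extractPartialFilterString helper produces. The helper itself is private, so the snippet below simply inlines the same BsonUtils.toJson conversion on a hand-built index document; exact JSON whitespace may differ.

import org.bson.Document;
import org.springframework.data.mongodb.util.BsonUtils;

class PartialFilterJsonExample {

	public static void main(String[] args) {

		Document index = new Document("partialFilterExpression", new Document("age", new Document("$gte", 18)));

		// Same conversion the helper applies; prints roughly {"age": {"$gte": 18}}
		String json = BsonUtils.toJson(index.get("partialFilterExpression", Document.class));
		System.out.println(json);
	}
}
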
@@ -15,54 +15,25 @@
 */
package org.springframework.data.mongodb.core.index;

import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.util.ClassTypeInformation;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
import org.springframework.data.util.TypeInformation;
import org.springframework.util.Assert;

/**
 * {@link IndexResolver} finds those {@link IndexDefinition}s to be created for a given class.
 *
 * @author Christoph Strobl
 * @author Thomas Darimont
 * @author Mark Paluch
 * @since 1.5
 */
public interface IndexResolver {
interface IndexResolver {

	/**
	 * Creates a new {@link IndexResolver} given {@link MongoMappingContext}.
	 *
	 * @param mappingContext must not be {@literal null}.
	 * @return the new {@link IndexResolver}.
	 * @since 2.2
	 */
	static IndexResolver create(MongoMappingContext mappingContext) {

		Assert.notNull(mappingContext, "MongoMappingContext must not be null!");

		return new MongoPersistentEntityIndexResolver(mappingContext);
	}

	/**
	 * Find and create {@link IndexDefinition}s for properties of given {@link TypeInformation}. {@link IndexDefinition}s
	 * are created for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}.
	 * Find and create {@link IndexDefinition}s for properties of given {@link TypeInformation}. {@link IndexDefinition}s are created
	 * for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}.
	 *
	 * @param typeInformation
	 * @return Empty {@link Iterable} in case no {@link IndexDefinition} could be resolved for type.
	 */
	Iterable<? extends IndexDefinition> resolveIndexFor(TypeInformation<?> typeInformation);

	/**
	 * Find and create {@link IndexDefinition}s for properties of given {@link TypeInformation}. {@link IndexDefinition}s
	 * are created for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}.
	 *
	 * @param entityType
	 * @return Empty {@link Iterable} in case no {@link IndexDefinition} could be resolved for type.
	 * @see 2.2
	 */
	default Iterable<? extends IndexDefinition> resolveIndexFor(Class<?> entityType) {
		return resolveIndexFor(ClassTypeInformation.from(entityType));
	}
	Iterable<? extends IndexDefinitionHolder> resolveIndexFor(TypeInformation<?> typeInformation);

}

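The static create(...) factory and the Class<?> overload shown on the 2.2 side make the manual index setup recommended elsewhere in this diff a one-liner. A hedged sketch; DomainType and the injected beans are placeholders.

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.IndexOperations;
import org.springframework.data.mongodb.core.index.IndexResolver;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

class ManualIndexSetup {

	// Placeholder domain type; substitute one of your @Document classes.
	static class DomainType {}

	void initIndices(MongoTemplate mongoTemplate, MongoMappingContext mappingContext) {

		IndexOperations indexOps = mongoTemplate.indexOps(DomainType.class);

		// 2.2-side API from this hunk: resolve the annotated index definitions and ensure them explicitly.
		IndexResolver.create(mappingContext).resolveIndexFor(DomainType.class).forEach(indexOps::ensureIndex);
	}
}
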
@@ -30,9 +30,8 @@ import java.lang.annotation.Target;
 * @author Thomas Darimont
 * @author Christoph Strobl
 * @author Jordi Llach
 * @author Mark Paluch
 */
@Target({ ElementType.ANNOTATION_TYPE, ElementType.FIELD })
@Target({ElementType.ANNOTATION_TYPE, ElementType.FIELD})
@Retention(RetentionPolicy.RUNTIME)
public @interface Indexed {

@@ -40,8 +39,7 @@ public @interface Indexed {
	 * If set to true reject all documents that contain a duplicate value for the indexed field.
	 *
	 * @return
	 * @see <a href=
	 *      "https://docs.mongodb.org/manual/core/index-unique/">https://docs.mongodb.org/manual/core/index-unique/</a>
	 * @see <a href="https://docs.mongodb.org/manual/core/index-unique/">https://docs.mongodb.org/manual/core/index-unique/</a>
	 */
	boolean unique() default false;

@@ -51,23 +49,20 @@ public @interface Indexed {
	 * If set to true index will skip over any document that is missing the indexed field.
	 *
	 * @return
	 * @see <a href=
	 *      "https://docs.mongodb.org/manual/core/index-sparse/">https://docs.mongodb.org/manual/core/index-sparse/</a>
	 * @see <a href="https://docs.mongodb.org/manual/core/index-sparse/">https://docs.mongodb.org/manual/core/index-sparse/</a>
	 */
	boolean sparse() default false;

	/**
	 * @return
	 * @see <a href=
	 *      "https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping">https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping</a>
	 * @see <a href="https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping">https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping</a>
	 * @deprecated since 2.1. No longer supported by MongoDB as of server version 3.0.
	 */
	@Deprecated
	boolean dropDups() default false;

	/**
	 * Index name either as plain value or as {@link org.springframework.expression.spel.standard.SpelExpression template
	 * expression}. <br />
	 * Index name. <br />
	 * <br />
	 * The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
	 * provided name will be prefixed with the path leading to the entity. <br />
@@ -85,7 +80,6 @@ public @interface Indexed {
	 * @Document
	 * class Hybrid {
	 * 	@Indexed(name="index") String h1;
	 * 	@Indexed(name="#{@myBean.indexName}") String h2;
	 * }
	 *
	 * class Nested {
@@ -101,7 +95,6 @@ public @interface Indexed {
	 * db.root.createIndex( { hybrid.h1: 1 } , { name: "hybrid.index" } )
	 * db.root.createIndex( { nested.n1: 1 } , { name: "nested.index" } )
	 * db.hybrid.createIndex( { h1: 1} , { name: "index" } )
	 * db.hybrid.createIndex( { h2: 1} , { name: the value myBean.getIndexName() returned } )
	 * </code>
	 * </pre>
	 *
@@ -122,8 +115,7 @@ public @interface Indexed {
	 * If {@literal true} the index will be created in the background.
	 *
	 * @return
	 * @see <a href=
	 *      "https://docs.mongodb.org/manual/core/indexes/#background-construction">https://docs.mongodb.org/manual/core/indexes/#background-construction</a>
	 * @see <a href="https://docs.mongodb.org/manual/core/indexes/#background-construction">https://docs.mongodb.org/manual/core/indexes/#background-construction</a>
	 */
	boolean background() default false;

@@ -131,38 +123,7 @@ public @interface Indexed {
	 * Configures the number of seconds after which the collection should expire. Defaults to -1 for no expiry.
	 *
	 * @return
	 * @see <a href=
	 *      "https://docs.mongodb.org/manual/tutorial/expire-data/">https://docs.mongodb.org/manual/tutorial/expire-data/</a>
	 * @see <a href="https://docs.mongodb.org/manual/tutorial/expire-data/">https://docs.mongodb.org/manual/tutorial/expire-data/</a>
	 */
	int expireAfterSeconds() default -1;

	/**
	 * Alternative for {@link #expireAfterSeconds()} to configure the timeout after which the document should expire.
	 * Defaults to an empty {@link String} for no expiry. Accepts numeric values followed by their unit of measure:
	 * <ul>
	 * <li><b>d</b>: Days</li>
	 * <li><b>h</b>: Hours</li>
	 * <li><b>m</b>: Minutes</li>
	 * <li><b>s</b>: Seconds</li>
	 * <li>Alternatively: A Spring {@literal template expression}. The expression can result in a
	 * {@link java.time.Duration} or a valid expiration {@link String} according to the already mentioned
	 * conventions.</li>
	 * </ul>
	 * Supports ISO-8601 style.
	 *
	 * <pre class="code">
	 *
	 * @Indexed(expireAfter = "10s") String expireAfterTenSeconds;
	 *
	 * @Indexed(expireAfter = "1d") String expireAfterOneDay;
	 *
	 * @Indexed(expireAfter = "P2D") String expireAfterTwoDays;
	 *
	 * @Indexed(expireAfter = "#{@mySpringBean.timeout}") String expireAfterTimeoutObtainedFromSpringBean;
	 * </pre>
	 *
	 * @return empty by default.
	 * @since 2.2
	 */
	String expireAfter() default "";
}

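A compact entity sketch combining the TTL attributes documented above: the long-standing expireAfterSeconds and the 2.2-side expireAfter being removed in this hunk. The entity and field names are illustrative.

import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Session {

	// Classic second-based TTL attribute.
	@Indexed(expireAfterSeconds = 3600) java.util.Date lastSeen;

	// 2.2-side alternative: unit suffix, ISO-8601 value, or a template expression.
	@Indexed(expireAfter = "P2D") java.util.Date createdAt;
}
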
@@ -1,84 +0,0 @@
/*
 * Copyright 2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.index;

import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListSet;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @author Christoph Strobl
 * @since 2.2
 */
class JustOnceLogger {

	private static final Map<String, Set<String>> KNOWN_LOGS = new ConcurrentHashMap<>();
	private static final String AUTO_INDEX_CREATION_CONFIG_CHANGE;

	static {
		AUTO_INDEX_CREATION_CONFIG_CHANGE = "Automatic index creation will be disabled by default as of Spring Data MongoDB 3.x."
				+ System.lineSeparator()
				+ "\tPlease use 'MongoMappingContext#setAutoIndexCreation(boolean)' or override 'MongoConfigurationSupport#autoIndexCreation()' to be explicit."
				+ System.lineSeparator()
				+ "\tHowever, we recommend setting up indices manually in an application ready block. You may use index derivation there as well."
				+ System.lineSeparator() + System.lineSeparator() //
				+ "\t> -----------------------------------------------------------------------------------------"
				+ System.lineSeparator() //
				+ "\t> @EventListener(ApplicationReadyEvent.class)" + System.lineSeparator() //
				+ "\t> public void initIndicesAfterStartup() {" + System.lineSeparator() //
				+ "\t>" + System.lineSeparator() //
				+ "\t>     IndexOperations indexOps = mongoTemplate.indexOps(DomainType.class);" + System.lineSeparator()//
				+ "\t>" + System.lineSeparator() //
				+ "\t>     IndexResolver resolver = new MongoPersistentEntityIndexResolver(mongoMappingContext);"
				+ System.lineSeparator() //
				+ "\t>     resolver.resolveIndexFor(DomainType.class).forEach(indexOps::ensureIndex);" + System.lineSeparator() //
				+ "\t> }" + System.lineSeparator() //
				+ "\t> -----------------------------------------------------------------------------------------"
				+ System.lineSeparator();
	}

	static void logWarnIndexCreationConfigurationChange(String loggerName) {
		warnOnce(loggerName, AUTO_INDEX_CREATION_CONFIG_CHANGE);
	}

	static void warnOnce(String loggerName, String message) {

		Logger logger = LoggerFactory.getLogger(loggerName);
		if (!logger.isWarnEnabled()) {
			return;
		}

		if (!KNOWN_LOGS.containsKey(loggerName)) {

			KNOWN_LOGS.put(loggerName, new ConcurrentSkipListSet<>(Collections.singleton(message)));
			logger.warn(message);
		} else {

			Set<String> messages = KNOWN_LOGS.get(loggerName);
			if (messages.contains(message)) {
				return;
			}

			messages.add(message);
			logger.warn(message);
		}
	}
}

@@ -63,13 +63,11 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
	/**
	 * Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
	 * {@link MongoDbFactory}.
	 *
	 * @param mappingContext must not be {@literal null}.
	 * @param mappingContext must not be {@literal null}.
	 * @param indexOperationsProvider must not be {@literal null}.
	 */
	public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext,
			IndexOperationsProvider indexOperationsProvider) {
		this(mappingContext, indexOperationsProvider, IndexResolver.create(mappingContext));
	public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, IndexOperationsProvider indexOperationsProvider) {
		this(mappingContext, indexOperationsProvider, new MongoPersistentEntityIndexResolver(mappingContext));
	}

	/**
@@ -80,8 +78,8 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
	 * @param mongoDbFactory must not be {@literal null}.
	 * @param indexResolver must not be {@literal null}.
	 */
	public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext,
			IndexOperationsProvider indexOperationsProvider, IndexResolver indexResolver) {
	public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, IndexOperationsProvider indexOperationsProvider,
			IndexResolver indexResolver) {

		Assert.notNull(mappingContext, "MongoMappingContext must not be null!");
		Assert.notNull(indexOperationsProvider, "IndexOperationsProvider must not be null!");
@@ -110,7 +108,6 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma

		// Double check type as Spring infrastructure does not consider nested generics
		if (entity instanceof MongoPersistentEntity) {

			checkForIndexes((MongoPersistentEntity<?>) entity);
		}
	}
@@ -134,16 +131,8 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
	private void checkForAndCreateIndexes(MongoPersistentEntity<?> entity) {

		if (entity.isAnnotationPresent(Document.class)) {
			for (IndexDefinition indexDefinition : indexResolver.resolveIndexFor(entity.getTypeInformation())) {

				JustOnceLogger.logWarnIndexCreationConfigurationChange(this.getClass().getName());

				IndexDefinitionHolder indexToCreate = indexDefinition instanceof IndexDefinitionHolder
						? (IndexDefinitionHolder) indexDefinition
						: new IndexDefinitionHolder("", indexDefinition, entity.getCollection());

			for (IndexDefinitionHolder indexToCreate : indexResolver.resolveIndexFor(entity.getTypeInformation())) {
				createIndex(indexToCreate);

			}
		}
	}
@@ -157,8 +146,8 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma

		} catch (UncategorizedMongoDbException ex) {

			if (ex.getCause() instanceof MongoException
					&& MongoDbErrorCodes.isDataIntegrityViolationCode(((MongoException) ex.getCause()).getCode())) {
			if (ex.getCause() instanceof MongoException &&
					MongoDbErrorCodes.isDataIntegrityViolationCode(((MongoException) ex.getCause()).getCode())) {

				IndexInfo existingIndex = fetchIndexInformation(indexDefinition);
				String message = "Cannot create index for '%s' in collection '%s' with keys '%s' and options '%s'.";

@@ -19,7 +19,6 @@ import lombok.AccessLevel;
import lombok.EqualsAndHashCode;
import lombok.RequiredArgsConstructor;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -33,35 +32,25 @@ import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.data.domain.Sort;
import org.springframework.data.mapping.Association;
import org.springframework.data.mapping.AssociationHandler;
import org.springframework.data.mapping.MappingException;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.PersistentProperty;
import org.springframework.data.mapping.PropertyHandler;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.CycleGuard.Path;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.TextIndexIncludeOptions.IncludeStrategy;
import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexDefinitionBuilder;
import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexedFieldSpec;
import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.spel.EvaluationContextProvider;
import org.springframework.data.util.TypeInformation;
import org.springframework.expression.EvaluationContext;
import org.springframework.expression.Expression;
import org.springframework.expression.ParserContext;
import org.springframework.expression.common.LiteralExpression;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

/**
@@ -79,10 +68,8 @@ import org.springframework.util.StringUtils;
public class MongoPersistentEntityIndexResolver implements IndexResolver {

	private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexResolver.class);
	private static final SpelExpressionParser PARSER = new SpelExpressionParser();

	private final MongoMappingContext mappingContext;
	private EvaluationContextProvider evaluationContextProvider = EvaluationContextProvider.DEFAULT;

	/**
	 * Create new {@link MongoPersistentEntityIndexResolver}.
@@ -118,14 +105,15 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
		Document document = root.findAnnotation(Document.class);
		Assert.notNull(document, "Given entity is not collection root.");

		final List<IndexDefinitionHolder> indexInformation = new ArrayList<>();
		indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", root.getCollection(), root));
		indexInformation.addAll(potentiallyCreateTextIndexDefinition(root));
		List<IndexDefinitionHolder> indexInformation = new ArrayList<>();
		String collection = root.getCollection();
		indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", collection, root));
		indexInformation.addAll(potentiallyCreateTextIndexDefinition(root, collection));

		root.doWithProperties((PropertyHandler<MongoPersistentProperty>) property -> this
				.potentiallyAddIndexForProperty(root, property, indexInformation, new CycleGuard()));

		indexInformation.addAll(resolveIndexesForDbrefs("", root.getCollection(), root));
		indexInformation.addAll(resolveIndexesForDbrefs("", collection, root));

		return indexInformation;
	}
@@ -134,13 +122,15 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
			List<IndexDefinitionHolder> indexes, CycleGuard guard) {

		try {
			String collection = root.getCollection();

			if (persistentProperty.isEntity()) {
				indexes.addAll(resolveIndexForClass(persistentProperty.getTypeInformation().getActualType(),
						persistentProperty.getFieldName(), Path.of(persistentProperty), root.getCollection(), guard));
						persistentProperty.getFieldName(), Path.of(persistentProperty), collection, guard));
			}

			IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(
					persistentProperty.getFieldName(), root.getCollection(), persistentProperty);
					persistentProperty.getFieldName(), collection, persistentProperty);
			if (indexDefinitionHolder != null) {
				indexes.add(indexDefinitionHolder);
			}
@@ -225,7 +215,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
	}

	private Collection<? extends IndexDefinitionHolder> potentiallyCreateTextIndexDefinition(
			MongoPersistentEntity<?> root) {
			MongoPersistentEntity<?> root, String collection) {

		String name = root.getType().getSimpleName() + "_TextIndex";
		if (name.getBytes().length > 127) {
@@ -261,7 +251,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
			return Collections.emptyList();
		}

		IndexDefinitionHolder holder = new IndexDefinitionHolder("", indexDefinition, root.getCollection());
		IndexDefinitionHolder holder = new IndexDefinitionHolder("", indexDefinition, collection);
		return Collections.singletonList(holder);

	}
@@ -350,14 +340,15 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
		return indexDefinitions;
	}

	@SuppressWarnings("deprecation")
	protected IndexDefinitionHolder createCompoundIndexDefinition(String dotPath, String collection, CompoundIndex index,
			MongoPersistentEntity<?> entity) {

		CompoundIndexDefinition indexDefinition = new CompoundIndexDefinition(
				resolveCompoundIndexKeyFromStringDefinition(dotPath, index.def(), entity));
				resolveCompoundIndexKeyFromStringDefinition(dotPath, index.def()));

		if (!index.useGeneratedName()) {
			indexDefinition.named(pathAwareIndexName(index.name(), dotPath, entity, null));
			indexDefinition.named(pathAwareIndexName(index.name(), dotPath, null));
		}

		if (index.unique()) {
@@ -375,8 +366,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
		return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
	}

	private org.bson.Document resolveCompoundIndexKeyFromStringDefinition(String dotPath, String keyDefinitionString,
			PersistentEntity<?, ?> entity) {
	private org.bson.Document resolveCompoundIndexKeyFromStringDefinition(String dotPath, String keyDefinitionString) {

		if (!StringUtils.hasText(dotPath) && !StringUtils.hasText(keyDefinitionString)) {
			throw new InvalidDataAccessApiUsageException("Cannot create index on root level for empty keys.");
@@ -386,11 +376,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
			return new org.bson.Document(dotPath, 1);
		}

		Object keyDefToUse = evaluate(keyDefinitionString, getEvaluationContextForProperty(entity));

		org.bson.Document dbo = (keyDefToUse instanceof org.bson.Document) ? (org.bson.Document) keyDefToUse
				: org.bson.Document.parse(ObjectUtils.nullSafeToString(keyDefToUse));

		org.bson.Document dbo = org.bson.Document.parse(keyDefinitionString);
		if (!StringUtils.hasText(dotPath)) {
			return dbo;
		}
@@ -426,7 +412,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
				IndexDirection.ASCENDING.equals(index.direction()) ? Sort.Direction.ASC : Sort.Direction.DESC);

		if (!index.useGeneratedName()) {
			indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persitentProperty.getOwner(), persitentProperty));
			indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persitentProperty));
		}

		if (index.unique()) {
@@ -445,66 +431,9 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
			indexDefinition.expire(index.expireAfterSeconds(), TimeUnit.SECONDS);
		}

		if (StringUtils.hasText(index.expireAfter())) {

			if (index.expireAfterSeconds() >= 0) {
				throw new IllegalStateException(String.format(
						"@Indexed already defines an expiration timeout of %s seconds via Indexed#expireAfterSeconds. Please make to use either expireAfterSeconds or expireAfter.",
						index.expireAfterSeconds()));
			}

			Duration timeout = computeIndexTimeout(index.expireAfter(),
					getEvaluationContextForProperty(persitentProperty.getOwner()));
			if (!timeout.isZero() && !timeout.isNegative()) {
				indexDefinition.expire(timeout);
			}
		}

		return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
	}

	/**
	 * Get the default {@link EvaluationContext}.
	 *
	 * @return never {@literal null}.
	 * @since 2.2
	 */
	protected EvaluationContext getEvaluationContext() {
		return evaluationContextProvider.getEvaluationContext(null);
	}

	/**
	 * Get the {@link EvaluationContext} for a given {@link PersistentEntity entity} the default one.
	 *
	 * @param persistentEntity can be {@literal null}
	 * @return
	 */
	private EvaluationContext getEvaluationContextForProperty(@Nullable PersistentEntity<?, ?> persistentEntity) {

		if (persistentEntity == null || !(persistentEntity instanceof BasicMongoPersistentEntity)) {
			return getEvaluationContext();
		}

		EvaluationContext contextFromEntity = ((BasicMongoPersistentEntity<?>) persistentEntity).getEvaluationContext(null);

		if (contextFromEntity != null && !EvaluationContextProvider.DEFAULT.equals(contextFromEntity)) {
			return contextFromEntity;
		}

		return getEvaluationContext();
	}

	/**
	 * Set the {@link EvaluationContextProvider} used for obtaining the {@link EvaluationContext} used to compute
	 * {@link org.springframework.expression.spel.standard.SpelExpression expressions}.
	 *
	 * @param evaluationContextProvider must not be {@literal null}.
	 * @since 2.2
	 */
	public void setEvaluationContextProvider(EvaluationContextProvider evaluationContextProvider) {
		this.evaluationContextProvider = evaluationContextProvider;
	}

	/**
	 * Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link GeoSpatialIndexed} for
	 * {@link MongoPersistentProperty}.
@@ -529,8 +458,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
		indexDefinition.withMin(index.min()).withMax(index.max());

		if (!index.useGeneratedName()) {
			indexDefinition
					.named(pathAwareIndexName(index.name(), dotPath, persistentProperty.getOwner(), persistentProperty));
			indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persistentProperty));
		}

		indexDefinition.typed(index.type()).withBucketSize(index.bucketSize()).withAdditionalField(index.additionalField());
@@ -538,18 +466,9 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
		return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
	}

	private String pathAwareIndexName(String indexName, String dotPath, @Nullable PersistentEntity<?, ?> entity,
			@Nullable MongoPersistentProperty property) {
	private String pathAwareIndexName(String indexName, String dotPath, @Nullable MongoPersistentProperty property) {

		String nameToUse = "";
		if (StringUtils.hasText(indexName)) {

			Object result = evaluate(indexName, getEvaluationContextForProperty(entity));

			if (result != null) {
				nameToUse = ObjectUtils.nullSafeToString(result);
			}
		}
		String nameToUse = StringUtils.hasText(indexName) ? indexName : "";

		if (!StringUtils.hasText(dotPath) || (property != null && dotPath.equals(property.getFieldName()))) {
			return StringUtils.hasText(nameToUse) ? nameToUse : dotPath;
@@ -595,48 +514,6 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
		}
	}

	/**
	 * Compute the index timeout value by evaluating a potential
	 * {@link org.springframework.expression.spel.standard.SpelExpression} and parsing the final value.
	 *
	 * @param timeoutValue must not be {@literal null}.
	 * @param evaluationContext must not be {@literal null}.
	 * @return never {@literal null}
	 * @since 2.2
	 * @throws IllegalArgumentException for invalid duration values.
	 */
	private static Duration computeIndexTimeout(String timeoutValue, EvaluationContext evaluationContext) {

		Object evaluatedTimeout = evaluate(timeoutValue, evaluationContext);

		if (evaluatedTimeout == null) {
			return Duration.ZERO;
		}

		if (evaluatedTimeout instanceof Duration) {
			return (Duration) evaluatedTimeout;
		}

		String val = evaluatedTimeout.toString();

		if (val == null) {
			return Duration.ZERO;
		}

		return DurationStyle.detectAndParse(val);
	}

	@Nullable
	private static Object evaluate(String value, EvaluationContext evaluationContext) {

		Expression expression = PARSER.parseExpression(value, ParserContext.TEMPLATE_EXPRESSION);
		if (expression instanceof LiteralExpression) {
			return value;
		}

		return expression.getValue(evaluationContext, Object.class);
	}

	/**
	 * {@link CycleGuard} holds information about properties and the paths for accessing those. This information is used
	 * to detect potential cycles within the references.

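The expireAfter handling removed in this file enforces one rule worth spelling out: if both TTL attributes are set on the same property, the 2.2-side resolver throws rather than guessing. A hedged illustration; the entity and field names are made up.

import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class ExpiryExamples {

	// Fine on the 2.2 side: computeIndexTimeout("10s", ...) resolves to a 10 second TTL.
	@Indexed(expireAfter = "10s") java.util.Date shortLived;

	// Rejected by the resolver code above: both expireAfterSeconds and expireAfter are set,
	// so index resolution fails with an IllegalStateException.
	// @Indexed(expireAfterSeconds = 60, expireAfter = "10s") java.util.Date conflicting;
}
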
@@ -63,7 +63,7 @@ public class ReactiveMongoPersistentEntityIndexCreator {
	 */
	public ReactiveMongoPersistentEntityIndexCreator(MongoMappingContext mappingContext,
			ReactiveIndexOperationsProvider operationsProvider) {
		this(mappingContext, operationsProvider, IndexResolver.create(mappingContext));
		this(mappingContext, operationsProvider, new MongoPersistentEntityIndexResolver(mappingContext));
	}

	/**
@@ -125,12 +125,7 @@ public class ReactiveMongoPersistentEntityIndexCreator {
		List<Mono<?>> publishers = new ArrayList<>();

		if (entity.isAnnotationPresent(Document.class)) {
			for (IndexDefinition indexDefinition : indexResolver.resolveIndexFor(entity.getTypeInformation())) {

				IndexDefinitionHolder indexToCreate = indexDefinition instanceof IndexDefinitionHolder
						? (IndexDefinitionHolder) indexDefinition
						: new IndexDefinitionHolder("", indexDefinition, entity.getCollection());

			for (IndexDefinitionHolder indexToCreate : indexResolver.resolveIndexFor(entity.getTypeInformation())) {
				publishers.add(createIndex(indexToCreate));
			}
		}
@@ -140,8 +135,6 @@ public class ReactiveMongoPersistentEntityIndexCreator {

	Mono<String> createIndex(IndexDefinitionHolder indexDefinition) {

		JustOnceLogger.logWarnIndexCreationConfigurationChange(this.getClass().getName());

		return operationsProvider.indexOps(indexDefinition.getCollection()).ensureIndex(indexDefinition) //
				.onErrorResume(ReactiveMongoPersistentEntityIndexCreator::isDataIntegrityViolation,
						e -> translateException(e, indexDefinition));

@@ -29,7 +29,6 @@ import org.springframework.data.mapping.PropertyHandler;
import org.springframework.data.mapping.model.BasicPersistentEntity;
import org.springframework.data.mongodb.MongoCollectionUtils;
import org.springframework.data.util.TypeInformation;
import org.springframework.expression.EvaluationContext;
import org.springframework.expression.Expression;
import org.springframework.expression.ParserContext;
import org.springframework.expression.common.LiteralExpression;
@@ -139,15 +138,6 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
		verifyFieldTypes();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mapping.model.BasicPersistentEntity#getEvaluationContext(java.lang.Object)
	 */
	@Override
	public EvaluationContext getEvaluationContext(Object rootObject) {
		return super.getEvaluationContext(rootObject);
	}

	private void verifyFieldUniqueness() {

		AssertFieldNameUniquenessHandler handler = new AssertFieldNameUniquenessHandler();

@@ -67,7 +67,8 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
	/**
	 * Creates a new {@link BasicMongoPersistentProperty}.
	 *
	 * @param property
	 * @param field
	 * @param propertyDescriptor
	 * @param owner
	 * @param simpleTypeHolder
	 * @param fieldNamingStrategy
@@ -143,36 +144,6 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
		return fieldName;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#getFieldType()
	 */
	@Override
	public Class<?> getFieldType() {

		Field fieldAnnotation = findAnnotation(Field.class);

		if (!isIdProperty()) {

			if (fieldAnnotation == null || fieldAnnotation.targetType() == FieldType.IMPLICIT) {
				return getType();
			}

			return fieldAnnotation.targetType().getJavaClass();
		}

		if (fieldAnnotation == null) {
			return FieldType.OBJECT_ID.getJavaClass();
		}

		FieldType fieldType = fieldAnnotation.targetType();
		if (fieldType == FieldType.IMPLICIT) {
			return getType();
		}

		return fieldType.getJavaClass();
	}

	/**
	 * @return true if {@link org.springframework.data.mongodb.core.mapping.Field} having non blank
	 *         {@link org.springframework.data.mongodb.core.mapping.Field#value()} present.

@@ -33,7 +33,6 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
	private @Nullable boolean dbRefResolved;
	private @Nullable DBRef dbref;
	private @Nullable String fieldName;
	private @Nullable Class<?> fieldType;
	private @Nullable Boolean usePropertyAccess;
	private @Nullable Boolean isTransient;

@@ -90,20 +89,6 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
		return this.fieldName;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#getFieldType()
	 */
	@Override
	public Class<?> getFieldType() {

		if (this.fieldType == null) {
			this.fieldType = super.getFieldType();
		}

		return this.fieldType;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mapping.model.AnnotationBasedPersistentProperty#usePropertyAccess()

@@ -21,13 +21,10 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.core.annotation.AliasFor;

/**
 * Annotation to define custom metadata for document fields.
 *
 * @author Oliver Gierke
 * @author Christoph Strobl
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@@ -35,34 +32,16 @@ import org.springframework.core.annotation.AliasFor;
public @interface Field {

	/**
	 * The key to be used to store the field inside the document. Alias for {@link #name()}.
	 * The key to be used to store the field inside the document.
	 *
	 * @return an empty {@link String} by default.
	 * @return
	 */
	@AliasFor("name")
	String value() default "";

	/**
	 * The key to be used to store the field inside the document. Alias for {@link #value()}.
	 *
	 * @return an empty {@link String} by default.
	 * @since 2.2
	 */
	@AliasFor("value")
	String name() default "";

	/**
	 * The order in which various fields shall be stored. Has to be a positive integer.
	 *
	 * @return the order the field shall have in the document or -1 if undefined.
	 */
	int order() default Integer.MAX_VALUE;

	/**
	 * The actual desired target type the field should be stored as.
	 *
	 * @return {@link FieldType#IMPLICIT} by default.
	 * @since 2.2
	 */
	FieldType targetType() default FieldType.IMPLICIT;
}

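A hedged sketch of the 2.2-side attributes being removed here: name() as an alias for value(), plus an explicit storage target type. The domain class, document keys, and the DECIMAL128 choice for BigDecimal are illustrative assumptions.

import java.math.BigDecimal;

import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.FieldType;

@Document
class Invoice {

	// value()/name() are aliases for the document key; order() controls field position; targetType() the stored BSON type.
	@Field(name = "total_amount", order = 1, targetType = FieldType.DECIMAL128) BigDecimal total;

	// Shorthand form using the value() alias only.
	@Field("currency_code") String currency;
}
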
@@ -1,86 +0,0 @@
/*
 * Copyright 2018-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.mapping;

import java.util.Date;
import java.util.regex.Pattern;

import org.bson.types.BSONTimestamp;
import org.bson.types.Binary;
import org.bson.types.Code;
import org.bson.types.Decimal128;
import org.bson.types.ObjectId;

/**
 * Enumeration of field value types that can be used to represent a {@link org.bson.Document} field value. This
 * enumeration contains a subset of {@link org.bson.BsonType} that is supported by the mapping and conversion
 * components.
 * <p/>
 * Bson types are identified by a {@code byte} {@link #getBsonType() value}. This enumeration typically returns the
 * according bson type value except for {@link #IMPLICIT} which is a marker to derive the field type from a property.
 *
 * @author Mark Paluch
 * @author Christoph Strobl
 * @since 2.2
 * @see org.bson.BsonType
 */
public enum FieldType {

	/**
	 * Implicit type that is derived from the property value.
	 */
	IMPLICIT(-1, Object.class), //
	DOUBLE(1, Double.class), //
	STRING(2, String.class), //
	ARRAY(4, Object[].class), //
	BINARY(5, Binary.class), //
	OBJECT_ID(7, ObjectId.class), //
	BOOLEAN(8, Boolean.class), //
	DATE_TIME(9, Date.class), //
	PATTERN(11, Pattern.class), //
	SCRIPT(13, Code.class), //
	INT32(15, Integer.class), //
	TIMESTAMP(16, BSONTimestamp.class), //
	INT64(17, Long.class), //
	DECIMAL128(18, Decimal128.class);

	private final int bsonType;
	private final Class<?> javaClass;

	FieldType(int bsonType, Class<?> javaClass) {

		this.bsonType = bsonType;
		this.javaClass = javaClass;
	}

	/**
	 * Returns the BSON type identifier. Can be {@code -1} if {@link FieldType} maps to a synthetic Bson type.
	 *
	 * @return the BSON type identifier. Can be {@code -1} if {@link FieldType} maps to a synthetic Bson type.
	 */
	public int getBsonType() {
		return bsonType;
	}

	/**
	 * Returns the Java class used to represent the type.
	 *
	 * @return the Java class used to represent the type.
	 */
	public Class<?> getJavaClass() {
		return javaClass;
	}
}

@@ -1,62 +0,0 @@
/*
 * Copyright 2018-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.mapping;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.core.annotation.AliasFor;
import org.springframework.data.annotation.Id;

/**
 * {@link MongoId} represents a MongoDB specific {@link Id} annotation that allows customizing {@literal id} conversion.
 * Id properties use {@link org.springframework.data.mongodb.core.mapping.FieldType#IMPLICIT} as the default
 * {@literal id's} target type. This means that the actual property value is used. No conversion attempts to any other
 * type are made. <br />
 * In contrast to {@link Id @Id}, {@link String} {@literal id's} are stored as the such even when the actual value
 * represents a valid {@link org.bson.types.ObjectId#isValid(String) ObjectId hex String}. To trigger {@link String} to
 * {@link org.bson.types.ObjectId} conversion use {@link MongoId#targetType() @MongoId(FieldType.OBJECT_ID)}.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 2.2
 */
@Id
@Field
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.FIELD, ElementType.METHOD, ElementType.ANNOTATION_TYPE })
public @interface MongoId {

	/**
	 * @return the preferred id type.
	 * @see #targetType()
	 */
	@AliasFor(annotation = Field.class, attribute="targetType")
	FieldType value() default FieldType.IMPLICIT;

	/**
	 * Get the preferred {@literal _id} type to be used. Defaults to {@link FieldType#IMPLICIT} which uses the property's
	 * type. If defined different, the given value is attempted to be converted into the desired target type via
	 * {@link org.springframework.data.mongodb.core.convert.MongoConverter#convertId(Object, Class)}.
	 *
	 * @return the preferred {@literal id} type. {@link FieldType#IMPLICIT} by default.
	 */
	@AliasFor(annotation = Field.class, attribute="targetType")
	FieldType targetType() default FieldType.IMPLICIT;

}
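A small sketch of the id behaviour described in the Javadoc above (2.2 side); the domain class is made up.

import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.FieldType;
import org.springframework.data.mongodb.core.mapping.MongoId;

@Document
class Account {

	// Plain @MongoId would store the assigned String as-is, even if it looks like an ObjectId hex value.
	// Opting in to FieldType.OBJECT_ID triggers the String -> ObjectId conversion the Javadoc describes.
	@MongoId(FieldType.OBJECT_ID) String id;
}
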
@@ -43,7 +43,6 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis

	private FieldNamingStrategy fieldNamingStrategy = DEFAULT_NAMING_STRATEGY;
	private @Nullable ApplicationContext context;
	private boolean autoIndexCreation = true;

	/**
	 * Creates a new {@link MongoMappingContext}.
@@ -102,30 +101,4 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis

		this.context = applicationContext;
	}

	/**
	 * Returns whether auto-index creation is enabled or disabled. <br />
	 * <strong>NOTE:</strong>Index creation should happen at a well-defined time that is ideally controlled by the
	 * application itself.
	 *
	 * @return {@literal true} when auto-index creation is enabled; {@literal false} otherwise.
	 * @since 2.2
	 * @see org.springframework.data.mongodb.core.index.Indexed
	 */
	public boolean isAutoIndexCreation() {
		return autoIndexCreation;
	}

	/**
	 * Enables/disables auto-index creation. <br />
	 * <strong>NOTE:</strong>Index creation should happen at a well-defined time that is ideally controlled by the
	 * application itself.
	 *
	 * @param autoCreateIndexes set to {@literal false} to disable auto-index creation.
	 * @since 2.2
	 * @see org.springframework.data.mongodb.core.index.Indexed
	 */
	public void setAutoIndexCreation(boolean autoCreateIndexes) {
		this.autoIndexCreation = autoCreateIndexes;
	}
}

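How the 2.2-side toggle removed here is meant to be used, following the 'MongoMappingContext#setAutoIndexCreation(boolean)' hint logged by JustOnceLogger earlier in this diff. A hedged configuration sketch; wiring it into a full Mongo configuration class is omitted.

import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

class MappingContextSetup {

	MongoMappingContext mongoMappingContext() {

		MongoMappingContext mappingContext = new MongoMappingContext();

		// Explicit opt-out of automatic index creation; indexes are then created manually,
		// e.g. via the IndexResolver sketch shown earlier.
		mappingContext.setAutoIndexCreation(false);
		return mappingContext;
	}
}
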
@@ -38,15 +38,6 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
	 */
	String getFieldName();

	/**
	 * Returns the {@link Class Java FieldType} of the field a property is persisted to.
	 *
	 * @return
	 * @since 2.2
	 * @see FieldType
	 */
	Class<?> getFieldType();

	/**
	 * Returns the order of the field if defined. Will return -1 if undefined.
	 *
@@ -105,30 +96,12 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
	@Nullable
	DBRef getDBRef();

	/**
	 * Returns whether property access shall be used for reading the property value. This means it will use the getter
	 * instead of field access.
	 *
	 * @return
	 */
	boolean usePropertyAccess();

	/**
	 * @return {@literal true} if the property defines an explicit {@link Field#targetType() target type}.
	 * @since 2.2
	 */
	default boolean hasExplicitWriteTarget() {

		Field field = findAnnotation(Field.class);
		return field != null ? !FieldType.IMPLICIT.equals(field.targetType()) : false;
	}

	/**
	 * Simple {@link Converter} implementation to transform a {@link MongoPersistentProperty} into its field name.
	 *
	 * @author Oliver Gierke
	 */
	enum PropertyToFieldNameConverter implements Converter<MongoPersistentProperty, String> {
	public enum PropertyToFieldNameConverter implements Converter<MongoPersistentProperty, String> {

		INSTANCE;

@@ -140,4 +113,12 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
			return source.getFieldName();
		}
	}

	/**
	 * Returns whether property access shall be used for reading the property value. This means it will use the getter
	 * instead of field access.
	 *
	 * @return
	 */
	boolean usePropertyAccess();
}

@@ -24,12 +24,10 @@ import java.util.regex.Pattern;

import org.bson.*;
import org.bson.types.Binary;
import org.bson.types.Code;
import org.bson.types.CodeWScope;
import org.bson.types.CodeWithScope;
import org.bson.types.Decimal128;
import org.bson.types.ObjectId;
import org.bson.types.Symbol;
import org.springframework.data.mapping.model.SimpleTypeHolder;

import com.mongodb.DBRef;
@@ -45,25 +43,22 @@ public abstract class MongoSimpleTypes {
	public static final Set<Class<?>> AUTOGENERATED_ID_TYPES;

	static {
		Set<Class<?>> classes = new HashSet<Class<?>>();
		Set<Class<?>> classes = new HashSet<>();
		classes.add(ObjectId.class);
		classes.add(String.class);
		classes.add(BigInteger.class);
		AUTOGENERATED_ID_TYPES = Collections.unmodifiableSet(classes);

		Set<Class<?>> simpleTypes = new HashSet<Class<?>>();
		simpleTypes.add(Binary.class);
		simpleTypes.add(BsonObjectId.class);
		Set<Class<?>> simpleTypes = new HashSet<>();
		simpleTypes.add(DBRef.class);
		simpleTypes.add(Decimal128.class);
		simpleTypes.add(org.bson.Document.class);
		simpleTypes.add(Code.class);
		simpleTypes.add(ObjectId.class);
		simpleTypes.add(CodeWScope.class);
		simpleTypes.add(CodeWithScope.class);
		simpleTypes.add(ObjectId.class);
		simpleTypes.add(org.bson.Document.class);
		simpleTypes.add(Pattern.class);
		simpleTypes.add(Symbol.class);
		simpleTypes.add(Binary.class);
		simpleTypes.add(UUID.class);
		simpleTypes.add(Decimal128.class);

		simpleTypes.add(BsonBinary.class);
		simpleTypes.add(BsonBoolean.class);
@@ -71,7 +66,6 @@ public abstract class MongoSimpleTypes {
		simpleTypes.add(BsonDbPointer.class);
		simpleTypes.add(BsonDecimal128.class);
		simpleTypes.add(BsonDocument.class);
		simpleTypes.add(BsonDocument.class);
		simpleTypes.add(BsonDouble.class);
		simpleTypes.add(BsonInt32.class);
		simpleTypes.add(BsonInt64.class);
@@ -86,7 +80,19 @@ public abstract class MongoSimpleTypes {
	}

	private static final Set<Class<?>> MONGO_SIMPLE_TYPES;
	public static final SimpleTypeHolder HOLDER = new SimpleTypeHolder(MONGO_SIMPLE_TYPES, true);

	public static final SimpleTypeHolder HOLDER = new SimpleTypeHolder(MONGO_SIMPLE_TYPES, true) {

		@Override
		public boolean isSimpleType(Class<?> type) {

			if (type.getName().startsWith("java.time")) {
				return false;
			}

			return super.isSimpleType(type);
		}
	};

	private MongoSimpleTypes() {}
}

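The anonymous HOLDER override shown above (2.2 side) is the piece that routes java.time values through the conversion layer instead of treating them as Mongo-native simple types. A tiny hedged check, illustrative only:

import java.time.Instant;

import org.bson.types.ObjectId;
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;

class SimpleTypeCheck {

	public static void main(String[] args) {

		// true: ObjectId is registered as a Mongo-native simple type.
		System.out.println(MongoSimpleTypes.HOLDER.isSimpleType(ObjectId.class));

		// false on the 2.2 side: java.time types are deliberately excluded so converters handle them.
		System.out.println(MongoSimpleTypes.HOLDER.isSimpleType(Instant.class));
	}
}
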
@@ -115,7 +115,8 @@ class ChangeStreamTask extends CursorReadingTask<ChangeStreamDocument<Document>,
					.orElseGet(() -> ClassUtils.isAssignable(Document.class, targetType) ? FullDocument.DEFAULT
							: FullDocument.UPDATE_LOOKUP);

			startAt = changeStreamOptions.getResumeBsonTimestamp().orElse(null);
			startAt = changeStreamOptions.getResumeTimestamp().map(it -> new BsonTimestamp((int) it.getEpochSecond(), 0))
					.orElse(null);
		}

		MongoDatabase db = StringUtils.hasText(options.getDatabaseName())

Some files were not shown because too many files have changed in this diff.