Compare commits
47 Commits — comparing 1.9.0.M1 ... 1.8.4.RELEASE

| SHA1 |
|---|
| 1e6632846f |
| 179b246173 |
| 8f29600eb4 |
| 717c6100eb |
| 38dade7c7a |
| 4a4c53766e |
| 7c5d15edb5 |
| 8c2af8f0fc |
| 419d0a4f92 |
| ce919e57c6 |
| 781de00ab1 |
| e06d352a71 |
| 294990891c |
| 530f7396fa |
| 79aabfbbde |
| 6af3729fb3 |
| 437a48ff4a |
| 583339641d |
| 8af4bef772 |
| b0336c27a9 |
| 0b492b6c55 |
| b3116a523b |
| 2ba9e5f403 |
| 8b1805a145 |
| 5832055840 |
| a13e7b8b24 |
| a152aa3ce8 |
| ca56ea4aea |
| 284e2f462d |
| 257bc891dd |
| d26db17bf0 |
| d760d9cc11 |
| be65970710 |
| 418a4f8b8c |
| fa5f93aad5 |
| 504e14d4a3 |
| f68effe155 |
| 6fc80f287e |
| 02aed56fd1 |
| f1771504f6 |
| 741a27edae |
| e1869abf3f |
| c7be5bfcaa |
| 9968b752e7 |
| e001c6bf89 |
| 913d383b99 |
| f446d7e29f |
@@ -1,27 +0,0 @@
= Contributor Code of Conduct

As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities.

We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality.

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery
* Personal attacks
* Trolling or insulting/derogatory comments
* Public or private harassment
* Publishing other's private information, such as physical or electronic addresses,
without explicit permission
* Other unethical or unprofessional conduct

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.

By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team.

This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community.

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting a project maintainer at spring-code-of-conduct@pivotal.io.
All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances.
Maintainers are obligated to maintain confidentiality with regard to the reporter of an incident.

This Code of Conduct is adapted from the http://contributor-covenant.org[Contributor Covenant], version 1.3.0, available at http://contributor-covenant.org/version/1/3/0/[contributor-covenant.org/version/1/3/0/].
1  CONTRIBUTING.MD  (new file)
@@ -0,0 +1 @@
You find the contribution guidelines for Spring Data projects [here](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.md).

@@ -1,3 +0,0 @@
= Spring Data contribution guidelines

You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here].
57  pom.xml
@@ -1,11 +1,11 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

<modelVersion>4.0.0</modelVersion>

<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.9.0.M1</version>
<version>1.8.4.RELEASE</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>

@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>1.8.0.M1</version>
<version>1.7.4.RELEASE</version>
</parent>

<modules>

@@ -28,8 +28,8 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>1.12.0.M1</springdata.commons>
<mongo>2.14.0</mongo>
<springdata.commons>1.11.4.RELEASE</springdata.commons>
<mongo>2.13.0</mongo>
<mongo.osgi>2.13.0</mongo.osgi>
</properties>

@@ -123,7 +123,7 @@
<id>mongo3</id>
<properties>
<mongo>3.0.4</mongo>
<mongo>3.0.2</mongo>
</properties>

</profile>

@@ -132,7 +132,7 @@
<id>mongo3-next</id>
<properties>
<mongo>3.0.5-SNAPSHOT</mongo>
<mongo>3.0.0-SNAPSHOT</mongo>
</properties>

<repositories>

@@ -143,45 +143,6 @@
</repositories>

</profile>

<profile>

<id>mongo31</id>
<properties>
<mongo>3.1.0</mongo>
</properties>

</profile>

<profile>

<id>mongo32-next</id>
<properties>
<mongo>3.2.0-SNAPSHOT</mongo>
</properties>

<repositories>
<repository>
<id>mongo-snapshots</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
</repository>
</repositories>

</profile>

<profile>
<id>release</id>
<build>
<plugins>
<plugin>
<groupId>org.jfrog.buildinfo</groupId>
<artifactId>artifactory-maven-plugin</artifactId>
<inherited>false</inherited>
</plugin>
</plugins>
</build>
</profile>

</profiles>

<dependencies>

@@ -195,8 +156,8 @@

<repositories>
<repository>
<id>spring-libs-milestone</id>
<url>https://repo.spring.io/libs-milestone</url>
<id>spring-libs-release</id>
<url>https://repo.spring.io/libs-release</url>
</repository>
</repositories>
@@ -6,7 +6,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.9.0.M1</version>
<version>1.8.4.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -48,7 +48,7 @@
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.9.0.M1</version>
<version>1.8.4.RELEASE</version>
</dependency>

<dependency>

@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.9.0.M1</version>
<version>1.8.4.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -1,11 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>

<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.9.0.M1</version>
<version>1.8.4.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -1,5 +1,5 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<context version="7.2.2.230">
|
||||
<context version="7.1.10.209">
|
||||
<scope type="Project" name="spring-data-mongodb">
|
||||
<element type="TypeFilterReferenceOverridden" name="Filter">
|
||||
<element type="IncludeTypePattern" name="org.springframework.data.mongodb.**"/>
|
||||
@@ -35,12 +35,6 @@
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|API" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|Implementation" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element type="Subsystem" name="CDI">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.cdi.**"/>
|
||||
</element>
|
||||
<stereotype name="Unrestricted"/>
|
||||
</element>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Config" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
|
||||
</element>
|
||||
@@ -82,11 +76,6 @@
|
||||
</element>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element type="Subsystem" name="Script">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.script.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element type="Subsystem" name="Conversion">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.core.convert.**"/>
|
||||
@@ -94,7 +83,6 @@
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Script" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element type="Subsystem" name="SpEL">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
@@ -117,11 +105,6 @@
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element type="Subsystem" name="MapReduce">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.mapreduce.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element type="Subsystem" name="Core">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="WeakTypePattern" name="**.core.**"/>
|
||||
@@ -130,10 +113,8 @@
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Conversion" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Index" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|MapReduce" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Script" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element type="Subsystem" name="Util">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
@@ -188,32 +169,7 @@
|
||||
</element>
|
||||
<element type="Subsystem" name="Querydsl">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="com.querydsl.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element type="Subsystem" name="Slf4j">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="org.slf4j.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element type="Subsystem" name="Jackson">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="com.fasterxml.jackson.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element type="Subsystem" name="DOM">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="org.w3c.dom.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element type="Subsystem" name="AOP Alliance">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="org.aopalliance.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element type="Subsystem" name="Guava">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="com.google.common.**"/>
|
||||
<element type="IncludeTypePattern" name="com.mysema.query.**"/>
|
||||
</element>
|
||||
</element>
|
||||
</architecture>
|
||||
|
||||
@@ -11,7 +11,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.9.0.M1</version>
<version>1.8.4.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -59,14 +59,14 @@
</dependency>

<dependency>
<groupId>com.querydsl</groupId>
<groupId>com.mysema.querydsl</groupId>
<artifactId>querydsl-mongodb</artifactId>
<version>${querydsl}</version>
<optional>true</optional>
</dependency>

<dependency>
<groupId>com.querydsl</groupId>
<groupId>com.mysema.querydsl</groupId>
<artifactId>querydsl-apt</artifactId>
<version>${querydsl}</version>
<scope>provided</scope>

@@ -183,7 +183,7 @@
<version>${apt}</version>
<dependencies>
<dependency>
<groupId>com.querydsl</groupId>
<groupId>com.mysema.querydsl</groupId>
<artifactId>querydsl-apt</artifactId>
<version>${querydsl}</version>
</dependency>
@@ -1,61 +0,0 @@
/*
 * Copyright 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb;

import java.util.List;

import org.springframework.dao.DataAccessException;

import com.mongodb.BulkWriteError;
import com.mongodb.BulkWriteException;
import com.mongodb.BulkWriteResult;

/**
 * Is thrown when errors occur during bulk operations.
 *
 * @author Tobias Trelle
 * @author Oliver Gierke
 * @since 1.9
 */
public class BulkOperationException extends DataAccessException {

	private static final long serialVersionUID = 73929601661154421L;

	private final List<BulkWriteError> errors;
	private final BulkWriteResult result;

	/**
	 * Creates a new {@link BulkOperationException} with the given message and source {@link BulkWriteException}.
	 *
	 * @param message must not be {@literal null}.
	 * @param source must not be {@literal null}.
	 */
	public BulkOperationException(String message, BulkWriteException source) {

		super(message, source);

		this.errors = source.getWriteErrors();
		this.result = source.getWriteResult();
	}

	public List<BulkWriteError> getErrors() {
		return errors;
	}

	public BulkWriteResult getResult() {
		return result;
	}
}
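The deleted class above surfaces driver-level `BulkWriteException`s as a Spring `DataAccessException`. A minimal sketch of how calling code might react to it against the 1.9 M1 sources shown here; the `template` instance and the `"people"` collection name are illustrative assumptions, not part of the diff:

```java
import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoOperations;

import com.mongodb.BulkWriteError;

public class BulkErrorHandlingExample {

	// 'template' and the "people" collection are assumptions for this sketch.
	public void insertIgnoringFailures(MongoOperations template, Iterable<Object> documents) {

		BulkOperations bulk = template.bulkOps(BulkMode.UNORDERED, "people");
		for (Object document : documents) {
			bulk.insert(document);
		}

		try {
			bulk.execute();
		} catch (BulkOperationException e) {
			// Each BulkWriteError carries the index of the failed operation and the error code.
			for (BulkWriteError error : e.getErrors()) {
				System.err.printf("operation %d failed with code %d: %s%n",
						error.getIndex(), error.getCode(), error.getMessage());
			}
		}
	}
}
```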
@@ -1,145 +0,0 @@
/*
 * Copyright 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import java.util.List;

import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.util.Tuple;

import com.mongodb.BulkWriteResult;

/**
 * Bulk operations for insert/update/remove actions on a collection. These bulks operation are available since MongoDB
 * 2.6 and make use of low level bulk commands on the protocol level. This interface defines a fluent API to add
 * multiple single operations or list of similar operations in sequence which can then eventually be executed by calling
 * {@link #execute()}.
 *
 * @author Tobias Trelle
 * @author Oliver Gierke
 * @since 1.9
 */
public interface BulkOperations {

	/**
	 * Mode for bulk operation.
	 **/
	public enum BulkMode {

		/** Perform bulk operations in sequence. The first error will cancel processing. */
		ORDERED,

		/** Perform bulk operations in parallel. Processing will continue on errors. */
		UNORDERED
	};

	/**
	 * Add a single insert to the bulk operation.
	 *
	 * @param documents the document to insert, must not be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the insert added, will never be {@literal null}.
	 */
	BulkOperations insert(Object documents);

	/**
	 * Add a list of inserts to the bulk operation.
	 *
	 * @param documents List of documents to insert, must not be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the insert added, will never be {@literal null}.
	 */
	BulkOperations insert(List<? extends Object> documents);

	/**
	 * Add a single update to the bulk operation. For the update request, only the first matching document is updated.
	 *
	 * @param query update criteria, must not be {@literal null}.
	 * @param update {@link Update} operation to perform, must not be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations updateOne(Query query, Update update);

	/**
	 * Add a list of updates to the bulk operation. For each update request, only the first matching document is updated.
	 *
	 * @param updates Update operations to perform.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations updateOne(List<Tuple<Query, Update>> updates);

	/**
	 * Add a single update to the bulk operation. For the update request, all matching documents are updated.
	 *
	 * @param query Update criteria.
	 * @param update Update operation to perform.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations updateMulti(Query query, Update update);

	/**
	 * Add a list of updates to the bulk operation. For each update request, all matching documents are updated.
	 *
	 * @param updates Update operations to perform.
	 * @return The bulk operation.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations updateMulti(List<Tuple<Query, Update>> updates);

	/**
	 * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
	 * else an insert.
	 *
	 * @param query Update criteria.
	 * @param update Update operation to perform.
	 * @return The bulk operation.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations upsert(Query query, Update update);

	/**
	 * Add a list of upserts to the bulk operation. An upsert is an update if the set of matching documents is not empty,
	 * else an insert.
	 *
	 * @param updates Updates/insert operations to perform.
	 * @return The bulk operation.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations upsert(List<Tuple<Query, Update>> updates);

	/**
	 * Add a single remove operation to the bulk operation.
	 *
	 * @param remove the {@link Query} to select the documents to be removed, must not be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the removal added, will never be {@literal null}.
	 */
	BulkOperations remove(Query remove);

	/**
	 * Add a list of remove operations to the bulk operation.
	 *
	 * @param removes the remove operations to perform, must not be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the removal added, will never be {@literal null}.
	 */
	BulkOperations remove(List<Query> removes);

	/**
	 * Execute all bulk operations using the default write concern.
	 *
	 * @return Result of the bulk operation providing counters for inserts/updates etc.
	 * @throws {@link BulkOperationException} if an error occurred during bulk processing.
	 */
	BulkWriteResult execute();
}
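The interface above is the fluent entry point to MongoDB 2.6+ bulk write commands. A short usage sketch, assuming a configured `MongoOperations` named `template` and an illustrative `Person` document class (both assumptions, not part of the diff):

```java
import java.util.Arrays;

import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.BulkWriteResult;

public class BulkOperationsUsageExample {

	public BulkWriteResult runBulk(MongoOperations template) {

		// ORDERED: operations run in sequence and stop at the first error.
		BulkOperations bulk = template.bulkOps(BulkMode.ORDERED, Person.class);

		return bulk
				.insert(Arrays.asList(new Person("Dave"), new Person("Carter")))
				.updateOne(Query.query(Criteria.where("firstName").is("Dave")),
						new Update().set("lastName", "Matthews"))
				.upsert(Query.query(Criteria.where("firstName").is("Boyd")),
						new Update().set("lastName", "Tinsley"))
				.remove(Query.query(Criteria.where("firstName").is("Carter")))
				.execute();
	}

	// Illustrative domain type; any mapped document class works here.
	static class Person {

		String firstName, lastName;

		Person(String firstName) {
			this.firstName = firstName;
		}
	}
}
```

Every mutating method returns the same `BulkOperations` instance, which is what makes the chained style above possible; nothing is sent to the server until `execute()` is called.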
@@ -1,327 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.util.Tuple;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.BulkWriteOperation;
|
||||
import com.mongodb.BulkWriteRequestBuilder;
|
||||
import com.mongodb.BulkWriteResult;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
/**
|
||||
* Default implementation for {@link BulkOperations}.
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
* @since 1.9
|
||||
*/
|
||||
class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
private final MongoOperations mongoOperations;
|
||||
private final BulkMode bulkMode;
|
||||
private final String collectionName;
|
||||
private final Class<?> entityType;
|
||||
|
||||
private PersistenceExceptionTranslator exceptionTranslator;
|
||||
private WriteConcernResolver writeConcernResolver;
|
||||
private WriteConcern defaultWriteConcern;
|
||||
|
||||
private BulkWriteOperation bulk;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultBulkOperations} for the given {@link MongoOperations}, {@link BulkMode}, collection
|
||||
* name and {@link WriteConcern}.
|
||||
*
|
||||
* @param mongoOperations The underlying {@link MongoOperations}, must not be {@literal null}.
|
||||
* @param bulkMode must not be {@literal null}.
|
||||
* @param collectionName Name of the collection to work on, must not be {@literal null} or empty.
|
||||
* @param entityType the entity type, can be {@literal null}.
|
||||
*/
|
||||
DefaultBulkOperations(MongoOperations mongoOperations, BulkMode bulkMode, String collectionName,
|
||||
Class<?> entityType) {
|
||||
|
||||
Assert.notNull(mongoOperations, "MongoOperations must not be null!");
|
||||
Assert.notNull(bulkMode, "BulkMode must not be null!");
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
|
||||
this.mongoOperations = mongoOperations;
|
||||
this.bulkMode = bulkMode;
|
||||
this.collectionName = collectionName;
|
||||
this.entityType = entityType;
|
||||
|
||||
this.exceptionTranslator = new MongoExceptionTranslator();
|
||||
this.writeConcernResolver = DefaultWriteConcernResolver.INSTANCE;
|
||||
|
||||
this.bulk = initBulkOperation();
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link PersistenceExceptionTranslator} to be used. Defaults to {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @param exceptionTranslator can be {@literal null}.
|
||||
*/
|
||||
public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) {
|
||||
this.exceptionTranslator = exceptionTranslator == null ? new MongoExceptionTranslator() : exceptionTranslator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link WriteConcernResolver} to be used. Defaults to {@link DefaultWriteConcernResolver}.
|
||||
*
|
||||
* @param writeConcernResolver can be {@literal null}.
|
||||
*/
|
||||
public void setWriteConcernResolver(WriteConcernResolver writeConcernResolver) {
|
||||
this.writeConcernResolver = writeConcernResolver == null ? DefaultWriteConcernResolver.INSTANCE
|
||||
: writeConcernResolver;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}.
|
||||
*
|
||||
* @param defaultWriteConcern can be {@literal null}.
|
||||
*/
|
||||
public void setDefaultWriteConcern(WriteConcern defaultWriteConcern) {
|
||||
this.defaultWriteConcern = defaultWriteConcern;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#insert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations insert(Object document) {
|
||||
|
||||
Assert.notNull(document, "Document must not be null!");
|
||||
|
||||
bulk.insert((DBObject) mongoOperations.getConverter().convertToMongoType(document));
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#insert(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations insert(List<? extends Object> documents) {
|
||||
|
||||
Assert.notNull(documents, "Documents must not be null!");
|
||||
|
||||
for (Object document : documents) {
|
||||
insert(document);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateOne(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public BulkOperations updateOne(Query query, Update update) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
return updateOne(Arrays.asList(Tuple.of(query, update)));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateOne(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations updateOne(List<Tuple<Query, Update>> updates) {
|
||||
|
||||
Assert.notNull(updates, "Updates must not be null!");
|
||||
|
||||
for (Tuple<Query, Update> update : updates) {
|
||||
update(update.getFirst(), update.getSecond(), false, false);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public BulkOperations updateMulti(Query query, Update update) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
return updateMulti(Arrays.asList(Tuple.of(query, update)));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations updateMulti(List<Tuple<Query, Update>> updates) {
|
||||
|
||||
Assert.notNull(updates, "Updates must not be null!");
|
||||
|
||||
for (Tuple<Query, Update> update : updates) {
|
||||
update(update.getFirst(), update.getSecond(), false, true);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations upsert(Query query, Update update) {
|
||||
return update(query, update, true, true);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#upsert(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations upsert(List<Tuple<Query, Update>> updates) {
|
||||
|
||||
for (Tuple<Query, Update> update : updates) {
|
||||
upsert(update.getFirst(), update.getSecond());
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#remove(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations remove(Query query) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
|
||||
bulk.find(query.getQueryObject()).remove();
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#remove(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations remove(List<Query> removes) {
|
||||
|
||||
Assert.notNull(removes, "Removals must not be null!");
|
||||
|
||||
for (Query query : removes) {
|
||||
remove(query);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#executeBulk()
|
||||
*/
|
||||
@Override
|
||||
public BulkWriteResult execute() {
|
||||
|
||||
MongoAction action = new MongoAction(defaultWriteConcern, MongoActionOperation.BULK, collectionName, entityType,
|
||||
null, null);
|
||||
WriteConcern writeConcern = writeConcernResolver.resolve(action);
|
||||
|
||||
try {
|
||||
|
||||
return writeConcern == null ? bulk.execute() : bulk.execute(writeConcern);
|
||||
|
||||
} catch (BulkWriteException o_O) {
|
||||
|
||||
DataAccessException toThrow = exceptionTranslator.translateExceptionIfPossible(o_O);
|
||||
throw toThrow == null ? o_O : toThrow;
|
||||
|
||||
} finally {
|
||||
this.bulk = initBulkOperation();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs update and upsert bulk operations.
|
||||
*
|
||||
* @param query the {@link Query} to determine documents to update.
|
||||
* @param update the {@link Update} to perform, must not be {@literal null}.
|
||||
* @param upsert whether to upsert.
|
||||
* @param multi whether to issue a multi-update.
|
||||
* @return the {@link BulkOperations} with the update registered.
|
||||
*/
|
||||
private BulkOperations update(Query query, Update update, boolean upsert, boolean multi) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
BulkWriteRequestBuilder builder = bulk.find(query.getQueryObject());
|
||||
|
||||
if (upsert) {
|
||||
|
||||
if (multi) {
|
||||
builder.upsert().update(update.getUpdateObject());
|
||||
} else {
|
||||
builder.upsert().updateOne(update.getUpdateObject());
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
if (multi) {
|
||||
builder.update(update.getUpdateObject());
|
||||
} else {
|
||||
builder.updateOne(update.getUpdateObject());
|
||||
}
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
private final BulkWriteOperation initBulkOperation() {
|
||||
|
||||
DBCollection collection = mongoOperations.getCollection(collectionName);
|
||||
|
||||
switch (bulkMode) {
|
||||
case ORDERED:
|
||||
return collection.initializeOrderedBulkOperation();
|
||||
case UNORDERED:
|
||||
return collection.initializeUnorderedBulkOperation();
|
||||
}
|
||||
|
||||
throw new IllegalStateException("BulkMode was null!");
|
||||
}
|
||||
}
|
||||
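One detail worth noting in the `DefaultBulkOperations` implementation above: `execute()` re-initializes the underlying `BulkWriteOperation` in its `finally` block, so the same `BulkOperations` instance can be reused for a follow-up batch. A sketch under that assumption; the `template` instance and the `"people"` collection name are illustrative:

```java
import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

public class ReusedBulkExample {

	public void runTwoBatches(MongoOperations template) {

		// UNORDERED: the driver may parallelize and keeps going past individual errors.
		BulkOperations bulk = template.bulkOps(BulkMode.UNORDERED, "people");

		// First batch.
		bulk.updateMulti(Query.query(Criteria.where("active").is(true)),
				new Update().inc("loginCount", 1));
		int firstMatched = bulk.execute().getMatchedCount();

		// Second batch on the same instance; execute() above reset the internal state.
		bulk.remove(Query.query(Criteria.where("active").is(false)));
		int removed = bulk.execute().getRemovedCount();

		System.out.printf("matched %d, removed %d%n", firstMatched, removed);
	}
}
```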
@@ -1,32 +0,0 @@
/*
 * Copyright 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import com.mongodb.WriteConcern;

/**
 * Default {@link WriteConcernResolver} resolving the {@link WriteConcern} from the given {@link MongoAction}.
 *
 * @author Oliver Gierke
 */
enum DefaultWriteConcernResolver implements WriteConcernResolver {

	INSTANCE;

	public WriteConcern resolve(MongoAction action) {
		return action.getDefaultWriteConcern();
	}
}
@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2015 the original author or authors.
 * Copyright 2011-2012 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -25,5 +25,5 @@ package org.springframework.data.mongodb.core;
 */
public enum MongoActionOperation {

	REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE, BULK;
	REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE
}
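The enum above gains a `BULK` constant on the 1.9 line, and `DefaultWriteConcernResolver` simply falls back to the action's default write concern. A hedged sketch of a custom resolver keyed off `MongoActionOperation`, assuming `MongoAction` exposes its operation via `getMongoActionOperation()` as in current releases; the escalation rule itself is purely illustrative:

```java
import org.springframework.data.mongodb.core.MongoAction;
import org.springframework.data.mongodb.core.MongoActionOperation;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.WriteConcernResolver;

import com.mongodb.WriteConcern;

public class EscalatingWriteConcernResolver implements WriteConcernResolver {

	@Override
	public WriteConcern resolve(MongoAction action) {

		// Escalate durability for removes (a BULK case could be handled the same way on the 1.9 line);
		// otherwise keep whatever default the action already carries.
		MongoActionOperation operation = action.getMongoActionOperation();

		if (MongoActionOperation.REMOVE.equals(operation)) {
			return WriteConcern.MAJORITY;
		}

		return action.getDefaultWriteConcern();
	}

	public static void register(MongoTemplate template) {
		template.setWriteConcernResolver(new EscalatingWriteConcernResolver());
	}
}
```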
@@ -16,6 +16,7 @@
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
@@ -27,12 +28,9 @@ import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.InvalidDataAccessResourceUsageException;
|
||||
import org.springframework.dao.PermissionDeniedDataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.BulkOperationException;
|
||||
import org.springframework.data.mongodb.UncategorizedMongoDbException;
|
||||
import org.springframework.data.mongodb.util.MongoDbErrorCodes;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.MongoException;
|
||||
|
||||
/**
|
||||
@@ -46,12 +44,12 @@ import com.mongodb.MongoException;
|
||||
*/
|
||||
public class MongoExceptionTranslator implements PersistenceExceptionTranslator {
|
||||
|
||||
private static final Set<String> DULICATE_KEY_EXCEPTIONS = new HashSet<String>(
|
||||
Arrays.asList("MongoException.DuplicateKey", "DuplicateKeyException"));
|
||||
private static final Set<String> DULICATE_KEY_EXCEPTIONS = new HashSet<String>(Arrays.asList(
|
||||
"MongoException.DuplicateKey", "DuplicateKeyException"));
|
||||
|
||||
private static final Set<String> RESOURCE_FAILURE_EXCEPTIONS = new HashSet<String>(
|
||||
Arrays.asList("MongoException.Network", "MongoSocketException", "MongoException.CursorNotFound",
|
||||
"MongoCursorNotFoundException", "MongoServerSelectionException", "MongoTimeoutException"));
|
||||
private static final Set<String> RESOURCE_FAILURE_EXCEPTIONS = new HashSet<String>(Arrays.asList(
|
||||
"MongoException.Network", "MongoSocketException", "MongoException.CursorNotFound",
|
||||
"MongoCursorNotFoundException", "MongoServerSelectionException", "MongoTimeoutException"));
|
||||
|
||||
private static final Set<String> RESOURCE_USAGE_EXCEPTIONS = new HashSet<String>(
|
||||
Arrays.asList("MongoInternalException"));
|
||||
@@ -85,10 +83,6 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
|
||||
return new DataIntegrityViolationException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
if (ex instanceof BulkWriteException) {
|
||||
return new BulkOperationException(ex.getMessage(), (BulkWriteException) ex);
|
||||
}
|
||||
|
||||
// All other MongoExceptions
|
||||
if (ex instanceof MongoException) {
|
||||
|
||||
@@ -112,4 +106,126 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
|
||||
// that translation should not occur.
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link MongoDbErrorCodes} holds MongoDB specific error codes outlined in {@literal mongo/base/error_codes.err}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.8
|
||||
*/
|
||||
public static final class MongoDbErrorCodes {
|
||||
|
||||
static HashMap<Integer, String> dataAccessResourceFailureCodes;
|
||||
static HashMap<Integer, String> dataIntegrityViolationCodes;
|
||||
static HashMap<Integer, String> duplicateKeyCodes;
|
||||
static HashMap<Integer, String> invalidDataAccessApiUsageExeption;
|
||||
static HashMap<Integer, String> permissionDeniedCodes;
|
||||
|
||||
static HashMap<Integer, String> errorCodes;
|
||||
|
||||
static {
|
||||
|
||||
dataAccessResourceFailureCodes = new HashMap<Integer, String>(10);
|
||||
dataAccessResourceFailureCodes.put(6, "HostUnreachable");
|
||||
dataAccessResourceFailureCodes.put(7, "HostNotFound");
|
||||
dataAccessResourceFailureCodes.put(89, "NetworkTimeout");
|
||||
dataAccessResourceFailureCodes.put(91, "ShutdownInProgress");
|
||||
dataAccessResourceFailureCodes.put(12000, "SlaveDelayDifferential");
|
||||
dataAccessResourceFailureCodes.put(10084, "CannotFindMapFile64Bit");
|
||||
dataAccessResourceFailureCodes.put(10085, "CannotFindMapFile");
|
||||
dataAccessResourceFailureCodes.put(10357, "ShutdownInProgress");
|
||||
dataAccessResourceFailureCodes.put(10359, "Header==0");
|
||||
dataAccessResourceFailureCodes.put(13440, "BadOffsetInFile");
|
||||
dataAccessResourceFailureCodes.put(13441, "BadOffsetInFile");
|
||||
dataAccessResourceFailureCodes.put(13640, "DataFileHeaderCorrupt");
|
||||
|
||||
dataIntegrityViolationCodes = new HashMap<Integer, String>(6);
|
||||
dataIntegrityViolationCodes.put(67, "CannotCreateIndex");
|
||||
dataIntegrityViolationCodes.put(68, "IndexAlreadyExists");
|
||||
dataIntegrityViolationCodes.put(85, "IndexOptionsConflict");
|
||||
dataIntegrityViolationCodes.put(86, "IndexKeySpecsConflict");
|
||||
dataIntegrityViolationCodes.put(112, "WriteConflict");
|
||||
dataIntegrityViolationCodes.put(117, "ConflictingOperationInProgress");
|
||||
|
||||
duplicateKeyCodes = new HashMap<Integer, String>(3);
|
||||
duplicateKeyCodes.put(3, "OBSOLETE_DuplicateKey");
|
||||
duplicateKeyCodes.put(84, "DuplicateKeyValue");
|
||||
duplicateKeyCodes.put(11000, "DuplicateKey");
|
||||
duplicateKeyCodes.put(11001, "DuplicateKey");
|
||||
|
||||
invalidDataAccessApiUsageExeption = new HashMap<Integer, String>();
|
||||
invalidDataAccessApiUsageExeption.put(5, "GraphContainsCycle");
|
||||
invalidDataAccessApiUsageExeption.put(9, "FailedToParse");
|
||||
invalidDataAccessApiUsageExeption.put(14, "TypeMismatch");
|
||||
invalidDataAccessApiUsageExeption.put(15, "Overflow");
|
||||
invalidDataAccessApiUsageExeption.put(16, "InvalidLength");
|
||||
invalidDataAccessApiUsageExeption.put(20, "IllegalOperation");
|
||||
invalidDataAccessApiUsageExeption.put(21, "EmptyArrayOperation");
|
||||
invalidDataAccessApiUsageExeption.put(22, "InvalidBSON");
|
||||
invalidDataAccessApiUsageExeption.put(23, "AlreadyInitialized");
|
||||
invalidDataAccessApiUsageExeption.put(29, "NonExistentPath");
|
||||
invalidDataAccessApiUsageExeption.put(30, "InvalidPath");
|
||||
invalidDataAccessApiUsageExeption.put(40, "ConflictingUpdateOperators");
|
||||
invalidDataAccessApiUsageExeption.put(45, "UserDataInconsistent");
|
||||
invalidDataAccessApiUsageExeption.put(30, "DollarPrefixedFieldName");
|
||||
invalidDataAccessApiUsageExeption.put(52, "InvalidPath");
|
||||
invalidDataAccessApiUsageExeption.put(53, "InvalidIdField");
|
||||
invalidDataAccessApiUsageExeption.put(54, "NotSingleValueField");
|
||||
invalidDataAccessApiUsageExeption.put(55, "InvalidDBRef");
|
||||
invalidDataAccessApiUsageExeption.put(56, "EmptyFieldName");
|
||||
invalidDataAccessApiUsageExeption.put(57, "DottedFieldName");
|
||||
invalidDataAccessApiUsageExeption.put(59, "CommandNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(60, "DatabaseNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(61, "ShardKeyNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(62, "OplogOperationUnsupported");
|
||||
invalidDataAccessApiUsageExeption.put(66, "ImmutableField");
|
||||
invalidDataAccessApiUsageExeption.put(72, "InvalidOptions");
|
||||
invalidDataAccessApiUsageExeption.put(115, "CommandNotSupported");
|
||||
invalidDataAccessApiUsageExeption.put(116, "DocTooLargeForCapped");
|
||||
invalidDataAccessApiUsageExeption.put(130, "SymbolNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(17280, "KeyTooLong");
|
||||
invalidDataAccessApiUsageExeption.put(13334, "ShardKeyTooBig");
|
||||
|
||||
permissionDeniedCodes = new HashMap<Integer, String>();
|
||||
permissionDeniedCodes.put(11, "UserNotFound");
|
||||
permissionDeniedCodes.put(18, "AuthenticationFailed");
|
||||
permissionDeniedCodes.put(31, "RoleNotFound");
|
||||
permissionDeniedCodes.put(32, "RolesNotRelated");
|
||||
permissionDeniedCodes.put(33, "PrvilegeNotFound");
|
||||
permissionDeniedCodes.put(15847, "CannotAuthenticate");
|
||||
permissionDeniedCodes.put(16704, "CannotAuthenticateToAdminDB");
|
||||
permissionDeniedCodes.put(16705, "CannotAuthenticateToAdminDB");
|
||||
|
||||
errorCodes = new HashMap<Integer, String>();
|
||||
errorCodes.putAll(dataAccessResourceFailureCodes);
|
||||
errorCodes.putAll(dataIntegrityViolationCodes);
|
||||
errorCodes.putAll(duplicateKeyCodes);
|
||||
errorCodes.putAll(invalidDataAccessApiUsageExeption);
|
||||
errorCodes.putAll(permissionDeniedCodes);
|
||||
}
|
||||
|
||||
public static boolean isDataIntegrityViolationCode(Integer errorCode) {
|
||||
return errorCode == null ? false : dataIntegrityViolationCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isDataAccessResourceFailureCode(Integer errorCode) {
|
||||
return errorCode == null ? false : dataAccessResourceFailureCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isDuplicateKeyCode(Integer errorCode) {
|
||||
return errorCode == null ? false : duplicateKeyCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isPermissionDeniedCode(Integer errorCode) {
|
||||
return errorCode == null ? false : permissionDeniedCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isInvalidDataAccessApiUsageCode(Integer errorCode) {
|
||||
return errorCode == null ? false : invalidDataAccessApiUsageExeption.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static String getErrorDescription(Integer errorCode) {
|
||||
return errorCode == null ? null : errorCodes.get(errorCode);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
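The hunks above shuffle the error-code handling between the two branches: on the 1.9 line `BulkWriteException` is translated to `BulkOperationException` and the code table lives in `org.springframework.data.mongodb.util.MongoDbErrorCodes`, while 1.8.4.RELEASE keeps `MongoDbErrorCodes` as a static inner class of `MongoExceptionTranslator`. A small sketch of using the translator directly, written against the 1.8.x inner-class variant shown here:

```java
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;
import org.springframework.data.mongodb.core.MongoExceptionTranslator.MongoDbErrorCodes;

public class ExceptionTranslationExample {

	private final MongoExceptionTranslator translator = new MongoExceptionTranslator();

	public DataAccessException toSpringException(RuntimeException driverException) {

		// Returns null when the exception is not MongoDB specific and should not be translated.
		return translator.translateExceptionIfPossible(driverException);
	}

	public boolean isDuplicateKey(Integer errorCode) {
		// 11000 and 11001 map to "DuplicateKey" in the table built above.
		return MongoDbErrorCodes.isDuplicateKeyCode(errorCode);
	}
}
```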
@@ -20,7 +20,6 @@ import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
@@ -293,34 +292,6 @@ public interface MongoOperations {
|
||||
*/
|
||||
ScriptOperations scriptOps();
|
||||
|
||||
/**
|
||||
* Returns a new {@link BulkOperations} for the given collection.
|
||||
*
|
||||
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
|
||||
* @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
|
||||
* @return {@link BulkOperations} on the named collection
|
||||
*/
|
||||
BulkOperations bulkOps(BulkMode mode, String collectionName);
|
||||
|
||||
/**
|
||||
* Returns a new {@link BulkOperations} for the given entity type.
|
||||
*
|
||||
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
|
||||
* @param entityType the name of the entity class, must not be {@literal null}.
|
||||
* @return {@link BulkOperations} on the named collection associated of the given entity class.
|
||||
*/
|
||||
BulkOperations bulkOps(BulkMode mode, Class<?> entityType);
|
||||
|
||||
/**
|
||||
* Returns a new {@link BulkOperations} for the given entity type and collection name.
|
||||
*
|
||||
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
|
||||
* @param entityClass the name of the entity class, must not be {@literal null}.
|
||||
* @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
|
||||
* @return {@link BulkOperations} on the named collection associated with the given entity class.
|
||||
*/
|
||||
BulkOperations bulkOps(BulkMode mode, Class<?> entityType, String collectionName);
|
||||
|
||||
/**
|
||||
* Query for a list of objects of type T from the collection used by the entity class.
|
||||
* <p/>
|
||||
@@ -629,8 +600,8 @@ public interface MongoOperations {
|
||||
<T> T findById(Object id, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
@@ -641,8 +612,8 @@ public interface MongoOperations {
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
@@ -654,8 +625,8 @@ public interface MongoOperations {
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
@@ -668,8 +639,8 @@ public interface MongoOperations {
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
@@ -757,9 +728,9 @@ public interface MongoOperations {
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert" >
|
||||
* Spring's Type Conversion"</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert"
|
||||
* >Spring's Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -814,9 +785,9 @@ public interface MongoOperations {
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert" >
|
||||
* Spring's Type Conversion"</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert"
|
||||
* >Spring's Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection
|
||||
*/
|
||||
|
||||
@@ -60,7 +60,6 @@ import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
@@ -339,8 +338,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DBCursor cursor = collection.find(mappedQuery, mappedFields);
|
||||
QueryCursorPreparer cursorPreparer = new QueryCursorPreparer(query, entityType);
|
||||
|
||||
ReadDbObjectCallback<T> readCallback = new ReadDbObjectCallback<T>(mongoConverter, entityType,
|
||||
collection.getName());
|
||||
ReadDbObjectCallback<T> readCallback = new ReadDbObjectCallback<T>(mongoConverter, entityType, collection
|
||||
.getName());
|
||||
|
||||
return new CloseableIterableCursorAdapter<T>(cursorPreparer.prepare(cursor), exceptionTranslator, readCallback);
|
||||
}
|
||||
@@ -373,8 +372,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
*/
|
||||
@Deprecated
|
||||
public CommandResult executeCommand(final DBObject command, final int options) {
|
||||
return executeCommand(command,
|
||||
(options & Bytes.QUERYOPTION_SLAVEOK) != 0 ? ReadPreference.secondaryPreferred() : ReadPreference.primary());
|
||||
return executeCommand(command, (options & Bytes.QUERYOPTION_SLAVEOK) != 0 ? ReadPreference.secondaryPreferred()
|
||||
: ReadPreference.primary());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -422,8 +421,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
* @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply
|
||||
* limits, skips and so on).
|
||||
*/
|
||||
protected void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch,
|
||||
CursorPreparer preparer) {
|
||||
protected void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch, CursorPreparer preparer) {
|
||||
|
||||
Assert.notNull(query);
|
||||
|
||||
@@ -546,28 +544,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
return new DefaultIndexOperations(this, determineCollectionName(entityClass));
}

public BulkOperations bulkOps(BulkMode bulkMode, String collectionName) {
return bulkOps(bulkMode, null, collectionName);
}

public BulkOperations bulkOps(BulkMode bulkMode, Class<?> entityClass) {
return bulkOps(bulkMode, entityClass, determineCollectionName(entityClass));
}

public BulkOperations bulkOps(BulkMode mode, Class<?> entityType, String collectionName) {

Assert.notNull(mode, "BulkMode must not be null!");
Assert.hasText(collectionName, "Collection name must not be null or empty!");

DefaultBulkOperations operations = new DefaultBulkOperations(this, mode, collectionName, entityType);

operations.setExceptionTranslator(exceptionTranslator);
operations.setWriteConcernResolver(writeConcernResolver);
operations.setDefaultWriteConcern(writeConcern);

return operations;
}
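
These bulkOps(..) overloads back the BulkOperations API that this hunk removes on the way back to 1.8.4. A rough usage sketch against the 1.9 API, assuming a MongoTemplate named template and an illustrative Person class with the constructor shown:

import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Update;

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

void bulkWrite(MongoTemplate template) {

    // UNORDERED lets the server keep processing the remaining operations if one fails.
    BulkOperations bulk = template.bulkOps(BulkMode.UNORDERED, Person.class);

    bulk.insert(new Person("Dave", "Matthews"));
    bulk.updateOne(query(where("lastname").is("Beauford")), new Update().set("firstname", "Carter"));

    // Nothing is sent to MongoDB until execute() is called.
    bulk.execute();
}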
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#scriptOps()
|
||||
@@ -668,8 +644,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
List<Object> results = (List<Object>) commandResult.get("results");
|
||||
results = results == null ? Collections.emptyList() : results;
|
||||
|
||||
DbObjectCallback<GeoResult<T>> callback = new GeoNearResultDbObjectCallback<T>(
|
||||
new ReadDbObjectCallback<T>(mongoConverter, entityClass, collectionName), near.getMetric());
|
||||
DbObjectCallback<GeoResult<T>> callback = new GeoNearResultDbObjectCallback<T>(new ReadDbObjectCallback<T>(
|
||||
mongoConverter, entityClass, collectionName), near.getMetric());
|
||||
List<GeoResult<T>> result = new ArrayList<GeoResult<T>>(results.size());
|
||||
|
||||
int index = 0;
|
||||
@@ -745,9 +721,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
public long count(Query query, Class<?> entityClass, String collectionName) {
|
||||
|
||||
Assert.hasText(collectionName);
|
||||
final DBObject dbObject = query == null ? null
|
||||
: queryMapper.getMappedObject(query.getQueryObject(),
|
||||
entityClass == null ? null : mappingContext.getPersistentEntity(entityClass));
|
||||
final DBObject dbObject = query == null ? null : queryMapper.getMappedObject(query.getQueryObject(),
|
||||
entityClass == null ? null : mappingContext.getPersistentEntity(entityClass));
|
||||
|
||||
return execute(collectionName, new CollectionCallback<Long>() {
|
||||
public Long doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
@@ -1024,8 +999,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName,
|
||||
entityClass, dbDoc, null);
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
WriteResult writeResult = writeConcernToUse == null ? collection.insert(dbDoc)
|
||||
: collection.insert(dbDoc, writeConcernToUse);
|
||||
WriteResult writeResult = writeConcernToUse == null ? collection.insert(dbDoc) : collection.insert(dbDoc,
|
||||
writeConcernToUse);
|
||||
handleAnyWriteResultErrors(writeResult, dbDoc, MongoActionOperation.INSERT);
|
||||
return dbDoc.get(ID_FIELD);
|
||||
}
|
||||
@@ -1045,8 +1020,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT_LIST, collectionName, null,
|
||||
null, null);
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
WriteResult writeResult = writeConcernToUse == null ? collection.insert(dbDocList)
|
||||
: collection.insert(dbDocList.toArray((DBObject[]) new BasicDBObject[dbDocList.size()]), writeConcernToUse);
|
||||
WriteResult writeResult = writeConcernToUse == null ? collection.insert(dbDocList) : collection.insert(
|
||||
dbDocList.toArray((DBObject[]) new BasicDBObject[dbDocList.size()]), writeConcernToUse);
|
||||
handleAnyWriteResultErrors(writeResult, null, MongoActionOperation.INSERT_LIST);
|
||||
return null;
|
||||
}
|
||||
@@ -1074,8 +1049,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass,
|
||||
dbDoc, null);
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
WriteResult writeResult = writeConcernToUse == null ? collection.save(dbDoc)
|
||||
: collection.save(dbDoc, writeConcernToUse);
|
||||
WriteResult writeResult = writeConcernToUse == null ? collection.save(dbDoc) : collection.save(dbDoc,
|
||||
writeConcernToUse);
|
||||
handleAnyWriteResultErrors(writeResult, dbDoc, MongoActionOperation.SAVE);
|
||||
return dbDoc.get(ID_FIELD);
|
||||
}
|
||||
@@ -1128,10 +1103,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
increaseVersionForUpdateIfNecessary(entity, update);
|
||||
|
||||
DBObject queryObj = query == null ? new BasicDBObject()
|
||||
: queryMapper.getMappedObject(query.getQueryObject(), entity);
|
||||
DBObject updateObj = update == null ? new BasicDBObject()
|
||||
: updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
DBObject queryObj = query == null ? new BasicDBObject() : queryMapper.getMappedObject(query.getQueryObject(),
|
||||
entity);
|
||||
DBObject updateObj = update == null ? new BasicDBObject() : updateMapper.getMappedObject(
|
||||
update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s",
|
||||
@@ -1272,9 +1247,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
Object idValue = persistentEntity.getPropertyAccessor(entity).getProperty(idProperty);
|
||||
|
||||
if (idValue == null && !MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(idProperty.getType())) {
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
String.format("Cannot autogenerate id of type %s for entity of type %s!", idProperty.getType().getName(),
|
||||
entity.getClass().getName()));
|
||||
throw new InvalidDataAccessApiUsageException(String.format(
|
||||
"Cannot autogenerate id of type %s for entity of type %s!", idProperty.getType().getName(), entity.getClass()
|
||||
.getName()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1313,12 +1288,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Remove using query: {} in collection: {}.",
|
||||
new Object[] { serializeToJsonSafely(dboq), collection.getName() });
|
||||
LOGGER.debug("Remove using query: {} in collection: {}.", new Object[] { serializeToJsonSafely(dboq),
|
||||
collection.getName() });
|
||||
}
|
||||
|
||||
WriteResult wr = writeConcernToUse == null ? collection.remove(dboq)
|
||||
: collection.remove(dboq, writeConcernToUse);
|
||||
WriteResult wr = writeConcernToUse == null ? collection.remove(dboq) : collection.remove(dboq,
|
||||
writeConcernToUse);
|
||||
|
||||
handleAnyWriteResultErrors(wr, dboq, MongoActionOperation.REMOVE);
|
||||
|
||||
@@ -1334,8 +1309,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
public <T> List<T> findAll(Class<T> entityClass, String collectionName) {
|
||||
return executeFindMultiInternal(new FindCallback(null), null,
|
||||
new ReadDbObjectCallback<T>(mongoConverter, entityClass, collectionName), collectionName);
|
||||
return executeFindMultiInternal(new FindCallback(null), null, new ReadDbObjectCallback<T>(mongoConverter,
|
||||
entityClass, collectionName), collectionName);
|
||||
}
|
||||
|
||||
public <T> MapReduceResults<T> mapReduce(String inputCollectionName, String mapFunction, String reduceFunction,
|
||||
@@ -1351,8 +1326,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
public <T> MapReduceResults<T> mapReduce(Query query, String inputCollectionName, String mapFunction,
|
||||
String reduceFunction, Class<T> entityClass) {
|
||||
return mapReduce(query, inputCollectionName, mapFunction, reduceFunction, new MapReduceOptions().outputTypeInline(),
|
||||
entityClass);
|
||||
return mapReduce(query, inputCollectionName, mapFunction, reduceFunction,
|
||||
new MapReduceOptions().outputTypeInline(), entityClass);
|
||||
}
|
||||
|
||||
public <T> MapReduceResults<T> mapReduce(Query query, String inputCollectionName, String mapFunction,
|
||||
@@ -1363,9 +1338,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DBCollection inputCollection = getCollection(inputCollectionName);
|
||||
|
||||
MapReduceCommand command = new MapReduceCommand(inputCollection, mapFunc, reduceFunc,
|
||||
mapReduceOptions.getOutputCollection(), mapReduceOptions.getOutputType(),
|
||||
query == null || query.getQueryObject() == null ? null
|
||||
: queryMapper.getMappedObject(query.getQueryObject(), null));
|
||||
mapReduceOptions.getOutputCollection(), mapReduceOptions.getOutputType(), query == null
|
||||
|| query.getQueryObject() == null ? null : queryMapper.getMappedObject(query.getQueryObject(), null));
|
||||
|
||||
copyMapReduceOptionsToCommand(query, mapReduceOptions, command);
|
||||
|
||||
@@ -1701,8 +1675,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
serializeToJsonSafely(query), mappedFields, entityClass, collectionName));
|
||||
}
|
||||
|
||||
return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields),
|
||||
new ReadDbObjectCallback<T>(this.mongoConverter, entityClass, collectionName), collectionName);
|
||||
return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields), new ReadDbObjectCallback<T>(
|
||||
this.mongoConverter, entityClass, collectionName), collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1716,8 +1690,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
* @return the List of converted objects.
|
||||
*/
|
||||
protected <T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<T> entityClass) {
|
||||
return doFind(collectionName, query, fields, entityClass, null,
|
||||
new ReadDbObjectCallback<T>(this.mongoConverter, entityClass, collectionName));
|
||||
return doFind(collectionName, query, fields, entityClass, null, new ReadDbObjectCallback<T>(this.mongoConverter,
|
||||
entityClass, collectionName));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1735,8 +1709,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
*/
|
||||
protected <T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<T> entityClass,
|
||||
CursorPreparer preparer) {
|
||||
return doFind(collectionName, query, fields, entityClass, preparer,
|
||||
new ReadDbObjectCallback<T>(mongoConverter, entityClass, collectionName));
|
||||
return doFind(collectionName, query, fields, entityClass, preparer, new ReadDbObjectCallback<T>(mongoConverter,
|
||||
entityClass, collectionName));
|
||||
}
|
||||
|
||||
protected <S, T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<S> entityClass,
|
||||
@@ -1812,13 +1786,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DBObject mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER
|
||||
.debug(
|
||||
String.format(
|
||||
"findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s "
|
||||
+ "in collection: %s",
|
||||
serializeToJsonSafely(mappedQuery), fields, sort, entityClass, serializeToJsonSafely(mappedUpdate),
|
||||
collectionName));
|
||||
LOGGER.debug(String.format("findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s "
|
||||
+ "in collection: %s", serializeToJsonSafely(mappedQuery), fields, sort, entityClass,
|
||||
serializeToJsonSafely(mappedUpdate), collectionName));
|
||||
}
|
||||
|
||||
return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options),
|
||||
@@ -1888,8 +1858,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DbObjectCallback<T> objectCallback, String collectionName) {
|
||||
|
||||
try {
|
||||
T result = objectCallback
|
||||
.doWith(collectionCallback.doInCollection(getAndPrepareCollection(getDb(), collectionName)));
|
||||
T result = objectCallback.doWith(collectionCallback.doInCollection(getAndPrepareCollection(getDb(),
|
||||
collectionName)));
|
||||
return result;
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
@@ -1914,8 +1884,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
* @param collectionName the collection to be queried
|
||||
* @return
|
||||
*/
|
||||
private <T> List<T> executeFindMultiInternal(CollectionCallback<DBCursor> collectionCallback, CursorPreparer preparer,
|
||||
DbObjectCallback<T> objectCallback, String collectionName) {
|
||||
private <T> List<T> executeFindMultiInternal(CollectionCallback<DBCursor> collectionCallback,
|
||||
CursorPreparer preparer, DbObjectCallback<T> objectCallback, String collectionName) {
|
||||
|
||||
try {
|
||||
|
||||
@@ -2005,8 +1975,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
if (entity == null) {
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
"No Persistent Entity information found for the class " + entityClass.getName());
|
||||
throw new InvalidDataAccessApiUsageException("No Persitent Entity information found for the class "
|
||||
+ entityClass.getName());
|
||||
}
|
||||
return entity.getCollection();
|
||||
}
|
||||
@@ -2070,8 +2040,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
String error = result.getErrorMessage();
|
||||
error = error == null ? "NO MESSAGE" : error;
|
||||
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
"Command execution failed: Error [" + error + "], Command = " + source, ex);
|
||||
throw new InvalidDataAccessApiUsageException("Command execution failed: Error [" + error + "], Command = "
|
||||
+ source, ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2267,8 +2237,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
class UnwrapAndReadDbObjectCallback<T> extends ReadDbObjectCallback<T> {
|
||||
|
||||
public UnwrapAndReadDbObjectCallback(EntityReader<? super T, DBObject> reader, Class<T> type,
|
||||
String collectionName) {
|
||||
public UnwrapAndReadDbObjectCallback(EntityReader<? super T, DBObject> reader, Class<T> type, String collectionName) {
|
||||
super(reader, type, collectionName);
|
||||
}
|
||||
|
||||
@@ -2295,6 +2264,15 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
}
|
||||
|
||||
private enum DefaultWriteConcernResolver implements WriteConcernResolver {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
public WriteConcern resolve(MongoAction action) {
|
||||
return action.getDefaultWriteConcern();
|
||||
}
|
||||
}
|
||||
|
||||
class QueryCursorPreparer implements CursorPreparer {
|
||||
|
||||
private final Query query;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -42,6 +42,16 @@ import org.springframework.data.convert.ReadingConverter;
|
||||
import org.springframework.data.convert.ThreeTenBackPortConverters;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigDecimalToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.DBObjectToNamedMongoScriptCoverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.DBObjectToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.NamedMongoScriptToDBObjectConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigDecimalConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigIntegerConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToURLConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.TermToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.URLToStringConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.util.CacheValue;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -102,7 +112,17 @@ public class CustomConversions {
// Add user provided converters to make sure they can override the defaults
toRegister.addAll(converters);
toRegister.add(CustomToStringConverter.INSTANCE);
toRegister.addAll(MongoConverters.getConvertersToRegister());
toRegister.add(BigDecimalToStringConverter.INSTANCE);
toRegister.add(StringToBigDecimalConverter.INSTANCE);
toRegister.add(BigIntegerToStringConverter.INSTANCE);
toRegister.add(StringToBigIntegerConverter.INSTANCE);
toRegister.add(URLToStringConverter.INSTANCE);
toRegister.add(StringToURLConverter.INSTANCE);
toRegister.add(DBObjectToStringConverter.INSTANCE);
toRegister.add(TermToStringConverter.INSTANCE);
toRegister.add(NamedMongoScriptToDBObjectConverter.INSTANCE);
toRegister.add(DBObjectToNamedMongoScriptCoverter.INSTANCE);

toRegister.addAll(JodaTimeConverters.getConvertersToRegister());
toRegister.addAll(GeoConverters.getConvertersToRegister());
toRegister.addAll(Jsr310Converters.getConvertersToRegister());
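
The registration order above is deliberate: user-provided converters are added before the store defaults so they win. A hedged sketch of supplying such a converter; the Person type and the write converter are illustrative only, but the CustomConversions constructor is the one populated above:

import java.util.Arrays;

import org.springframework.core.convert.converter.Converter;
import org.springframework.data.convert.WritingConverter;
import org.springframework.data.mongodb.core.convert.CustomConversions;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

@WritingConverter
enum PersonWriteConverter implements Converter<Person, DBObject> {

    INSTANCE;

    @Override
    public DBObject convert(Person source) {
        // custom mapping that overrides the default object-to-DBObject conversion for Person
        return new BasicDBObject("name", source.getFirstname());
    }
}

class MongoConverterConfig {

    // typically exposed from a @Configuration class and handed to the MappingMongoConverter
    CustomConversions customConversions() {
        return new CustomConversions(Arrays.asList(PersonWriteConverter.INSTANCE));
    }
}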
|
||||
@@ -166,8 +186,7 @@ public class CustomConversions {
|
||||
}
|
||||
|
||||
if (!added) {
|
||||
throw new IllegalArgumentException(
|
||||
"Given set contains element that is neither Converter nor ConverterFactory!");
|
||||
throw new IllegalArgumentException("Given set contains element that is neither Converter nor ConverterFactory!");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -397,10 +416,6 @@ public class CustomConversions {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.GenericConverter#getConvertibleTypes()
|
||||
*/
|
||||
public Set<ConvertiblePair> getConvertibleTypes() {
|
||||
|
||||
ConvertiblePair localeToString = new ConvertiblePair(Locale.class, String.class);
|
||||
@@ -409,10 +424,6 @@ public class CustomConversions {
|
||||
return new HashSet<ConvertiblePair>(Arrays.asList(localeToString, booleanToString));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.GenericConverter#convert(java.lang.Object, org.springframework.core.convert.TypeDescriptor, org.springframework.core.convert.TypeDescriptor)
|
||||
*/
|
||||
public Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor targetType) {
|
||||
return source.toString();
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -19,26 +19,16 @@ import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Currency;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import org.bson.types.Code;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionFailedException;
|
||||
import org.springframework.core.convert.TypeDescriptor;
|
||||
import org.springframework.core.convert.converter.ConditionalConverter;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.convert.converter.ConverterFactory;
|
||||
import org.springframework.data.convert.ReadingConverter;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mongodb.core.query.Term;
|
||||
import org.springframework.data.mongodb.core.script.NamedMongoScript;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.NumberUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
@@ -59,33 +49,6 @@ abstract class MongoConverters {
|
||||
*/
|
||||
private MongoConverters() {}
|
||||
|
||||
/**
|
||||
* Returns the converters to be registered.
|
||||
*
|
||||
* @return
|
||||
* @since 1.9
|
||||
*/
|
||||
public static Collection<Object> getConvertersToRegister() {
|
||||
|
||||
List<Object> converters = new ArrayList<Object>();
|
||||
|
||||
converters.add(BigDecimalToStringConverter.INSTANCE);
|
||||
converters.add(StringToBigDecimalConverter.INSTANCE);
|
||||
converters.add(BigIntegerToStringConverter.INSTANCE);
|
||||
converters.add(StringToBigIntegerConverter.INSTANCE);
|
||||
converters.add(URLToStringConverter.INSTANCE);
|
||||
converters.add(StringToURLConverter.INSTANCE);
|
||||
converters.add(DBObjectToStringConverter.INSTANCE);
|
||||
converters.add(TermToStringConverter.INSTANCE);
|
||||
converters.add(NamedMongoScriptToDBObjectConverter.INSTANCE);
|
||||
converters.add(DBObjectToNamedMongoScriptCoverter.INSTANCE);
|
||||
converters.add(CurrencyToStringConverter.INSTANCE);
|
||||
converters.add(StringToCurrencyConverter.INSTANCE);
|
||||
converters.add(NumberToNumberConverterFactory.INSTANCE);
|
||||
|
||||
return converters;
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple singleton to convert {@link ObjectId}s to their {@link String} representation.
|
||||
*
|
||||
@@ -265,113 +228,4 @@ abstract class MongoConverters {
|
||||
return builder.get();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Converter} implementation converting {@link Currency} into its ISO 4217 {@link String} representation.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.9
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum CurrencyToStringConverter implements Converter<Currency, String> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(Currency source) {
|
||||
return source == null ? null : source.getCurrencyCode();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Converter} implementation converting ISO 4217 {@link String} into {@link Currency}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.9
|
||||
*/
|
||||
@ReadingConverter
|
||||
public static enum StringToCurrencyConverter implements Converter<String, Currency> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public Currency convert(String source) {
|
||||
return StringUtils.hasText(source) ? Currency.getInstance(source) : null;
|
||||
}
|
||||
}
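
The two converters above are a plain round trip over the ISO 4217 code; the JDK calls they delegate to behave like this (the currency value is illustrative):

import java.util.Currency;

String code = Currency.getInstance("EUR").getCurrencyCode();   // what CurrencyToStringConverter writes: "EUR"
Currency euro = Currency.getInstance(code);                     // what StringToCurrencyConverter reads back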
|
||||
|
||||
/**
|
||||
* {@link ConverterFactory} implementation using {@link NumberUtils} for number conversion and parsing. Additionally
|
||||
* deals with {@link AtomicInteger} and {@link AtomicLong} by calling {@code get()} before performing the actual
|
||||
* conversion.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.9
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum NumberToNumberConverterFactory implements ConverterFactory<Number, Number>,ConditionalConverter {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.ConverterFactory#getConverter(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T extends Number> Converter<Number, T> getConverter(Class<T> targetType) {
|
||||
return new NumberToNumberConverter<T>(targetType);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.ConditionalConverter#matches(org.springframework.core.convert.TypeDescriptor, org.springframework.core.convert.TypeDescriptor)
|
||||
*/
|
||||
@Override
|
||||
public boolean matches(TypeDescriptor sourceType, TypeDescriptor targetType) {
|
||||
return !sourceType.equals(targetType);
|
||||
}
|
||||
|
||||
private final static class NumberToNumberConverter<T extends Number> implements Converter<Number, T> {
|
||||
|
||||
private final Class<T> targetType;
|
||||
|
||||
/**
|
||||
* Creates a new {@link NumberToNumberConverter} for the given target type.
|
||||
*
|
||||
* @param targetType must not be {@literal null}.
|
||||
*/
|
||||
public NumberToNumberConverter(Class<T> targetType) {
|
||||
|
||||
Assert.notNull(targetType, "Target type must not be null!");
|
||||
|
||||
this.targetType = targetType;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public T convert(Number source) {
|
||||
|
||||
if (source instanceof AtomicInteger) {
|
||||
return NumberUtils.convertNumberToTargetClass(((AtomicInteger) source).get(), this.targetType);
|
||||
}
|
||||
|
||||
if (source instanceof AtomicLong) {
|
||||
return NumberUtils.convertNumberToTargetClass(((AtomicLong) source).get(), this.targetType);
|
||||
}
|
||||
|
||||
return NumberUtils.convertNumberToTargetClass(source, this.targetType);
|
||||
}
|
||||
}
|
||||
}
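
For the atomic types the factory above first unwraps the current value via get() and then hands it to NumberUtils; in isolation that amounts to the following (the Integer target type is just an example):

import java.util.concurrent.atomic.AtomicLong;

import org.springframework.util.NumberUtils;

AtomicLong counter = new AtomicLong(42L);

// equivalent of NumberToNumberConverterFactory.INSTANCE.getConverter(Integer.class).convert(counter)
Integer unwrapped = NumberUtils.convertNumberToTargetClass(counter.get(), Integer.class);   // 42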
|
||||
}
|
||||
|
||||
@@ -29,9 +29,8 @@ import java.lang.annotation.Target;
|
||||
* @author Johno Crawford
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Jordi Llach
|
||||
*/
|
||||
@Target({ElementType.ANNOTATION_TYPE, ElementType.FIELD})
|
||||
@Target(ElementType.FIELD)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface Indexed {
|
||||
|
||||
|
||||
@@ -26,11 +26,11 @@ import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.MappingContextEvent;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.MongoExceptionTranslator.MongoDbErrorCodes;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.util.MongoDbErrorCodes;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
|
||||
@@ -26,7 +26,6 @@ import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.context.expression.BeanFactoryAccessor;
|
||||
import org.springframework.context.expression.BeanFactoryResolver;
|
||||
import org.springframework.core.annotation.AnnotationUtils;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.AssociationHandler;
|
||||
@@ -78,7 +77,7 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
Class<?> rawType = typeInformation.getType();
|
||||
String fallback = MongoCollectionUtils.getPreferredCollectionName(rawType);
|
||||
|
||||
Document document = AnnotationUtils.findAnnotation(rawType, Document.class);
|
||||
Document document = rawType.getAnnotation(Document.class);
|
||||
|
||||
this.expression = detectExpression(document);
|
||||
this.context = new StandardEvaluationContext();
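
The difference between the two lookups in this hunk matters for subclasses and composed annotations: getAnnotation(..) only sees @Document declared directly on the class, while AnnotationUtils.findAnnotation(..) also walks superclasses and meta-annotations. A small illustrative example (the Contact and Person types are assumptions, not from the diff):

import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.data.mongodb.core.mapping.Document;

@Document(collection = "contacts")
class Contact {}

class Person extends Contact {}

// @Document is not @Inherited, so the direct lookup misses it on the subclass ...
Document direct = Person.class.getAnnotation(Document.class);                    // null

// ... while findAnnotation(..) walks up to Contact and returns the annotation there.
Document found = AnnotationUtils.findAnnotation(Person.class, Document.class);   // collection = "contacts"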
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2016 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,25 +15,31 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.convert.EntityInstantiators;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.domain.PageImpl;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Range;
|
||||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.data.domain.SliceImpl;
|
||||
import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.GeoPage;
|
||||
import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.CollectionExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.DeleteExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.GeoNearExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.PagedExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.PagingGeoNearExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.ResultProcessingConverter;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.ResultProcessingExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.SingleEntityExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.SlicedExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.StreamExecution;
|
||||
import org.springframework.data.repository.query.ParameterAccessor;
|
||||
import org.springframework.data.repository.query.RepositoryQuery;
|
||||
import org.springframework.data.repository.query.ResultProcessor;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.data.util.StreamUtils;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.WriteResult;
|
||||
|
||||
/**
|
||||
* Base class for {@link RepositoryQuery} implementations for Mongo.
|
||||
*
|
||||
@@ -45,7 +51,6 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
|
||||
private final MongoQueryMethod method;
|
||||
private final MongoOperations operations;
|
||||
private final EntityInstantiators instantiators;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AbstractMongoQuery} from the given {@link MongoQueryMethod} and {@link MongoOperations}.
|
||||
@@ -60,7 +65,6 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
|
||||
this.method = method;
|
||||
this.operations = operations;
|
||||
this.instantiators = new EntityInstantiators();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -82,53 +86,30 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
|
||||
applyQueryMetaAttributesWhenPresent(query);
|
||||
|
||||
ResultProcessor processor = method.getResultProcessor().withDynamicProjection(accessor);
|
||||
String collection = method.getEntityInformation().getCollectionName();
|
||||
|
||||
MongoQueryExecution execution = getExecution(query, accessor,
|
||||
new ResultProcessingConverter(processor, operations, instantiators));
|
||||
|
||||
return execution.execute(query, processor.getReturnedType().getDomainType(), collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the execution instance to use.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param parameters must not be {@literal null}.
|
||||
* @param accessor must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private MongoQueryExecution getExecution(Query query, MongoParameterAccessor accessor,
|
||||
Converter<Object, Object> resultProcessing) {
|
||||
|
||||
if (method.isStreamQuery()) {
|
||||
return new StreamExecution(operations, resultProcessing);
|
||||
}
|
||||
|
||||
return new ResultProcessingExecution(getExecutionToWrap(query, accessor), resultProcessing);
|
||||
}
|
||||
|
||||
private MongoQueryExecution getExecutionToWrap(Query query, MongoParameterAccessor accessor) {
|
||||
|
||||
if (isDeleteQuery()) {
|
||||
return new DeleteExecution(operations, method);
|
||||
return new StreamExecution().execute(query);
|
||||
} else if (isDeleteQuery()) {
|
||||
return new DeleteExecution().execute(query);
|
||||
} else if (method.isGeoNearQuery() && method.isPageQuery()) {
|
||||
return new PagingGeoNearExecution(operations, accessor, method.getReturnType(), this);
|
||||
|
||||
MongoParameterAccessor countAccessor = new MongoParametersParameterAccessor(method, parameters);
|
||||
Query countQuery = createCountQuery(new ConvertingParameterAccessor(operations.getConverter(), countAccessor));
|
||||
|
||||
return new GeoNearExecution(accessor).execute(query, countQuery);
|
||||
} else if (method.isGeoNearQuery()) {
|
||||
return new GeoNearExecution(operations, accessor, method.getReturnType());
|
||||
return new GeoNearExecution(accessor).execute(query);
|
||||
} else if (method.isSliceQuery()) {
|
||||
return new SlicedExecution(operations, accessor.getPageable());
|
||||
return new SlicedExecution(accessor.getPageable()).execute(query);
|
||||
} else if (method.isCollectionQuery()) {
|
||||
return new CollectionExecution(operations, accessor.getPageable());
|
||||
return new CollectionExecution(accessor.getPageable()).execute(query);
|
||||
} else if (method.isPageQuery()) {
|
||||
return new PagedExecution(operations, accessor.getPageable());
|
||||
return new PagedExecution(accessor.getPageable()).execute(query);
|
||||
} else {
|
||||
return new SingleEntityExecution(operations, isCountQuery());
|
||||
return new SingleEntityExecution(isCountQuery()).execute(query);
|
||||
}
|
||||
}
|
||||
|
||||
Query applyQueryMetaAttributesWhenPresent(Query query) {
|
||||
private Query applyQueryMetaAttributesWhenPresent(Query query) {
|
||||
|
||||
if (method.hasQueryMetaAttributes()) {
|
||||
query.setMeta(method.getQueryMetaAttributes());
|
||||
@@ -146,7 +127,12 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
* @return
|
||||
*/
|
||||
protected Query createCountQuery(ConvertingParameterAccessor accessor) {
|
||||
return applyQueryMetaAttributesWhenPresent(createQuery(accessor));
|
||||
|
||||
Query query = createQuery(accessor);
|
||||
|
||||
applyQueryMetaAttributesWhenPresent(query);
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -171,4 +157,292 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
* @since 1.5
|
||||
*/
|
||||
protected abstract boolean isDeleteQuery();
|
||||
|
||||
private abstract class Execution {
|
||||
|
||||
abstract Object execute(Query query);
|
||||
|
||||
protected List<?> readCollection(Query query) {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
|
||||
String collectionName = metadata.getCollectionName();
|
||||
return operations.find(query, metadata.getJavaType(), collectionName);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Execution} for collection returning queries.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
final class CollectionExecution extends Execution {
|
||||
|
||||
private final Pageable pageable;
|
||||
|
||||
CollectionExecution(Pageable pageable) {
|
||||
this.pageable = pageable;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public Object execute(Query query) {
|
||||
return readCollection(query.with(pageable));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Execution} for {@link Slice} query methods.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @since 1.5
|
||||
*/
|
||||
|
||||
final class SlicedExecution extends Execution {
|
||||
|
||||
private final Pageable pageable;
|
||||
|
||||
SlicedExecution(Pageable pageable) {
|
||||
this.pageable = pageable;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
Object execute(Query query) {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
int pageSize = pageable.getPageSize();
|
||||
|
||||
// Apply Pageable but tweak limit to peek into next page
|
||||
Query modifiedQuery = query.with(pageable).limit(pageSize + 1);
|
||||
|
||||
List result = operations.find(modifiedQuery, metadata.getJavaType(), metadata.getCollectionName());
|
||||
|
||||
boolean hasNext = result.size() > pageSize;
|
||||
|
||||
return new SliceImpl<Object>(hasNext ? result.subList(0, pageSize) : result, pageable, hasNext);
|
||||
}
|
||||
}
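
The trick in SlicedExecution is simply to fetch one more document than the page size and use its presence as the hasNext flag, which avoids the extra count query a Page would need. From the caller's side that corresponds to something like this assumed repository:

import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.repository.CrudRepository;

// illustrative repository, not part of the diff
interface PersonRepository extends CrudRepository<Person, String> {
    Slice<Person> findByLastname(String lastname, Pageable pageable);
}

// usage: repository.findByLastname("Matthews", new PageRequest(0, 10)) runs with limit 11
// under the hood; the peeked 11th document only decides Slice#hasNext().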
|
||||
|
||||
/**
|
||||
* {@link Execution} for pagination queries.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
final class PagedExecution extends Execution {
|
||||
|
||||
private final Pageable pageable;
|
||||
|
||||
/**
|
||||
* Creates a new {@link PagedExecution}.
|
||||
*
|
||||
* @param pageable
|
||||
*/
|
||||
public PagedExecution(Pageable pageable) {
|
||||
|
||||
Assert.notNull(pageable);
|
||||
this.pageable = pageable;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
Object execute(Query query) {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
String collectionName = metadata.getCollectionName();
|
||||
Class<?> type = metadata.getJavaType();
|
||||
|
||||
int overallLimit = query.getLimit();
|
||||
long count = operations.count(query, type, collectionName);
|
||||
count = overallLimit != 0 ? Math.min(count, query.getLimit()) : count;
|
||||
|
||||
boolean pageableOutOfScope = pageable.getOffset() > count;
|
||||
|
||||
if (pageableOutOfScope) {
|
||||
return new PageImpl<Object>(Collections.emptyList(), pageable, count);
|
||||
}
|
||||
|
||||
// Apply raw pagination
|
||||
query = query.with(pageable);
|
||||
|
||||
// Adjust limit if page would exceed the overall limit
|
||||
if (overallLimit != 0 && pageable.getOffset() + pageable.getPageSize() > overallLimit) {
|
||||
query.limit(overallLimit - pageable.getOffset());
|
||||
}
|
||||
|
||||
List<?> result = operations.find(query, type, collectionName);
|
||||
return new PageImpl(result, pageable, count);
|
||||
}
|
||||
}
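
With assumed numbers the limit handling above works out as follows: for an overall query limit of 25, page size 10 and offset 20, the total is reported as 25 and the page's own limit shrinks to the 5 remaining documents.

int overallLimit = 25;   // query.getLimit()
int pageSize = 10;       // pageable.getPageSize()
int offset = 20;         // pageable.getOffset(), i.e. the third page

long count = Math.min(60L, overallLimit);   // 60 raw matches are reported as 25

// the page would run past the overall limit, so only 25 - 20 = 5 documents are fetched
int effectiveLimit = (overallLimit != 0 && offset + pageSize > overallLimit)
        ? overallLimit - offset
        : pageSize;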
|
||||
|
||||
/**
|
||||
* {@link Execution} to return a single entity.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
final class SingleEntityExecution extends Execution {
|
||||
|
||||
private final boolean countProjection;
|
||||
|
||||
private SingleEntityExecution(boolean countProjection) {
|
||||
this.countProjection = countProjection;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
Object execute(Query query) {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
return countProjection ? operations.count(query, metadata.getJavaType())
|
||||
: operations.findOne(query, metadata.getJavaType(), metadata.getCollectionName());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Execution} to execute geo-near queries.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
final class GeoNearExecution extends Execution {
|
||||
|
||||
private final MongoParameterAccessor accessor;
|
||||
|
||||
public GeoNearExecution(MongoParameterAccessor accessor) {
|
||||
this.accessor = accessor;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
Object execute(Query query) {
|
||||
|
||||
GeoResults<?> results = doExecuteQuery(query);
|
||||
return isListOfGeoResult() ? results.getContent() : results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes the given {@link Query} to return a page.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param countQuery must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
Object execute(Query query, Query countQuery) {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
long count = operations.count(countQuery, metadata.getCollectionName());
|
||||
|
||||
return new GeoPage<Object>(doExecuteQuery(query), accessor.getPageable(), count);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private GeoResults<Object> doExecuteQuery(Query query) {
|
||||
|
||||
Point nearLocation = accessor.getGeoNearLocation();
|
||||
NearQuery nearQuery = NearQuery.near(nearLocation);
|
||||
|
||||
if (query != null) {
|
||||
nearQuery.query(query);
|
||||
}
|
||||
|
||||
Range<Distance> distances = accessor.getDistanceRange();
|
||||
Distance maxDistance = distances.getUpperBound();
|
||||
|
||||
if (maxDistance != null) {
|
||||
nearQuery.maxDistance(maxDistance).in(maxDistance.getMetric());
|
||||
}
|
||||
|
||||
Distance minDistance = distances.getLowerBound();
|
||||
|
||||
if (minDistance != null) {
|
||||
nearQuery.minDistance(minDistance).in(minDistance.getMetric());
|
||||
}
|
||||
|
||||
Pageable pageable = accessor.getPageable();
|
||||
if (pageable != null) {
|
||||
nearQuery.with(pageable);
|
||||
}
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
return (GeoResults<Object>) operations.geoNear(nearQuery, metadata.getJavaType(), metadata.getCollectionName());
|
||||
}
|
||||
|
||||
private boolean isListOfGeoResult() {
|
||||
|
||||
TypeInformation<?> returnType = method.getReturnType();
|
||||
|
||||
if (!returnType.getType().equals(List.class)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
TypeInformation<?> componentType = returnType.getComponentType();
|
||||
return componentType == null ? false : GeoResult.class.equals(componentType.getType());
|
||||
}
|
||||
}
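
doExecuteQuery assembles the NearQuery from the method arguments; built by hand the equivalent query looks roughly like this sketch (coordinates, distance, Person type and collection name are made up):

import org.springframework.data.geo.Distance;
import org.springframework.data.geo.GeoResults;
import org.springframework.data.geo.Metrics;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.NearQuery;

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

GeoResults<Person> nearbyMatthews(MongoOperations operations) {

    NearQuery nearQuery = NearQuery.near(new Point(-73.99171, 40.738868))
            .query(query(where("lastname").is("Matthews")))
            .maxDistance(new Distance(2, Metrics.KILOMETERS)).in(Metrics.KILOMETERS);

    return operations.geoNear(nearQuery, Person.class, "person");
}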
|
||||
|
||||
/**
|
||||
* {@link Execution} removing documents matching the query.
|
||||
*
|
||||
* @since 1.5
|
||||
*/
|
||||
final class DeleteExecution extends Execution {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
Object execute(Query query) {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
return deleteAndConvertResult(query, metadata);
|
||||
}
|
||||
|
||||
private Object deleteAndConvertResult(Query query, MongoEntityMetadata<?> metadata) {
|
||||
|
||||
if (method.isCollectionQuery()) {
|
||||
return operations.findAllAndRemove(query, metadata.getJavaType(), metadata.getCollectionName());
|
||||
}
|
||||
|
||||
WriteResult writeResult = operations.remove(query, metadata.getJavaType(), metadata.getCollectionName());
|
||||
return writeResult != null ? writeResult.getN() : 0L;
|
||||
}
|
||||
}
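
deleteAndConvertResult distinguishes the two shapes a derived delete query can take; expressed as an assumed repository, that is:

import java.util.List;

import org.springframework.data.repository.CrudRepository;

// illustrative repository showing the two delete flavours handled above
interface PersonRepository extends CrudRepository<Person, String> {

    // collection return type: findAllAndRemove(..) is used and the removed documents come back
    List<Person> deleteByLastname(String lastname);

    // numeric return type: remove(..) is used and WriteResult.getN() is returned
    Long removeByLastname(String lastname);
}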
|
||||
|
||||
/**
|
||||
* @author Thomas Darimont
|
||||
* @since 1.7
|
||||
*/
|
||||
final class StreamExecution extends Execution {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
Object execute(Query query) {
|
||||
|
||||
Class<?> entityType = getQueryMethod().getEntityInformation().getJavaType();
|
||||
|
||||
return StreamUtils.createStreamFromIterator((CloseableIterator<Object>) operations.stream(query, entityType));
|
||||
}
|
||||
}
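
StreamExecution adapts the CloseableIterator returned by MongoOperations.stream(..) into a Java 8 Stream. An assumed repository plus the close handling the open cursor requires:

import java.util.stream.Stream;

import org.springframework.data.mongodb.repository.MongoRepository;

// illustrative repository, not part of the diff
interface PersonRepository extends MongoRepository<Person, String> {
    Stream<Person> findAllByLastname(String lastname);
}

class PersonService {

    private final PersonRepository repository;

    PersonService(PersonRepository repository) {
        this.repository = repository;
    }

    void printFirstnames() {
        // the Stream keeps the MongoDB cursor open, so close it via try-with-resources
        try (Stream<Person> stream = repository.findAllByLastname("Matthews")) {
            stream.map(Person::getFirstname).forEach(System.out::println);
        }
    }
}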
|
||||
}
|
||||
|
||||
@@ -90,15 +90,6 @@ public class ConvertingParameterAccessor implements MongoParameterAccessor {
|
||||
return delegate.getSort();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.query.ParameterAccessor#getDynamicProjection()
|
||||
*/
|
||||
@Override
|
||||
public Class<?> getDynamicProjection() {
|
||||
return delegate.getDynamicProjection();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.query.ParameterAccessor#getBindableValue(int)
|
||||
|
||||
@@ -1,108 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.convert.EntityInstantiator;
|
||||
import org.springframework.data.convert.EntityInstantiators;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.PreferredConstructor;
|
||||
import org.springframework.data.mapping.PreferredConstructor.Parameter;
|
||||
import org.springframework.data.mapping.SimplePropertyHandler;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ParameterValueProvider;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link Converter} to instantiate DTOs from fully equipped domain objects.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class DtoInstantiatingConverter implements Converter<Object, Object> {
|
||||
|
||||
private final Class<?> targetType;
|
||||
private final MappingContext<? extends PersistentEntity<?, ?>, ? extends PersistentProperty<?>> context;
|
||||
private final EntityInstantiator instantiator;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Converter} to instantiate DTOs.
|
||||
*
|
||||
* @param dtoType must not be {@literal null}.
|
||||
* @param context must not be {@literal null}.
|
||||
* @param instantiators must not be {@literal null}.
|
||||
*/
|
||||
public DtoInstantiatingConverter(Class<?> dtoType,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context,
|
||||
EntityInstantiators instantiator) {
|
||||
|
||||
Assert.notNull(dtoType, "DTO type must not be null!");
|
||||
Assert.notNull(context, "MappingContext must not be null!");
|
||||
Assert.notNull(instantiator, "EntityInstantiators must not be null!");
|
||||
|
||||
this.targetType = dtoType;
|
||||
this.context = context;
|
||||
this.instantiator = instantiator.getInstantiatorFor(context.getPersistentEntity(dtoType));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public Object convert(Object source) {
|
||||
|
||||
if (targetType.isInterface()) {
|
||||
return source;
|
||||
}
|
||||
|
||||
final PersistentEntity<?, ?> sourceEntity = context.getPersistentEntity(source.getClass());
|
||||
final PersistentPropertyAccessor sourceAccessor = sourceEntity.getPropertyAccessor(source);
|
||||
final PersistentEntity<?, ?> targetEntity = context.getPersistentEntity(targetType);
|
||||
final PreferredConstructor<?, ? extends PersistentProperty<?>> constructor = targetEntity
|
||||
.getPersistenceConstructor();
|
||||
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
Object dto = instantiator.createInstance(targetEntity, new ParameterValueProvider() {
|
||||
|
||||
@Override
|
||||
public Object getParameterValue(Parameter parameter) {
|
||||
return sourceAccessor.getProperty(sourceEntity.getPersistentProperty(parameter.getName()));
|
||||
}
|
||||
});
|
||||
|
||||
final PersistentPropertyAccessor dtoAccessor = targetEntity.getPropertyAccessor(dto);
|
||||
|
||||
targetEntity.doWithProperties(new SimplePropertyHandler() {
|
||||
|
||||
@Override
|
||||
public void doWithPersistentProperty(PersistentProperty<?> property) {
|
||||
|
||||
if (constructor.isConstructorParameter(property)) {
|
||||
return;
|
||||
}
|
||||
|
||||
dtoAccessor.setProperty(property,
|
||||
sourceAccessor.getProperty(sourceEntity.getPersistentProperty(property.getName())));
|
||||
}
|
||||
});
|
||||
|
||||
return dto;
|
||||
}
|
||||
}
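
DtoInstantiatingConverter is the piece that materializes class-based projections: constructor arguments are resolved from the source entity's properties of the same name, and the remaining properties are copied afterwards. A hedged example of a DTO it can handle (Person, PersonSummary and the repository method are assumptions):

// projection DTO: firstname and lastname are filled from the Person entity
class PersonSummary {

    private final String firstname;
    private final String lastname;

    PersonSummary(String firstname, String lastname) {
        this.firstname = firstname;
        this.lastname = lastname;
    }
}

// assumed repository method using a dynamic projection:
//   <T> List<T> findByLastname(String lastname, Class<T> type);
// calling repository.findByLastname("Matthews", PersonSummary.class) routes each
// result through the converter above instead of returning full Person documents.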
|
||||
@@ -1,231 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import javax.xml.bind.DatatypeConverter;
|
||||
|
||||
import org.bson.BSON;
|
||||
import org.springframework.data.mongodb.repository.query.StringBasedMongoQuery.ParameterBinding;
|
||||
import org.springframework.data.repository.query.EvaluationContextProvider;
|
||||
import org.springframework.expression.EvaluationContext;
|
||||
import org.springframework.expression.Expression;
|
||||
import org.springframework.expression.spel.standard.SpelExpressionParser;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.util.JSON;
|
||||
|
||||
/**
|
||||
* {@link ExpressionEvaluatingParameterBinder} allows to evaluate, convert and bind parameters to placholders within a
|
||||
* {@link String}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.9
|
||||
*/
|
||||
class ExpressionEvaluatingParameterBinder {
|
||||
|
||||
private final SpelExpressionParser expressionParser;
|
||||
private final EvaluationContextProvider evaluationContextProvider;
|
||||
|
||||
/**
|
||||
* Creates new {@link ExpressionEvaluatingParameterBinder}
|
||||
*
|
||||
* @param expressionParser must not be {@literal null}.
|
||||
* @param evaluationContextProvider must not be {@literal null}.
|
||||
*/
|
||||
public ExpressionEvaluatingParameterBinder(SpelExpressionParser expressionParser,
|
||||
EvaluationContextProvider evaluationContextProvider) {
|
||||
|
||||
Assert.notNull(expressionParser, "ExpressionParser must not be null!");
Assert.notNull(evaluationContextProvider, "EvaluationContextProvider must not be null!");

this.expressionParser = expressionParser;
this.evaluationContextProvider = evaluationContextProvider;
}

/**
* Bind values provided by {@link MongoParameterAccessor} to placeholders in {@literal raw} while considering
* potential conversions and parameter types.
*
* @param raw can be {@literal null} or empty.
* @param accessor must not be {@literal null}.
* @param bindingContext must not be {@literal null}.
* @return {@literal null} if given {@code raw} value is empty.
*/
public String bind(String raw, MongoParameterAccessor accessor, BindingContext bindingContext) {

if (!StringUtils.hasText(raw)) {
return null;
}

return replacePlaceholders(raw, accessor, bindingContext);
}

/**
* Replaces the parameter placeholders with the actual parameter values from the given {@link ParameterBinding}s.
*
* @param input must not be {@literal null} or empty.
* @param accessor must not be {@literal null}.
* @param bindings must not be {@literal null}.
* @return
*/
private String replacePlaceholders(String input, MongoParameterAccessor accessor, BindingContext bindingContext) {

if (!bindingContext.hasBindings()) {
return input;
}

boolean isCompletlyParameterizedQuery = input.matches("^\\?\\d+$");
StringBuilder result = new StringBuilder(input);

for (ParameterBinding binding : bindingContext.getBindings()) {

String parameter = binding.getParameter();
int idx = result.indexOf(parameter);

if (idx == -1) {
continue;
}

String valueForBinding = getParameterValueForBinding(accessor, bindingContext.getParameters(), binding);

int start = idx;
int end = idx + parameter.length();

// If the value to bind is an object literal we need to remove the quoting around the expression insertion point.
if (valueForBinding.startsWith("{") && !isCompletlyParameterizedQuery) {

// Is the insertion point actually surrounded by quotes?
char beforeStart = result.charAt(start - 1);
char afterEnd = result.charAt(end);

if ((beforeStart == '\'' || beforeStart == '"') && (afterEnd == '\'' || afterEnd == '"')) {

// Skip preceding and following quote
start -= 1;
end += 1;
}
}

result.replace(start, end, valueForBinding);
}

return result.toString();
}

/**
* Returns the serialized value to be used for the given {@link ParameterBinding}.
*
* @param accessor must not be {@literal null}.
* @param parameters
* @param binding must not be {@literal null}.
* @return
*/
private String getParameterValueForBinding(MongoParameterAccessor accessor, MongoParameters parameters,
ParameterBinding binding) {

Object value = binding.isExpression()
? evaluateExpression(binding.getExpression(), parameters, accessor.getValues())
: accessor.getBindableValue(binding.getParameterIndex());

if (value instanceof String && binding.isQuoted()) {
return (String) value;
}

if (value instanceof byte[]) {

String base64representation = DatatypeConverter.printBase64Binary((byte[]) value);

if (!binding.isQuoted()) {
return "{ '$binary' : '" + base64representation + "', '$type' : " + BSON.B_GENERAL + "}";
}

return base64representation;
}

return JSON.serialize(value);
}

/**
* Evaluates the given {@code expressionString}.
*
* @param expressionString must not be {@literal null} or empty.
* @param parameters must not be {@literal null}.
* @param parameterValues must not be {@literal null}.
* @return
*/
private Object evaluateExpression(String expressionString, MongoParameters parameters, Object[] parameterValues) {

EvaluationContext evaluationContext = evaluationContextProvider.getEvaluationContext(parameters, parameterValues);
Expression expression = expressionParser.parseExpression(expressionString);

return expression.getValue(evaluationContext, Object.class);
}

/**
* @author Christoph Strobl
* @since 1.9
*/
static class BindingContext {

final MongoParameters parameters;
final List<ParameterBinding> bindings;

/**
* Creates new {@link BindingContext}.
*
* @param parameters
* @param bindings
*/
public BindingContext(MongoParameters parameters, List<ParameterBinding> bindings) {

this.parameters = parameters;
this.bindings = bindings;
}

/**
* @return {@literal true} when list of bindings is not empty.
*/
boolean hasBindings() {
return !CollectionUtils.isEmpty(bindings);
}

/**
* Get unmodifiable list of {@link ParameterBinding}s.
*
* @return never {@literal null}.
*/
public List<ParameterBinding> getBindings() {
return Collections.unmodifiableList(bindings);
}

/**
* Get the associated {@link MongoParameters}.
*
* @return
*/
public MongoParameters getParameters() {
return parameters;
}

}
}
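A minimal, self-contained sketch of the quote-stripping rule implemented above; the query string and bound value are invented for illustration, and only the JDK is assumed:

public class PlaceholderBindingSketch {

	public static void main(String[] args) {

		String query = "{ 'address' : '?0' }";           // placeholder wrapped in quotes
		String valueForBinding = "{ 'city' : 'Linz' }";  // object literal to bind

		StringBuilder result = new StringBuilder(query);
		String parameter = "?0";

		int start = result.indexOf(parameter);
		int end = start + parameter.length();

		// The binder widens the replacement range to swallow the surrounding quotes,
		// because an object literal must not end up nested inside a string value.
		char before = result.charAt(start - 1);
		char after = result.charAt(end);
		if ((before == '\'' || before == '"') && (after == '\'' || after == '"')) {
			start -= 1;
			end += 1;
		}

		result.replace(start, end, valueForBinding);
		System.out.println(result); // { 'address' : { 'city' : 'Linz' } }
	}
}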
@@ -169,31 +169,32 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
* @param parameters
* @return
*/
private Criteria from(Part part, MongoPersistentProperty property, Criteria criteria, Iterator<Object> parameters) {
private Criteria from(Part part, MongoPersistentProperty property, Criteria criteria,
PotentiallyConvertingIterator parameters) {

Type type = part.getType();

switch (type) {
case AFTER:
case GREATER_THAN:
return criteria.gt(parameters.next());
return criteria.gt(parameters.nextConverted(property));
case GREATER_THAN_EQUAL:
return criteria.gte(parameters.next());
return criteria.gte(parameters.nextConverted(property));
case BEFORE:
case LESS_THAN:
return criteria.lt(parameters.next());
return criteria.lt(parameters.nextConverted(property));
case LESS_THAN_EQUAL:
return criteria.lte(parameters.next());
return criteria.lte(parameters.nextConverted(property));
case BETWEEN:
return criteria.gt(parameters.next()).lt(parameters.next());
return criteria.gt(parameters.nextConverted(property)).lt(parameters.nextConverted(property));
case IS_NOT_NULL:
return criteria.ne(null);
case IS_NULL:
return criteria.is(null);
case NOT_IN:
return criteria.nin(nextAsArray(parameters));
return criteria.nin(nextAsArray(parameters, property));
case IN:
return criteria.in(nextAsArray(parameters));
return criteria.in(nextAsArray(parameters, property));
case LIKE:
case STARTING_WITH:
case ENDING_WITH:
@@ -240,12 +241,12 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
return criteria.within((Shape) parameter);
case SIMPLE_PROPERTY:

return isSimpleComparisionPossible(part) ? criteria.is(parameters.next())
return isSimpleComparisionPossible(part) ? criteria.is(parameters.nextConverted(property))
: createLikeRegexCriteriaOrThrow(part, property, criteria, parameters, false);

case NEGATING_SIMPLE_PROPERTY:

return isSimpleComparisionPossible(part) ? criteria.ne(parameters.next())
return isSimpleComparisionPossible(part) ? criteria.ne(parameters.nextConverted(property))
: createLikeRegexCriteriaOrThrow(part, property, criteria, parameters, true);
default:
throw new IllegalArgumentException("Unsupported keyword!");
@@ -277,7 +278,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
* @return the criteria extended with the like-regex.
*/
private Criteria createLikeRegexCriteriaOrThrow(Part part, MongoPersistentProperty property, Criteria criteria,
Iterator<Object> parameters, boolean shouldNegateExpression) {
PotentiallyConvertingIterator parameters, boolean shouldNegateExpression) {

PropertyPath path = part.getProperty().getLeafProperty();

@@ -296,7 +297,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
criteria = criteria.not();
}

return addAppropriateLikeRegexTo(criteria, part, parameters.next().toString());
return addAppropriateLikeRegexTo(criteria, part, parameters.nextConverted(property).toString());

case NEVER:
// intentional no-op
@@ -318,10 +319,10 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
* @return
*/
private Criteria createContainingCriteria(Part part, MongoPersistentProperty property, Criteria criteria,
Iterator<Object> parameters) {
PotentiallyConvertingIterator parameters) {

if (property.isCollectionLike()) {
return criteria.in(nextAsArray(parameters));
return criteria.in(nextAsArray(parameters, property));
}

return addAppropriateLikeRegexTo(criteria, part, parameters.next().toString());
@@ -376,9 +377,8 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
String.format("Expected parameter type of %s but got %s!", type, parameter.getClass()));
}

private Object[] nextAsArray(Iterator<Object> iterator) {

Object next = iterator.next();
private Object[] nextAsArray(PotentiallyConvertingIterator iterator, MongoPersistentProperty property) {
Object next = iterator.nextConverted(property);

if (next instanceof Collection) {
return ((Collection<?>) next).toArray();
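The keyword-to-criteria mapping above is driven purely by method names; an illustrative repository interface (not taken from the commit, Person is a placeholder domain type) exercising a few of the handled keywords:

import java.util.Collection;
import java.util.Date;
import java.util.List;

import org.springframework.data.repository.CrudRepository;

interface PersonRepository extends CrudRepository<Person, String> {

	List<Person> findByAgeGreaterThan(int age);                 // GREATER_THAN -> criteria.gt(...)

	List<Person> findByBirthdayBetween(Date from, Date to);     // BETWEEN -> criteria.gt(...).lt(...)

	List<Person> findByNicknamesIn(Collection<String> nicknames); // IN -> criteria.in(nextAsArray(...))

	List<Person> findByFirstnameStartingWith(String prefix);    // STARTING_WITH -> like-regex handling
}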
@@ -1,381 +0,0 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.query;

import lombok.NonNull;
import lombok.RequiredArgsConstructor;

import java.util.Collections;
import java.util.List;
import java.util.function.Function;

import org.springframework.core.convert.converter.Converter;
import org.springframework.data.convert.EntityInstantiators;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Range;
import org.springframework.data.domain.Slice;
import org.springframework.data.domain.SliceImpl;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.GeoPage;
import org.springframework.data.geo.GeoResult;
import org.springframework.data.geo.GeoResults;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.repository.query.ResultProcessor;
import org.springframework.data.repository.query.ReturnedType;
import org.springframework.data.util.CloseableIterator;
import org.springframework.data.util.StreamUtils;
import org.springframework.data.util.TypeInformation;
import org.springframework.util.ClassUtils;

import com.mongodb.WriteResult;

interface MongoQueryExecution {

Object execute(Query query, Class<?> type, String collection);

/**
* {@link MongoQueryExecution} for collection returning queries.
*
* @author Oliver Gierke
*/
@RequiredArgsConstructor
static final class CollectionExecution implements MongoQueryExecution {

private final @NonNull MongoOperations operations;
private final Pageable pageable;

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
*/
@Override
public Object execute(Query query, Class<?> type, String collection) {
return operations.find(query.with(pageable), type, collection);
}
}

/**
* {@link MongoQueryExecution} for {@link Slice} query methods.
*
* @author Oliver Gierke
* @author Christoph Strobl
* @since 1.5
*/
@RequiredArgsConstructor
static final class SlicedExecution implements MongoQueryExecution {

private final @NonNull MongoOperations operations;
private final @NonNull Pageable pageable;

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
*/
@Override
@SuppressWarnings({ "unchecked", "rawtypes" })
public Object execute(Query query, Class<?> type, String collection) {

int pageSize = pageable.getPageSize();

// Apply Pageable but tweak limit to peek into next page
Query modifiedQuery = query.with(pageable).limit(pageSize + 1);
List result = operations.find(modifiedQuery, type, collection);

boolean hasNext = result.size() > pageSize;

return new SliceImpl<Object>(hasNext ? result.subList(0, pageSize) : result, pageable, hasNext);
}
}
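The peek-ahead trick in SlicedExecution (fetch one more document than the page size, then drop it again) can be shown in isolation; the list below simply stands in for the driver result:

import java.util.Arrays;
import java.util.List;

public class SlicePeekSketch {

	public static void main(String[] args) {

		int pageSize = 3;
		// Pretend the store returned pageSize + 1 documents for the modified query.
		List<String> result = Arrays.asList("a", "b", "c", "d");

		boolean hasNext = result.size() > pageSize;
		List<String> content = hasNext ? result.subList(0, pageSize) : result;

		System.out.println(content + " hasNext=" + hasNext); // [a, b, c] hasNext=true
	}
}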
/**
* {@link MongoQueryExecution} for pagination queries.
*
* @author Oliver Gierke
*/
@RequiredArgsConstructor
static final class PagedExecution implements MongoQueryExecution {

private final @NonNull MongoOperations operations;
private final @NonNull Pageable pageable;

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
*/
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
public Object execute(Query query, Class<?> type, String collection) {

int overallLimit = query.getLimit();
long count = operations.count(query, type, collection);
count = overallLimit != 0 ? Math.min(count, query.getLimit()) : count;

boolean pageableOutOfScope = pageable.getOffset() > count;

if (pageableOutOfScope) {
return new PageImpl<Object>(Collections.emptyList(), pageable, count);
}

// Apply raw pagination
query = query.with(pageable);

// Adjust limit if page would exceed the overall limit
if (overallLimit != 0 && pageable.getOffset() + pageable.getPageSize() > overallLimit) {
query.limit(overallLimit - pageable.getOffset());
}

List<?> result = operations.find(query, type, collection);
return new PageImpl(result, pageable, count);
}
}
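How PagedExecution reconciles an overall query limit with the requested page boils down to the arithmetic below; the numbers are invented for illustration:

public class PageLimitSketch {

	public static void main(String[] args) {

		int overallLimit = 10;   // e.g. a limiting query
		long count = 25;         // raw matches in the store
		int offset = 8, pageSize = 5;

		// Total is capped by the overall limit, as in PagedExecution.
		count = overallLimit != 0 ? Math.min(count, overallLimit) : count; // 10

		// The last page is shortened so it does not exceed the overall limit.
		int effectiveLimit = (overallLimit != 0 && offset + pageSize > overallLimit)
				? overallLimit - offset // only 2 rows left
				: pageSize;

		System.out.println("count=" + count + ", limit for this page=" + effectiveLimit);
	}
}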
/**
* {@link MongoQueryExecution} to return a single entity.
*
* @author Oliver Gierke
*/
@RequiredArgsConstructor
static final class SingleEntityExecution implements MongoQueryExecution {

private final MongoOperations operations;
private final boolean countProjection;

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
*/
@Override
public Object execute(Query query, Class<?> type, String collection) {
return countProjection ? operations.count(query, type, collection) : operations.findOne(query, type, collection);
}
}

/**
* {@link MongoQueryExecution} to execute geo-near queries.
*
* @author Oliver Gierke
*/
@RequiredArgsConstructor
static class GeoNearExecution implements MongoQueryExecution {

private final MongoOperations operations;
private final MongoParameterAccessor accessor;
private final TypeInformation<?> returnType;

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
*/
@Override
public Object execute(Query query, Class<?> type, String collection) {

GeoResults<?> results = doExecuteQuery(query, type, collection);
return isListOfGeoResult() ? results.getContent() : results;
}

@SuppressWarnings("unchecked")
protected GeoResults<Object> doExecuteQuery(Query query, Class<?> type, String collection) {

Point nearLocation = accessor.getGeoNearLocation();
NearQuery nearQuery = NearQuery.near(nearLocation);

if (query != null) {
nearQuery.query(query);
}

Range<Distance> distances = accessor.getDistanceRange();
Distance maxDistance = distances.getUpperBound();

if (maxDistance != null) {
nearQuery.maxDistance(maxDistance).in(maxDistance.getMetric());
}

Distance minDistance = distances.getLowerBound();

if (minDistance != null) {
nearQuery.minDistance(minDistance).in(minDistance.getMetric());
}

Pageable pageable = accessor.getPageable();

if (pageable != null) {
nearQuery.with(pageable);
}

return (GeoResults<Object>) operations.geoNear(nearQuery, type, collection);
}

private boolean isListOfGeoResult() {

if (!returnType.getType().equals(List.class)) {
return false;
}

TypeInformation<?> componentType = returnType.getComponentType();
return componentType == null ? false : GeoResult.class.equals(componentType.getType());
}
}
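Built outside the execution wrapper, the same kind of NearQuery looks roughly like this; the coordinates and distance are made up, only the query object is assembled and nothing is executed:

import org.springframework.data.geo.Distance;
import org.springframework.data.geo.Metrics;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.query.NearQuery;

public class NearQuerySketch {

	public static void main(String[] args) {

		Point location = new Point(14.2858, 48.3069); // hypothetical lon/lat

		// Mirrors GeoNearExecution: near a location, capped by a max distance in its metric.
		NearQuery nearQuery = NearQuery.near(location)
				.maxDistance(new Distance(10, Metrics.KILOMETERS))
				.in(Metrics.KILOMETERS);

		System.out.println(nearQuery);
	}
}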
static final class PagingGeoNearExecution extends GeoNearExecution {

private final MongoOperations operations;
private final MongoParameterAccessor accessor;
private final AbstractMongoQuery mongoQuery;

public PagingGeoNearExecution(MongoOperations operations, MongoParameterAccessor accessor,
TypeInformation<?> returnType, AbstractMongoQuery query) {

super(operations, accessor, returnType);

this.accessor = accessor;
this.operations = operations;
this.mongoQuery = query;
}

/**
* Executes the given {@link Query} to return a page.
*
* @param query must not be {@literal null}.
* @param countQuery must not be {@literal null}.
* @return
*/
@Override
public Object execute(Query query, Class<?> type, String collection) {

ConvertingParameterAccessor parameterAccessor = new ConvertingParameterAccessor(operations.getConverter(),
accessor);
Query countQuery = mongoQuery.applyQueryMetaAttributesWhenPresent(mongoQuery.createCountQuery(parameterAccessor));
long count = operations.count(countQuery, collection);

return new GeoPage<Object>(doExecuteQuery(query, type, collection), accessor.getPageable(), count);
}
}

/**
* {@link MongoQueryExecution} removing documents matching the query.
*
* @since 1.5
*/
@RequiredArgsConstructor
static final class DeleteExecution implements MongoQueryExecution {

private final MongoOperations operations;
private final MongoQueryMethod method;

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
*/
@Override
public Object execute(Query query, Class<?> type, String collection) {

if (method.isCollectionQuery()) {
return operations.findAllAndRemove(query, type, collection);
}

WriteResult writeResult = operations.remove(query, type, collection);
return writeResult != null ? writeResult.getN() : 0L;
}
}

/**
* @author Thomas Darimont
* @since 1.7
*/
@RequiredArgsConstructor
static final class StreamExecution implements MongoQueryExecution {

private final @NonNull MongoOperations operations;
private final @NonNull Converter<Object, Object> resultProcessing;

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
*/
@Override
@SuppressWarnings("unchecked")
public Object execute(Query query, Class<?> type, String collection) {

return StreamUtils.createStreamFromIterator((CloseableIterator<Object>) operations.stream(query, type))
.map(new Function<Object, Object>() {

@Override
public Object apply(Object t) {
return resultProcessing.convert(t);
}
});
}
}

/**
* An {@link MongoQueryExecution} that wraps the results of the given delegate with the given result processing.
*
* @author Oliver Gierke
* @since 1.9
*/
@RequiredArgsConstructor
static final class ResultProcessingExecution implements MongoQueryExecution {

private final @NonNull MongoQueryExecution delegate;
private final @NonNull Converter<Object, Object> converter;

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
*/
@Override
public Object execute(Query query, Class<?> type, String collection) {
return converter.convert(delegate.execute(query, type, collection));
}
}

/**
* A {@link Converter} to post-process all source objects using the given {@link ResultProcessor}.
*
* @author Oliver Gierke
* @since 1.9
*/
@RequiredArgsConstructor
static final class ResultProcessingConverter implements Converter<Object, Object> {

private final @NonNull ResultProcessor processor;
private final @NonNull MongoOperations operations;
private final @NonNull EntityInstantiators instantiators;

/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
@Override
public Object convert(Object source) {

ReturnedType returnedType = processor.getReturnedType();

if (ClassUtils.isPrimitiveOrWrapper(returnedType.getReturnedType())) {
return source;
}

Converter<Object, Object> converter = new DtoInstantiatingConverter(returnedType.getReturnedType(),
operations.getConverter().getMappingContext(), instantiators);

return processor.processResult(source, converter);
}
}
}
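The executions above compose as plain decorators: ResultProcessingExecution wraps any other execution and post-processes its result. A self-contained illustration of that shape, with the Mongo specifics stubbed out by invented types:

import java.util.Arrays;
import java.util.List;

public class ExecutionDecoratorSketch {

	interface Execution {
		Object execute(String collection);
	}

	public static void main(String[] args) {

		// Stands in for e.g. CollectionExecution backed by MongoOperations.find(...).
		final Execution base = new Execution() {
			public Object execute(String collection) {
				return Arrays.asList("raw-1", "raw-2");
			}
		};

		// Stands in for ResultProcessingExecution delegating and then converting.
		Execution processed = new Execution() {
			public Object execute(String collection) {
				List<?> source = (List<?>) base.execute(collection);
				return "processed " + source;
			}
		};

		System.out.println(processed.execute("people")); // processed [raw-1, raw-2]
	}
}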
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2015 the original author or authors.
* Copyright 2011-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -29,7 +29,6 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.repository.Meta;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.data.repository.core.RepositoryMetadata;
import org.springframework.data.repository.query.QueryMethod;
import org.springframework.data.util.ClassTypeInformation;
@@ -57,15 +56,12 @@ public class MongoQueryMethod extends QueryMethod {
/**
* Creates a new {@link MongoQueryMethod} from the given {@link Method}.
*
* @param method must not be {@literal null}.
* @param metadata must not be {@literal null}.
* @param projectionFactory must not be {@literal null}.
* @param mappingContext must not be {@literal null}.
* @param method
*/
public MongoQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory projectionFactory,
public MongoQueryMethod(Method method, RepositoryMetadata metadata,
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

super(method, metadata, projectionFactory);
super(method, metadata);

Assert.notNull(mappingContext, "MappingContext must not be null!");

@@ -136,8 +132,7 @@ public class MongoQueryMethod extends QueryMethod {

MongoPersistentEntity<?> returnedEntity = mappingContext.getPersistentEntity(returnedObjectType);
MongoPersistentEntity<?> managedEntity = mappingContext.getPersistentEntity(domainClass);
returnedEntity = returnedEntity == null || returnedEntity.getType().isInterface() ? managedEntity
: returnedEntity;
returnedEntity = returnedEntity == null ? managedEntity : returnedEntity;
MongoPersistentEntity<?> collectionEntity = domainClass.isAssignableFrom(returnedObjectType) ? returnedEntity
: managedEntity;
@@ -20,13 +20,10 @@ import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Field;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.TextCriteria;
import org.springframework.data.repository.query.QueryMethod;
import org.springframework.data.repository.query.RepositoryQuery;
import org.springframework.data.repository.query.ResultProcessor;
import org.springframework.data.repository.query.ReturnedType;
import org.springframework.data.repository.query.parser.PartTree;
import org.springframework.util.StringUtils;

@@ -44,7 +41,6 @@ public class PartTreeMongoQuery extends AbstractMongoQuery {
private final PartTree tree;
private final boolean isGeoNearQuery;
private final MappingContext<?, MongoPersistentProperty> context;
private final ResultProcessor processor;

/**
* Creates a new {@link PartTreeMongoQuery} from the given {@link QueryMethod} and {@link MongoTemplate}.
@@ -55,9 +51,7 @@ public class PartTreeMongoQuery extends AbstractMongoQuery {
public PartTreeMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations) {

super(method, mongoOperations);

this.processor = method.getResultProcessor();
this.tree = new PartTree(method.getName(), processor.getReturnedType().getDomainType());
this.tree = new PartTree(method.getName(), method.getEntityInformation().getJavaType());
this.isGeoNearQuery = method.isGeoNearQuery();
this.context = mongoOperations.getConverter().getMappingContext();
}
@@ -93,18 +87,6 @@ public class PartTreeMongoQuery extends AbstractMongoQuery {
String fieldSpec = this.getQueryMethod().getFieldSpecification();

if (!StringUtils.hasText(fieldSpec)) {

ReturnedType returnedType = processor.withDynamicProjection(accessor).getReturnedType();

if (returnedType.isProjecting()) {

Field fields = query.fields();

for (String field : returnedType.getInputProperties()) {
fields.include(field);
}
}

return query;
}
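The 1.9 branch above narrows the returned fields when the method's return type is a projection; the same field inclusion can be reproduced directly on a Query (criteria and property names are made up):

import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

public class ProjectionFieldsSketch {

	public static void main(String[] args) {

		Query query = new Query(Criteria.where("lastname").is("Matthews"));

		// Equivalent of iterating ReturnedType#getInputProperties() and including each one.
		query.fields().include("firstname").include("lastname");

		System.out.println(query.getQueryObject() + " -> " + query.getFieldsObject());
	}
}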
@@ -21,13 +21,17 @@ import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.xml.bind.DatatypeConverter;

import org.bson.BSON;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.repository.query.ExpressionEvaluatingParameterBinder.BindingContext;
import org.springframework.data.repository.query.EvaluationContextProvider;
import org.springframework.expression.EvaluationContext;
import org.springframework.expression.Expression;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
@@ -55,7 +59,8 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
private final boolean isDeleteQuery;
private final List<ParameterBinding> queryParameterBindings;
private final List<ParameterBinding> fieldSpecParameterBindings;
private final ExpressionEvaluatingParameterBinder parameterBinder;
private final SpelExpressionParser expressionParser;
private final EvaluationContextProvider evaluationContextProvider;

/**
* Creates a new {@link StringBasedMongoQuery} for the given {@link MongoQueryMethod} and {@link MongoOperations}.
@@ -87,6 +92,9 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
Assert.notNull(query, "Query must not be null!");
Assert.notNull(expressionParser, "SpelExpressionParser must not be null!");

this.expressionParser = expressionParser;
this.evaluationContextProvider = evaluationContextProvider;

this.queryParameterBindings = new ArrayList<ParameterBinding>();
this.query = BINDING_PARSER.parseAndCollectParameterBindingsFromQueryIntoBindings(query,
this.queryParameterBindings);
@@ -101,8 +109,6 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
if (isCountQuery && isDeleteQuery) {
throw new IllegalArgumentException(String.format(COUND_AND_DELETE, method));
}

this.parameterBinder = new ExpressionEvaluatingParameterBinder(expressionParser, evaluationContextProvider);
}

/*
@@ -112,15 +118,21 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
@Override
protected Query createQuery(ConvertingParameterAccessor accessor) {

String queryString = parameterBinder.bind(this.query, accessor, new BindingContext(getQueryMethod()
.getParameters(), queryParameterBindings));
String fieldsString = parameterBinder.bind(this.fieldSpec, accessor, new BindingContext(getQueryMethod()
.getParameters(), fieldSpecParameterBindings));
String queryString = replacePlaceholders(query, accessor, queryParameterBindings);

Query query = new BasicQuery(queryString, fieldsString).with(accessor.getSort());
Query query = null;

if (fieldSpec != null) {
String fieldString = replacePlaceholders(fieldSpec, accessor, fieldSpecParameterBindings);
query = new BasicQuery(queryString, fieldString);
} else {
query = new BasicQuery(queryString);
}

query.with(accessor.getSort());

if (LOG.isDebugEnabled()) {
LOG.debug(String.format("Created query %s for %s fields.", query.getQueryObject(), query.getFieldsObject()));
LOG.debug(String.format("Created query %s", query.getQueryObject()));
}

return query;
@@ -144,6 +156,105 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
return this.isDeleteQuery;
}

/**
* Replaces the parameter placeholders with the actual parameter values from the given {@link ParameterBinding}s.
*
* @param input
* @param accessor
* @param bindings
* @return
*/
private String replacePlaceholders(String input, ConvertingParameterAccessor accessor,
List<ParameterBinding> bindings) {

if (bindings.isEmpty()) {
return input;
}

boolean isCompletlyParameterizedQuery = input.matches("^\\?\\d+$");

StringBuilder result = new StringBuilder(input);

for (ParameterBinding binding : bindings) {

String parameter = binding.getParameter();
int idx = result.indexOf(parameter);

if (idx != -1) {
String valueForBinding = getParameterValueForBinding(accessor, binding);

// if the value to bind is an object literal we need to remove the quoting around
// the expression insertion point.
boolean shouldPotentiallyRemoveQuotes = valueForBinding.startsWith("{") && !isCompletlyParameterizedQuery;

int start = idx;
int end = idx + parameter.length();

if (shouldPotentiallyRemoveQuotes) {

// is the insertion point actually surrounded by quotes?
char beforeStart = result.charAt(start - 1);
char afterEnd = result.charAt(end);

if ((beforeStart == '\'' || beforeStart == '"') && (afterEnd == '\'' || afterEnd == '"')) {

// skip preceding and following quote
start -= 1;
end += 1;
}
}

result.replace(start, end, valueForBinding);
}
}

return result.toString();
}

/**
* Returns the serialized value to be used for the given {@link ParameterBinding}.
*
* @param accessor
* @param binding
* @return
*/
private String getParameterValueForBinding(ConvertingParameterAccessor accessor, ParameterBinding binding) {

Object value = binding.isExpression() ? evaluateExpression(binding.getExpression(), accessor.getValues())
: accessor.getBindableValue(binding.getParameterIndex());

if (value instanceof String && binding.isQuoted()) {
return (String) value;
}

if (value instanceof byte[]) {

String base64representation = DatatypeConverter.printBase64Binary((byte[]) value);
if (!binding.isQuoted()) {
return "{ '$binary' : '" + base64representation + "', '$type' : " + BSON.B_GENERAL + "}";
}
return base64representation;
}

return JSON.serialize(value);
}

/**
* Evaluates the given {@code expressionString}.
*
* @param expressionString
* @param parameterValues
* @return
*/
private Object evaluateExpression(String expressionString, Object[] parameterValues) {

EvaluationContext evaluationContext = evaluationContextProvider
.getEvaluationContext(getQueryMethod().getParameters(), parameterValues);
Expression expression = expressionParser.parseExpression(expressionString);

return expression.getValue(evaluationContext, Object.class);
}

/**
* A parser that extracts the parameter bindings from a given query string.
*
@@ -318,7 +429,7 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
*
* @author Thomas Darimont
*/
static class ParameterBinding {
private static class ParameterBinding {

private final int parameterIndex;
private final boolean quoted;
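For context, the placeholders both versions of replacePlaceholders operate on come from @Query strings like the ones below. The repository and domain type are invented, and the SpEL-style binding in the second method (?#{...}) is the form the new expression-evaluating binder is meant to handle; check the reference documentation for the exact syntax supported by your version:

import java.util.List;

import org.springframework.data.mongodb.repository.Query;
import org.springframework.data.repository.Repository;

// Person is a placeholder domain type.
interface AnnotatedPersonRepository extends Repository<Person, String> {

	@Query("{ 'lastname' : ?0 }") // positional placeholder, bound via getBindableValue(0)
	List<Person> findByLastname(String lastname);

	@Query("{ 'firstname' : ?#{[0]} }") // assumed SpEL binding, routed through evaluateExpression(...)
	List<Person> findByFirstname(String firstname);
}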
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2015 the original author or authors.
* Copyright 2011-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -25,14 +25,14 @@ import javax.tools.Diagnostic;

import org.springframework.data.mongodb.core.mapping.Document;

import com.querydsl.apt.AbstractQuerydslProcessor;
import com.querydsl.apt.Configuration;
import com.querydsl.apt.DefaultConfiguration;
import com.querydsl.core.annotations.QueryEmbeddable;
import com.querydsl.core.annotations.QueryEmbedded;
import com.querydsl.core.annotations.QueryEntities;
import com.querydsl.core.annotations.QuerySupertype;
import com.querydsl.core.annotations.QueryTransient;
import com.mysema.query.annotations.QueryEmbeddable;
import com.mysema.query.annotations.QueryEmbedded;
import com.mysema.query.annotations.QueryEntities;
import com.mysema.query.annotations.QuerySupertype;
import com.mysema.query.annotations.QueryTransient;
import com.mysema.query.apt.AbstractQuerydslProcessor;
import com.mysema.query.apt.Configuration;
import com.mysema.query.apt.DefaultConfiguration;

/**
* Annotation processor to create Querydsl query types for QueryDsl annotated classes.
@@ -30,7 +30,6 @@ import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
import org.springframework.data.mongodb.repository.query.MongoQueryMethod;
import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery;
import org.springframework.data.mongodb.repository.query.StringBasedMongoQuery;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.data.querydsl.QueryDslPredicateExecutor;
import org.springframework.data.repository.core.NamedQueries;
import org.springframework.data.repository.core.RepositoryInformation;
@@ -54,7 +53,7 @@ public class MongoRepositoryFactory extends RepositoryFactorySupport {

private static final SpelExpressionParser EXPRESSION_PARSER = new SpelExpressionParser();

private final MongoOperations operations;
private final MongoOperations mongoOperations;
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;

/**
@@ -66,7 +65,7 @@ public class MongoRepositoryFactory extends RepositoryFactorySupport {

Assert.notNull(mongoOperations);

this.operations = mongoOperations;
this.mongoOperations = mongoOperations;
this.mappingContext = mongoOperations.getConverter().getMappingContext();
}

@@ -92,7 +91,7 @@ public class MongoRepositoryFactory extends RepositoryFactorySupport {

MongoEntityInformation<?, Serializable> entityInformation = getEntityInformation(information.getDomainType(),
information);
return getTargetRepositoryViaReflection(information, entityInformation, operations);
return getTargetRepositoryViaReflection(information, entityInformation, mongoOperations);
}

/*
@@ -101,7 +100,7 @@ public class MongoRepositoryFactory extends RepositoryFactorySupport {
*/
@Override
protected QueryLookupStrategy getQueryLookupStrategy(Key key, EvaluationContextProvider evaluationContextProvider) {
return new MongoQueryLookupStrategy(operations, evaluationContextProvider, mappingContext);
return new MongoQueryLookupStrategy(evaluationContextProvider);
}

/*
@@ -133,39 +132,31 @@ public class MongoRepositoryFactory extends RepositoryFactorySupport {
* @author Oliver Gierke
* @author Thomas Darimont
*/
private static class MongoQueryLookupStrategy implements QueryLookupStrategy {
private class MongoQueryLookupStrategy implements QueryLookupStrategy {

private final MongoOperations operations;
private final EvaluationContextProvider evaluationContextProvider;
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;

public MongoQueryLookupStrategy(MongoOperations operations, EvaluationContextProvider evaluationContextProvider,
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

this.operations = operations;
public MongoQueryLookupStrategy(EvaluationContextProvider evaluationContextProvider) {
this.evaluationContextProvider = evaluationContextProvider;
this.mappingContext = mappingContext;
}

/*
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.QueryLookupStrategy#resolveQuery(java.lang.reflect.Method, org.springframework.data.repository.core.RepositoryMetadata, org.springframework.data.projection.ProjectionFactory, org.springframework.data.repository.core.NamedQueries)
* @see org.springframework.data.repository.query.QueryLookupStrategy#resolveQuery(java.lang.reflect.Method, org.springframework.data.repository.core.RepositoryMetadata, org.springframework.data.repository.core.NamedQueries)
*/
@Override
public RepositoryQuery resolveQuery(Method method, RepositoryMetadata metadata, ProjectionFactory factory,
NamedQueries namedQueries) {
public RepositoryQuery resolveQuery(Method method, RepositoryMetadata metadata, NamedQueries namedQueries) {

MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, mappingContext);
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, mappingContext);
String namedQueryName = queryMethod.getNamedQueryName();

if (namedQueries.hasQuery(namedQueryName)) {
String namedQuery = namedQueries.getQuery(namedQueryName);
return new StringBasedMongoQuery(namedQuery, queryMethod, operations, EXPRESSION_PARSER,
return new StringBasedMongoQuery(namedQuery, queryMethod, mongoOperations, EXPRESSION_PARSER,
evaluationContextProvider);
} else if (queryMethod.hasAnnotatedQuery()) {
return new StringBasedMongoQuery(queryMethod, operations, EXPRESSION_PARSER, evaluationContextProvider);
return new StringBasedMongoQuery(queryMethod, mongoOperations, EXPRESSION_PARSER, evaluationContextProvider);
} else {
return new PartTreeMongoQuery(queryMethod, operations);
return new PartTreeMongoQuery(queryMethod, mongoOperations);
}
}
}
@@ -34,12 +34,12 @@ import org.springframework.data.repository.core.EntityInformation;
import org.springframework.data.repository.core.EntityMetadata;
import org.springframework.util.Assert;

import com.querydsl.core.types.EntityPath;
import com.querydsl.core.types.Expression;
import com.querydsl.core.types.OrderSpecifier;
import com.querydsl.core.types.Predicate;
import com.querydsl.core.types.dsl.PathBuilder;
import com.querydsl.mongodb.AbstractMongodbQuery;
import com.mysema.query.mongodb.MongodbQuery;
import com.mysema.query.types.EntityPath;
import com.mysema.query.types.Expression;
import com.mysema.query.types.OrderSpecifier;
import com.mysema.query.types.Predicate;
import com.mysema.query.types.path.PathBuilder;

/**
* Special QueryDsl based repository implementation that allows execution {@link Predicate}s in various forms.
@@ -47,8 +47,8 @@ import com.querydsl.mongodb.AbstractMongodbQuery;
* @author Oliver Gierke
* @author Thomas Darimont
*/
public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleMongoRepository<T, ID>
implements QueryDslPredicateExecutor<T> {
public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleMongoRepository<T, ID> implements
QueryDslPredicateExecutor<T> {

private final PathBuilder<T> builder;
private final EntityInformation<T, ID> entityInformation;
@@ -92,7 +92,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
*/
@Override
public T findOne(Predicate predicate) {
return createQueryFor(predicate).fetchOne();
return createQueryFor(predicate).uniqueResult();
}

/*
@@ -101,7 +101,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
*/
@Override
public List<T> findAll(Predicate predicate) {
return createQueryFor(predicate).fetchResults().getResults();
return createQueryFor(predicate).list();
}

/*
@@ -110,7 +110,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
*/
@Override
public List<T> findAll(Predicate predicate, OrderSpecifier<?>... orders) {
return createQueryFor(predicate).orderBy(orders).fetchResults().getResults();
return createQueryFor(predicate).orderBy(orders).list();
}

/*
@@ -119,7 +119,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
*/
@Override
public List<T> findAll(Predicate predicate, Sort sort) {
return applySorting(createQueryFor(predicate), sort).fetchResults().getResults();
return applySorting(createQueryFor(predicate), sort).list();
}

/*
@@ -128,7 +128,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
*/
@Override
public Iterable<T> findAll(OrderSpecifier<?>... orders) {
return createQuery().orderBy(orders).fetchResults().getResults();
return createQuery().orderBy(orders).list();
}

/*
@@ -138,11 +138,10 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
@Override
public Page<T> findAll(Predicate predicate, Pageable pageable) {

AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> countQuery = createQueryFor(predicate);
AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> query = createQueryFor(predicate);
MongodbQuery<T> countQuery = createQueryFor(predicate);
MongodbQuery<T> query = createQueryFor(predicate);

return new PageImpl<T>(applyPagination(query, pageable).fetchResults().getResults(), pageable,
countQuery.fetchCount());
return new PageImpl<T>(applyPagination(query, pageable).list(), pageable, countQuery.count());
}

/*
@@ -152,11 +151,10 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
@Override
public Page<T> findAll(Pageable pageable) {

AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> countQuery = createQuery();
AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> query = createQuery();
MongodbQuery<T> countQuery = createQuery();
MongodbQuery<T> query = createQuery();

return new PageImpl<T>(applyPagination(query, pageable).fetchResults().getResults(), pageable,
countQuery.fetchCount());
return new PageImpl<T>(applyPagination(query, pageable).list(), pageable, countQuery.count());
}

/*
@@ -165,7 +163,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
*/
@Override
public List<T> findAll(Sort sort) {
return applySorting(createQuery(), sort).fetchResults().getResults();
return applySorting(createQuery(), sort).list();
}

/*
@@ -174,7 +172,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
*/
@Override
public long count(Predicate predicate) {
return createQueryFor(predicate).fetchCount();
return createQueryFor(predicate).count();
}

/*
@@ -183,7 +181,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
*/
@Override
public boolean exists(Predicate predicate) {
return createQueryFor(predicate).fetchCount() > 0;
return createQueryFor(predicate).exists();
}

/**
@@ -192,7 +190,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
* @param predicate
* @return
*/
private AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> createQueryFor(Predicate predicate) {
private MongodbQuery<T> createQueryFor(Predicate predicate) {
return createQuery().where(predicate);
}

@@ -201,7 +199,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
*
* @return
*/
private AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> createQuery() {
private MongodbQuery<T> createQuery() {
return new SpringDataMongodbQuery<T>(mongoOperations, entityInformation.getJavaType());
}

@@ -212,8 +210,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
* @param pageable
* @return
*/
private AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> applyPagination(
AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> query, Pageable pageable) {
private MongodbQuery<T> applyPagination(MongodbQuery<T> query, Pageable pageable) {

if (pageable == null) {
return query;
@@ -230,8 +227,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
* @param sort
* @return
*/
private AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> applySorting(
AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> query, Sort sort) {
private MongodbQuery<T> applySorting(MongodbQuery<T> query, Sort sort) {

if (sort == null) {
return query;
@@ -264,7 +260,7 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM

Expression<Object> property = builder.get(order.getProperty());

return new OrderSpecifier(
order.isAscending() ? com.querydsl.core.types.Order.ASC : com.querydsl.core.types.Order.DESC, property);
return new OrderSpecifier(order.isAscending() ? com.mysema.query.types.Order.ASC
: com.mysema.query.types.Order.DESC, property);
}
}
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2015 the original author or authors.
* Copyright 2011-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,8 +20,8 @@ import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.util.Assert;

import com.querydsl.core.types.EntityPath;
import com.querydsl.mongodb.AbstractMongodbQuery;
import com.mysema.query.mongodb.MongodbQuery;
import com.mysema.query.types.EntityPath;

/**
* Base class to create repository implementations based on Querydsl.
@@ -36,7 +36,7 @@ public abstract class QuerydslRepositorySupport {
/**
* Creates a new {@link QuerydslRepositorySupport} for the given {@link MongoOperations}.
*
* @param operations must not be {@literal null}.
* @param operations must not be {@literal null}
*/
public QuerydslRepositorySupport(MongoOperations operations) {

@@ -53,7 +53,7 @@ public abstract class QuerydslRepositorySupport {
* @param path
* @return
*/
protected <T> AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> from(final EntityPath<T> path) {
protected <T> MongodbQuery<T> from(final EntityPath<T> path) {
Assert.notNull(path);
MongoPersistentEntity<?> entity = context.getPersistentEntity(path.getType());
return from(path, entity.getCollection());
@@ -66,7 +66,7 @@ public abstract class QuerydslRepositorySupport {
* @param collection must not be blank or {@literal null}
* @return
*/
protected <T> AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> from(final EntityPath<T> path, String collection) {
protected <T> MongodbQuery<T> from(final EntityPath<T> path, String collection) {

Assert.notNull(path);
Assert.hasText(collection);
@@ -1,5 +1,5 @@
/*
* Copyright 2012-2015 the original author or authors.
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,14 +20,14 @@ import org.springframework.data.mongodb.core.MongoOperations;

import com.google.common.base.Function;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.querydsl.mongodb.AbstractMongodbQuery;
import com.mysema.query.mongodb.MongodbQuery;

/**
* Spring Data specific {@link MongodbQuery} implementation.
* Spring Data specfic {@link MongodbQuery} implementation.
*
* @author Oliver Gierke
*/
class SpringDataMongodbQuery<T> extends AbstractMongodbQuery<T, SpringDataMongodbQuery<T>> {
class SpringDataMongodbQuery<T> extends MongodbQuery<T> {

private final MongoOperations operations;

@@ -48,8 +48,7 @@ class SpringDataMongodbQuery<T> extends AbstractMongodbQuery<T, SpringDataMongod
* @param type must not be {@literal null}.
* @param collectionName must not be {@literal null} or empty.
*/
public SpringDataMongodbQuery(final MongoOperations operations, final Class<? extends T> type,
String collectionName) {
public SpringDataMongodbQuery(final MongoOperations operations, final Class<? extends T> type, String collectionName) {

super(operations.getCollection(collectionName), new Function<DBObject, T>() {
public T apply(DBObject input) {
@@ -62,7 +61,7 @@ class SpringDataMongodbQuery<T> extends AbstractMongodbQuery<T, SpringDataMongod

/*
* (non-Javadoc)
* @see com.querydsl.mongodb.AbstractMongodbQuery#getCollection(java.lang.Class)
* @see com.mysema.query.mongodb.MongodbQuery#getCollection(java.lang.Class)
*/
@Override
protected DBCollection getCollection(Class<?> type) {
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2015 the original author or authors.
* Copyright 2011-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -29,10 +29,12 @@ import org.springframework.util.Assert;

import com.mongodb.DBObject;
import com.mongodb.DBRef;
import com.querydsl.core.types.Path;
import com.querydsl.core.types.PathMetadata;
import com.querydsl.core.types.PathType;
import com.querydsl.mongodb.MongodbSerializer;
import com.mysema.query.mongodb.MongodbSerializer;
import com.mysema.query.types.Constant;
import com.mysema.query.types.Operation;
import com.mysema.query.types.Path;
import com.mysema.query.types.PathMetadata;
import com.mysema.query.types.PathType;

/**
* Custom {@link MongodbSerializer} to take mapping information into account when building keys for constraints.
@@ -74,10 +76,10 @@ class SpringDataMongodbSerializer extends MongodbSerializer {

/*
* (non-Javadoc)
* @see com.querydsl.mongodb.MongodbSerializer#getKeyForPath(com.querydsl.core.types.Path, com.querydsl.core.types.PathMetadata)
* @see com.mysema.query.mongodb.MongodbSerializer#getKeyForPath(com.mysema.query.types.Path, com.mysema.query.types.PathMetadata)
*/
@Override
protected String getKeyForPath(Path<?> expr, PathMetadata metadata) {
protected String getKeyForPath(Path<?> expr, PathMetadata<?> metadata) {

if (!metadata.getPathType().equals(PathType.PROPERTY)) {
return super.getKeyForPath(expr, metadata);
@@ -92,7 +94,7 @@ class SpringDataMongodbSerializer extends MongodbSerializer {

/*
* (non-Javadoc)
* @see com.querydsl.mongodb.MongodbSerializer#asDBObject(java.lang.String, java.lang.Object)
* @see com.mysema.query.mongodb.MongodbSerializer#asDBObject(java.lang.String, java.lang.Object)
*/
@Override
protected DBObject asDBObject(String key, Object value) {
@@ -106,7 +108,7 @@ class SpringDataMongodbSerializer extends MongodbSerializer {

/*
* (non-Javadoc)
* @see com.querydsl.mongodb.MongodbSerializer#isReference(com.querydsl.core.types.Path)
* @see com.mysema.query.mongodb.MongodbSerializer#isReference(com.mysema.query.types.Path)
*/
@Override
protected boolean isReference(Path<?> path) {
@@ -117,13 +119,34 @@ class SpringDataMongodbSerializer extends MongodbSerializer {

/*
* (non-Javadoc)
* @see com.querydsl.mongodb.MongodbSerializer#asReference(java.lang.Object)
* @see com.mysema.query.mongodb.MongodbSerializer#asReference(java.lang.Object)
*/
@Override
protected DBRef asReference(Object constant) {
return converter.toDBRef(constant, null);
}

/*
* (non-Javadoc)
* @see com.mysema.query.mongodb.MongodbSerializer#asReference(com.mysema.query.types.Operation, int)
*/
@Override
protected DBRef asReference(Operation<?> expr, int constIndex) {

for (Object arg : expr.getArgs()) {

if (arg instanceof Path) {

MongoPersistentProperty property = getPropertyFor((Path<?>) arg);
Object constant = ((Constant<?>) expr.getArg(constIndex)).getConstant();

return converter.toDBRef(constant, property);
}
}

return super.asReference(expr, constIndex);
}

private MongoPersistentProperty getPropertyFor(Path<?> path) {

Path<?> parent = path.getMetadata().getParent();
@@ -1,140 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.util;
|
||||
|
||||
import java.util.HashMap;
|
||||
|
||||
/**
|
||||
* {@link MongoDbErrorCodes} holds MongoDB specific error codes outlined in {@literal mongo/base/error_codes.err}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.8
|
||||
*/
|
||||
public final class MongoDbErrorCodes {
|
||||
|
||||
static HashMap<Integer, String> dataAccessResourceFailureCodes;
|
||||
static HashMap<Integer, String> dataIntegrityViolationCodes;
|
||||
static HashMap<Integer, String> duplicateKeyCodes;
|
||||
static HashMap<Integer, String> invalidDataAccessApiUsageExeption;
|
||||
static HashMap<Integer, String> permissionDeniedCodes;
|
||||
|
||||
static HashMap<Integer, String> errorCodes;
|
||||
|
||||
static {
|
||||
|
||||
dataAccessResourceFailureCodes = new HashMap<Integer, String>(10);
|
||||
dataAccessResourceFailureCodes.put(6, "HostUnreachable");
|
||||
dataAccessResourceFailureCodes.put(7, "HostNotFound");
|
||||
dataAccessResourceFailureCodes.put(89, "NetworkTimeout");
|
||||
dataAccessResourceFailureCodes.put(91, "ShutdownInProgress");
|
||||
dataAccessResourceFailureCodes.put(12000, "SlaveDelayDifferential");
|
||||
dataAccessResourceFailureCodes.put(10084, "CannotFindMapFile64Bit");
|
||||
dataAccessResourceFailureCodes.put(10085, "CannotFindMapFile");
|
||||
dataAccessResourceFailureCodes.put(10357, "ShutdownInProgress");
|
||||
dataAccessResourceFailureCodes.put(10359, "Header==0");
|
||||
dataAccessResourceFailureCodes.put(13440, "BadOffsetInFile");
|
||||
dataAccessResourceFailureCodes.put(13441, "BadOffsetInFile");
|
||||
dataAccessResourceFailureCodes.put(13640, "DataFileHeaderCorrupt");
|
||||
|
||||
dataIntegrityViolationCodes = new HashMap<Integer, String>(6);
|
||||
dataIntegrityViolationCodes.put(67, "CannotCreateIndex");
|
||||
dataIntegrityViolationCodes.put(68, "IndexAlreadyExists");
|
||||
dataIntegrityViolationCodes.put(85, "IndexOptionsConflict");
|
||||
dataIntegrityViolationCodes.put(86, "IndexKeySpecsConflict");
|
||||
dataIntegrityViolationCodes.put(112, "WriteConflict");
|
||||
dataIntegrityViolationCodes.put(117, "ConflictingOperationInProgress");
|
||||
|
||||
duplicateKeyCodes = new HashMap<Integer, String>(3);
|
||||
duplicateKeyCodes.put(3, "OBSOLETE_DuplicateKey");
|
||||
duplicateKeyCodes.put(84, "DuplicateKeyValue");
|
||||
duplicateKeyCodes.put(11000, "DuplicateKey");
|
||||
duplicateKeyCodes.put(11001, "DuplicateKey");
|
||||
|
||||
invalidDataAccessApiUsageExeption = new HashMap<Integer, String>();
|
||||
invalidDataAccessApiUsageExeption.put(5, "GraphContainsCycle");
|
||||
invalidDataAccessApiUsageExeption.put(9, "FailedToParse");
|
||||
invalidDataAccessApiUsageExeption.put(14, "TypeMismatch");
|
||||
invalidDataAccessApiUsageExeption.put(15, "Overflow");
|
||||
invalidDataAccessApiUsageExeption.put(16, "InvalidLength");
|
||||
invalidDataAccessApiUsageExeption.put(20, "IllegalOperation");
|
||||
invalidDataAccessApiUsageExeption.put(21, "EmptyArrayOperation");
|
||||
invalidDataAccessApiUsageExeption.put(22, "InvalidBSON");
|
||||
invalidDataAccessApiUsageExeption.put(23, "AlreadyInitialized");
|
||||
invalidDataAccessApiUsageExeption.put(29, "NonExistentPath");
|
||||
invalidDataAccessApiUsageExeption.put(30, "InvalidPath");
|
||||
invalidDataAccessApiUsageExeption.put(40, "ConflictingUpdateOperators");
|
||||
invalidDataAccessApiUsageExeption.put(45, "UserDataInconsistent");
|
||||
invalidDataAccessApiUsageExeption.put(30, "DollarPrefixedFieldName");
|
||||
invalidDataAccessApiUsageExeption.put(52, "InvalidPath");
|
||||
invalidDataAccessApiUsageExeption.put(53, "InvalidIdField");
|
||||
invalidDataAccessApiUsageExeption.put(54, "NotSingleValueField");
|
||||
invalidDataAccessApiUsageExeption.put(55, "InvalidDBRef");
|
||||
invalidDataAccessApiUsageExeption.put(56, "EmptyFieldName");
|
||||
invalidDataAccessApiUsageExeption.put(57, "DottedFieldName");
|
||||
invalidDataAccessApiUsageExeption.put(59, "CommandNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(60, "DatabaseNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(61, "ShardKeyNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(62, "OplogOperationUnsupported");
|
||||
invalidDataAccessApiUsageExeption.put(66, "ImmutableField");
|
||||
invalidDataAccessApiUsageExeption.put(72, "InvalidOptions");
|
||||
invalidDataAccessApiUsageExeption.put(115, "CommandNotSupported");
|
||||
invalidDataAccessApiUsageExeption.put(116, "DocTooLargeForCapped");
|
||||
invalidDataAccessApiUsageExeption.put(130, "SymbolNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(17280, "KeyTooLong");
|
||||
invalidDataAccessApiUsageExeption.put(13334, "ShardKeyTooBig");
|
||||
|
||||
permissionDeniedCodes = new HashMap<Integer, String>();
|
||||
permissionDeniedCodes.put(11, "UserNotFound");
|
||||
permissionDeniedCodes.put(18, "AuthenticationFailed");
|
||||
permissionDeniedCodes.put(31, "RoleNotFound");
|
||||
permissionDeniedCodes.put(32, "RolesNotRelated");
|
||||
permissionDeniedCodes.put(33, "PrvilegeNotFound");
|
||||
permissionDeniedCodes.put(15847, "CannotAuthenticate");
|
||||
permissionDeniedCodes.put(16704, "CannotAuthenticateToAdminDB");
|
||||
permissionDeniedCodes.put(16705, "CannotAuthenticateToAdminDB");
|
||||
|
||||
errorCodes = new HashMap<Integer, String>();
|
||||
errorCodes.putAll(dataAccessResourceFailureCodes);
|
||||
errorCodes.putAll(dataIntegrityViolationCodes);
|
||||
errorCodes.putAll(duplicateKeyCodes);
|
||||
errorCodes.putAll(invalidDataAccessApiUsageExeption);
|
||||
errorCodes.putAll(permissionDeniedCodes);
|
||||
}
|
||||
|
||||
public static boolean isDataIntegrityViolationCode(Integer errorCode) {
|
||||
return errorCode == null ? false : dataIntegrityViolationCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isDataAccessResourceFailureCode(Integer errorCode) {
|
||||
return errorCode == null ? false : dataAccessResourceFailureCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isDuplicateKeyCode(Integer errorCode) {
|
||||
return errorCode == null ? false : duplicateKeyCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isPermissionDeniedCode(Integer errorCode) {
|
||||
return errorCode == null ? false : permissionDeniedCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isInvalidDataAccessApiUsageCode(Integer errorCode) {
|
||||
return errorCode == null ? false : invalidDataAccessApiUsageExeption.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static String getErrorDescription(Integer errorCode) {
|
||||
return errorCode == null ? null : errorCodes.get(errorCode);
|
||||
}
|
||||
}
|
||||
@@ -1,343 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.mongodb.BulkOperationException;
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.util.Tuple;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BulkWriteResult;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
/**
|
||||
* Integration tests for {@link DefaultBulkOperations}.
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:infrastructure.xml")
|
||||
public class DefaultBulkOperationsIntegrationTests {
|
||||
|
||||
static final String COLLECTION_NAME = "bulk_ops";
|
||||
|
||||
@Autowired MongoOperations operations;
|
||||
|
||||
DBCollection collection;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
this.collection = this.operations.getCollection(COLLECTION_NAME);
|
||||
this.collection.remove(new BasicDBObject());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void rejectsNullMongoOperations() {
|
||||
new DefaultBulkOperations(null, null, COLLECTION_NAME, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void rejectsNullCollectionName() {
|
||||
new DefaultBulkOperations(operations, null, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void rejectsEmptyCollectionName() {
|
||||
new DefaultBulkOperations(operations, null, "", null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void insertOrdered() {
|
||||
|
||||
List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("2"));
|
||||
|
||||
assertThat(createBulkOps(BulkMode.ORDERED).insert(documents).execute().getInsertedCount(), is(2));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void insertOrderedFails() {
|
||||
|
||||
List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2"));
|
||||
|
||||
try {
|
||||
createBulkOps(BulkMode.ORDERED).insert(documents).execute();
|
||||
fail();
|
||||
} catch (BulkOperationException e) {
|
||||
assertThat(e.getResult().getInsertedCount(), is(1)); // fails after first error
|
||||
assertThat(e.getErrors(), notNullValue());
|
||||
assertThat(e.getErrors().size(), is(1));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void insertUnOrdered() {
|
||||
|
||||
List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("2"));
|
||||
|
||||
assertThat(createBulkOps(BulkMode.UNORDERED).insert(documents).execute().getInsertedCount(), is(2));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void insertUnOrderedContinuesOnError() {
|
||||
|
||||
List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2"));
|
||||
|
||||
try {
|
||||
createBulkOps(BulkMode.UNORDERED).insert(documents).execute();
|
||||
fail();
|
||||
} catch (BulkOperationException e) {
|
||||
assertThat(e.getResult().getInsertedCount(), is(2)); // two docs were inserted
|
||||
assertThat(e.getErrors(), notNullValue());
|
||||
assertThat(e.getErrors().size(), is(1));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void upsertDoesUpdate() {
|
||||
|
||||
insertSomeDocuments();
|
||||
|
||||
BulkWriteResult result = createBulkOps(BulkMode.ORDERED).//
|
||||
upsert(where("value", "value1"), set("value", "value2")).//
|
||||
execute();
|
||||
|
||||
assertThat(result, notNullValue());
|
||||
assertThat(result.getMatchedCount(), is(2));
|
||||
assertThat(result.getModifiedCount(), is(2));
|
||||
assertThat(result.getInsertedCount(), is(0));
|
||||
assertThat(result.getUpserts(), is(notNullValue()));
|
||||
assertThat(result.getUpserts().size(), is(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void upsertDoesInsert() {
|
||||
|
||||
BulkWriteResult result = createBulkOps(BulkMode.ORDERED).//
|
||||
upsert(where("_id", "1"), set("value", "v1")).//
|
||||
execute();
|
||||
|
||||
assertThat(result, notNullValue());
|
||||
assertThat(result.getMatchedCount(), is(0));
|
||||
assertThat(result.getModifiedCount(), is(0));
|
||||
assertThat(result.getUpserts(), is(notNullValue()));
|
||||
assertThat(result.getUpserts().size(), is(1));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void updateOneOrdered() {
|
||||
testUpdate(BulkMode.ORDERED, false, 2);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void updateMultiOrdered() {
|
||||
testUpdate(BulkMode.ORDERED, true, 4);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void updateOneUnOrdered() {
|
||||
testUpdate(BulkMode.UNORDERED, false, 2);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void updateMultiUnOrdered() {
|
||||
testUpdate(BulkMode.UNORDERED, true, 4);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void removeOrdered() {
|
||||
testRemove(BulkMode.ORDERED);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void removeUnordered() {
|
||||
testRemove(BulkMode.UNORDERED);
|
||||
}
|
||||
|
||||
/**
|
||||
* If working on the same set of documents, only an ordered bulk operation will yield predictable results.
|
||||
*
|
||||
* @see DATAMONGO-934
|
||||
*/
|
||||
@Test
|
||||
public void mixedBulkOrdered() {
|
||||
|
||||
BulkWriteResult result = createBulkOps(BulkMode.ORDERED).insert(newDoc("1", "v1")).//
|
||||
updateOne(where("_id", "1"), set("value", "v2")).//
|
||||
remove(where("value", "v2")).//
|
||||
execute();
|
||||
|
||||
assertThat(result, notNullValue());
|
||||
assertThat(result.getInsertedCount(), is(1));
|
||||
assertThat(result.getModifiedCount(), is(1));
|
||||
assertThat(result.getRemovedCount(), is(1));
|
||||
}
|
||||
|
||||
/**
|
||||
* If working on the same set of documents, only an ordered bulk operation will yield predictable results.
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void mixedBulkOrderedWithList() {
|
||||
|
||||
List<BaseDoc> inserts = Arrays.asList(newDoc("1", "v1"), newDoc("2", "v2"), newDoc("3", "v2"));
|
||||
List<Tuple<Query, Update>> updates = Arrays.asList(Tuple.of(where("value", "v2"), set("value", "v3")));
|
||||
List<Query> removes = Arrays.asList(where("_id", "1"));
|
||||
|
||||
BulkWriteResult result = createBulkOps(BulkMode.ORDERED).insert(inserts).updateMulti(updates).remove(removes)
|
||||
.execute();
|
||||
|
||||
assertThat(result, notNullValue());
|
||||
assertThat(result.getInsertedCount(), is(3));
|
||||
assertThat(result.getModifiedCount(), is(2));
|
||||
assertThat(result.getRemovedCount(), is(1));
|
||||
}
|
||||
|
||||
private void testUpdate(BulkMode mode, boolean multi, int expectedUpdates) {
|
||||
|
||||
BulkOperations bulkOps = createBulkOps(mode);
|
||||
|
||||
insertSomeDocuments();
|
||||
|
||||
List<Tuple<Query, Update>> updates = new ArrayList<Tuple<Query, Update>>();
|
||||
updates.add(Tuple.of(where("value", "value1"), set("value", "value3")));
|
||||
updates.add(Tuple.of(where("value", "value2"), set("value", "value4")));
|
||||
|
||||
int modifiedCount = multi ? bulkOps.updateMulti(updates).execute().getModifiedCount()
|
||||
: bulkOps.updateOne(updates).execute().getModifiedCount();
|
||||
|
||||
assertThat(modifiedCount, is(expectedUpdates));
|
||||
}
|
||||
|
||||
private void testRemove(BulkMode mode) {
|
||||
|
||||
insertSomeDocuments();
|
||||
|
||||
List<Query> removes = Arrays.asList(where("_id", "1"), where("value", "value2"));
|
||||
|
||||
assertThat(createBulkOps(mode).remove(removes).execute().getRemovedCount(), is(3));
|
||||
}
|
||||
|
||||
private BulkOperations createBulkOps(BulkMode mode) {
|
||||
|
||||
DefaultBulkOperations operations = new DefaultBulkOperations(this.operations, mode, COLLECTION_NAME, null);
|
||||
operations.setDefaultWriteConcern(WriteConcern.ACKNOWLEDGED);
|
||||
|
||||
return operations;
|
||||
}
|
||||
|
||||
private void insertSomeDocuments() {
|
||||
|
||||
final DBCollection coll = operations.getCollection(COLLECTION_NAME);
|
||||
|
||||
coll.insert(rawDoc("1", "value1"));
|
||||
coll.insert(rawDoc("2", "value1"));
|
||||
coll.insert(rawDoc("3", "value2"));
|
||||
coll.insert(rawDoc("4", "value2"));
|
||||
}
|
||||
|
||||
private static BaseDoc newDoc(String id) {
|
||||
|
||||
BaseDoc doc = new BaseDoc();
|
||||
doc.id = id;
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
private static BaseDoc newDoc(String id, String value) {
|
||||
|
||||
BaseDoc doc = newDoc(id);
|
||||
doc.value = value;
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
private static Query where(String field, String value) {
|
||||
return new Query().addCriteria(Criteria.where(field).is(value));
|
||||
}
|
||||
|
||||
private static Update set(String field, String value) {
|
||||
return new Update().set(field, value);
|
||||
}
|
||||
|
||||
private static DBObject rawDoc(String id, String value) {
|
||||
return new BasicDBObject("_id", id).append("value", value);
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -24,7 +24,6 @@ import java.text.Format;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Currency;
|
||||
import java.util.Date;
|
||||
import java.util.Locale;
|
||||
import java.util.UUID;
|
||||
@@ -272,18 +271,6 @@ public class CustomConversionsUnitTests {
|
||||
assertThat(customConversions.getCustomWriteTarget(String.class, SimpleDateFormat.class), notNullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1372
|
||||
*/
|
||||
@Test
|
||||
public void registersConvertersForCurrency() {
|
||||
|
||||
CustomConversions customConversions = new CustomConversions();
|
||||
|
||||
assertThat(customConversions.hasCustomWriteTarget(Currency.class), is(true));
|
||||
assertThat(customConversions.hasCustomReadTarget(String.class, Currency.class), is(true));
|
||||
}
|
||||
|
||||
private static Class<?> createProxyTypeFor(Class<?> type) {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2016 by the original author(s).
|
||||
* Copyright (c) 2011-2014 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -19,7 +19,6 @@ import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.util.Currency;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.geo.Box;
|
||||
@@ -28,9 +27,7 @@ import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.geo.Polygon;
|
||||
import org.springframework.data.geo.Shape;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigDecimalToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.CurrencyToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigDecimalConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToCurrencyConverter;
|
||||
import org.springframework.data.mongodb.core.geo.Sphere;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
@@ -40,7 +37,6 @@ import com.mongodb.DBObject;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MongoConvertersUnitTests {
|
||||
|
||||
@@ -124,20 +120,4 @@ public class MongoConvertersUnitTests {
|
||||
|
||||
assertThat(converted, is((org.springframework.data.geo.Point) point));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1372
|
||||
*/
|
||||
@Test
|
||||
public void convertsCurrencyToStringCorrectly() {
|
||||
assertThat(CurrencyToStringConverter.INSTANCE.convert(Currency.getInstance("USD")), is("USD"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1372
|
||||
*/
|
||||
@Test
|
||||
public void convertsStringToCurrencyCorrectly() {
|
||||
assertThat(StringToCurrencyConverter.INSTANCE.convert("USD"), is(Currency.getInstance("USD")));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,62 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.hamcrest.core.Is.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
import org.junit.runners.Parameterized.Parameter;
|
||||
import org.junit.runners.Parameterized.Parameters;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.NumberToNumberConverterFactory;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(Parameterized.class)
|
||||
public class NumberToNumberConverterFactoryUnitTests {
|
||||
|
||||
public @Parameter(0) Number source;
|
||||
|
||||
public @Parameter(1) Number expected;
|
||||
|
||||
@Parameters
|
||||
public static Collection<Number[]> parameters() {
|
||||
|
||||
Number[] longToInt = new Number[] { new Long(10), new Integer(10) };
|
||||
Number[] atomicIntToInt = new Number[] { new AtomicInteger(10), new Integer(10) };
|
||||
Number[] atomicIntToDouble = new Number[] { new AtomicInteger(10), new Double(10) };
|
||||
Number[] atomicLongToInt = new Number[] { new AtomicLong(10), new Integer(10) };
|
||||
Number[] atomicLongToLong = new Number[] { new AtomicLong(10), new Long(10) };
|
||||
|
||||
return Arrays.<Number[]> asList(longToInt, atomicIntToInt, atomicIntToDouble, atomicLongToInt, atomicLongToLong);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1288
|
||||
*/
|
||||
@Test
|
||||
public void convertsToTargetTypeCorrectly() {
|
||||
assertThat(NumberToNumberConverterFactory.INSTANCE.getConverter(expected.getClass()).convert(source), is(expected));
|
||||
}
|
||||
}
|
||||
@@ -27,11 +27,9 @@ import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
import org.hamcrest.Matcher;
|
||||
import org.hamcrest.collection.IsIterableContainingInOrder;
|
||||
import org.hamcrest.core.Is;
|
||||
import org.hamcrest.core.IsEqual;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
@@ -796,7 +794,7 @@ public class UpdateMapperUnitTests {
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1251
|
||||
* see DATAMONGO-1251
|
||||
*/
|
||||
@Test
|
||||
public void mapsNullValueCorrectlyForSimpleTypes() {
|
||||
@@ -812,7 +810,7 @@ public class UpdateMapperUnitTests {
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1251
|
||||
* see DATAMONGO-1251
|
||||
*/
|
||||
@Test
|
||||
public void mapsNullValueCorrectlyForJava8Date() {
|
||||
@@ -828,7 +826,7 @@ public class UpdateMapperUnitTests {
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1251
|
||||
* see DATAMONGO-1251
|
||||
*/
|
||||
@Test
|
||||
public void mapsNullValueCorrectlyForCollectionTypes() {
|
||||
@@ -844,7 +842,7 @@ public class UpdateMapperUnitTests {
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1251
|
||||
* see DATAMONGO-1251
|
||||
*/
|
||||
@Test
|
||||
public void mapsNullValueCorrectlyForPropertyOfNestedDocument() {
|
||||
@@ -859,34 +857,6 @@ public class UpdateMapperUnitTests {
|
||||
assertThat($set.get("concreteValue.name"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1288
|
||||
*/
|
||||
@Test
|
||||
public void mapsAtomicIntegerToIntegerCorrectly() {
|
||||
|
||||
Update update = new Update().set("intValue", new AtomicInteger(10));
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(SimpleValueHolder.class));
|
||||
|
||||
DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set");
|
||||
assertThat($set.get("intValue"), Is.<Object> is(10));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1288
|
||||
*/
|
||||
@Test
|
||||
public void mapsAtomicIntegerToPrimitiveIntegerCorrectly() {
|
||||
|
||||
Update update = new Update().set("primIntValue", new AtomicInteger(10));
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(SimpleValueHolder.class));
|
||||
|
||||
DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set");
|
||||
assertThat($set.get("primIntValue"), Is.<Object> is(10));
|
||||
}
|
||||
|
||||
static class DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes {
|
||||
ListModelWrapper concreteTypeWithListAttributeOfInterfaceType;
|
||||
}
|
||||
@@ -1161,10 +1131,4 @@ public class UpdateMapperUnitTests {
|
||||
|
||||
LocalDate date;
|
||||
}
|
||||
|
||||
static class SimpleValueHolder {
|
||||
|
||||
Integer intValue;
|
||||
int primIntValue;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2015 by the original author(s).
|
||||
* Copyright (c) 2011-2014 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,23 +18,15 @@ package org.springframework.data.mongodb.core.index;
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import org.junit.After;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.CollectionCallback;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.test.annotation.DirtiesContext;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
@@ -47,15 +39,12 @@ import com.mongodb.MongoException;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Jordi Llach
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:infrastructure.xml")
|
||||
public class IndexingIntegrationTests {
|
||||
|
||||
@Autowired MongoOperations operations;
|
||||
@Autowired MongoDbFactory mongoDbFactory;
|
||||
@Autowired ConfigurableApplicationContext context;
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
@@ -63,40 +52,19 @@ public class IndexingIntegrationTests {
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-237
|
||||
* @see DATADOC-237
|
||||
*/
|
||||
@Test
|
||||
@DirtiesContext
|
||||
public void createsIndexWithFieldName() {
|
||||
|
||||
operations.getConverter().getMappingContext().getPersistentEntity(IndexedPerson.class);
|
||||
|
||||
operations.save(new IndexedPerson());
|
||||
assertThat(hasIndex("_firstname", IndexedPerson.class), is(true));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1163
|
||||
*/
|
||||
@Test
|
||||
@DirtiesContext
|
||||
public void createsIndexFromMetaAnnotation() {
|
||||
|
||||
operations.getConverter().getMappingContext().getPersistentEntity(IndexedPerson.class);
|
||||
|
||||
assertThat(hasIndex("_lastname", IndexedPerson.class), is(true));
|
||||
}
|
||||
|
||||
@Target({ ElementType.FIELD })
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Indexed
|
||||
@interface IndexedFieldAnnotation {
|
||||
}
|
||||
|
||||
@Document
|
||||
class IndexedPerson {
|
||||
|
||||
@Field("_firstname") @Indexed String firstname;
|
||||
@Field("_lastname") @IndexedFieldAnnotation String lastname;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -19,10 +19,6 @@ import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
@@ -181,21 +177,6 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
equalTo(new BasicDBObjectBuilder().add("nested.indexedDbRef", 1).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1163
|
||||
*/
|
||||
@Test
|
||||
public void resolveIndexDefinitionInMetaAnnotatedFields() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(
|
||||
IndexOnMetaAnnotatedField.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertThat(indexDefinitions.get(0).getCollection(), equalTo("indexOnMetaAnnotatedField"));
|
||||
assertThat(indexDefinitions.get(0).getIndexOptions(),
|
||||
equalTo(new BasicDBObjectBuilder().add("name", "_name").get()));
|
||||
}
|
||||
|
||||
@Document(collection = "Zero")
|
||||
static class IndexOnLevelZero {
|
||||
@Indexed String indexedProperty;
|
||||
@@ -250,18 +231,6 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
|
||||
}
|
||||
|
||||
@Target({ ElementType.FIELD })
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Indexed
|
||||
@interface IndexedFieldAnnotation {
|
||||
|
||||
}
|
||||
|
||||
@Document
|
||||
static class IndexOnMetaAnnotatedField {
|
||||
@Field("_name") @IndexedFieldAnnotation String lastname;
|
||||
}
|
||||
|
||||
/**
|
||||
* Test resolution of {@link GeoSpatialIndexed}.
|
||||
*
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2015 by the original author(s).
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,15 +16,14 @@
|
||||
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import com.querydsl.core.annotations.QuerySupertype;
|
||||
import com.mysema.query.annotations.QuerySupertype;
|
||||
|
||||
/**
|
||||
* {@link QuerySupertype} is necessary for Querydsl 2.2.0-beta4 to compile the query classes directly. Can be removed as
|
||||
* soon as {@link https://bugs.launchpad.net/querydsl/+bug/776219} is fixed.
|
||||
*
|
||||
* @see https://bugs.launchpad.net/querydsl/+bug/776219
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
*/
|
||||
@QuerySupertype
|
||||
public abstract class BasePerson {
|
||||
@@ -33,7 +32,8 @@ public abstract class BasePerson {
|
||||
protected String firstName;
|
||||
protected String lastName;
|
||||
|
||||
public BasePerson() {}
|
||||
public BasePerson() {
|
||||
}
|
||||
|
||||
public BasePerson(Integer ssn, String firstName, String lastName) {
|
||||
this.ssn = ssn;
|
||||
|
||||
@@ -19,11 +19,6 @@ import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
@@ -231,18 +226,6 @@ public class BasicMongoPersistentEntityUnitTests {
|
||||
verify(dbRefMock, times(1)).lazy();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1291
|
||||
*/
|
||||
@Test
|
||||
public void metaInformationShouldBeReadCorrectlyFromInheritedDocumentAnnotation() {
|
||||
|
||||
BasicMongoPersistentEntity<DocumentWithCustomAnnotation> entity = new BasicMongoPersistentEntity<DocumentWithCustomAnnotation>(
|
||||
ClassTypeInformation.from(DocumentWithCustomAnnotation.class));
|
||||
|
||||
assertThat(entity.getCollection(), is("collection-1"));
|
||||
}
|
||||
|
||||
@Document(collection = "contacts")
|
||||
class Contact {
|
||||
|
||||
@@ -278,15 +261,4 @@ public class BasicMongoPersistentEntityUnitTests {
|
||||
static class AnyDocument {
|
||||
|
||||
}
|
||||
|
||||
@CustomDocumentAnnotation
|
||||
static class DocumentWithCustomAnnotation {
|
||||
|
||||
}
|
||||
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target({ ElementType.TYPE })
|
||||
@Document(collection = "collection-1")
|
||||
static @interface CustomDocumentAnnotation {
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
* Copyright 2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,7 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository;
|
||||
|
||||
import com.querydsl.core.annotations.QueryEmbeddable;
|
||||
import com.mysema.query.annotations.QueryEmbeddable;
|
||||
|
||||
/**
|
||||
* @author Oliver Gierke
|
||||
|
||||
@@ -53,9 +53,7 @@ import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.repository.Meta;
|
||||
import org.springframework.data.mongodb.repository.MongoRepository;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
|
||||
import org.springframework.data.repository.core.support.DefaultRepositoryMetadata;
|
||||
import org.springframework.data.repository.core.RepositoryMetadata;
|
||||
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.DBObject;
|
||||
@@ -71,23 +69,27 @@ import com.mongodb.WriteResult;
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class AbstractMongoQueryUnitTests {
|
||||
|
||||
@Mock RepositoryMetadata metadataMock;
|
||||
@Mock MongoOperations mongoOperationsMock;
|
||||
@Mock BasicMongoPersistentEntity<?> persitentEntityMock;
|
||||
@Mock @SuppressWarnings("rawtypes") BasicMongoPersistentEntity persitentEntityMock;
|
||||
@Mock MongoMappingContext mappingContextMock;
|
||||
@Mock WriteResult writeResultMock;
|
||||
|
||||
@Before
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public void setUp() {
|
||||
|
||||
doReturn("persons").when(persitentEntityMock).getCollection();
|
||||
doReturn(persitentEntityMock).when(mappingContextMock).getPersistentEntity(Matchers.any(Class.class));
|
||||
doReturn(Person.class).when(persitentEntityMock).getType();
|
||||
when(metadataMock.getDomainType()).thenReturn((Class) Person.class);
|
||||
when(metadataMock.getReturnedDomainClass(Matchers.any(Method.class))).thenReturn((Class) Person.class);
|
||||
when(persitentEntityMock.getCollection()).thenReturn("persons");
|
||||
when(mappingContextMock.getPersistentEntity(Matchers.any(Class.class))).thenReturn(persitentEntityMock);
|
||||
when(persitentEntityMock.getType()).thenReturn(Person.class);
|
||||
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(mock(MongoDbFactory.class));
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContextMock);
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
doReturn(converter).when(mongoOperationsMock).getConverter();
|
||||
when(mongoOperationsMock.getConverter()).thenReturn(converter);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -99,8 +101,8 @@ public class AbstractMongoQueryUnitTests {
|
||||
|
||||
createQueryForMethod("deletePersonByLastname", String.class).setDeleteQuery(true).execute(new Object[] { "booh" });
|
||||
|
||||
verify(mongoOperationsMock, times(1)).remove(Matchers.any(Query.class), eq(Person.class), eq("persons"));
|
||||
verify(mongoOperationsMock, times(0)).find(Matchers.any(Query.class), Matchers.any(Class.class),
|
||||
verify(this.mongoOperationsMock, times(1)).remove(Matchers.any(Query.class), eq(Person.class), eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(0)).find(Matchers.any(Query.class), Matchers.any(Class.class),
|
||||
Matchers.anyString());
|
||||
}
|
||||
|
||||
@@ -112,12 +114,13 @@ public class AbstractMongoQueryUnitTests {
|
||||
@Test
|
||||
public void testDeleteExecutionLoadsListOfRemovedDocumentsWhenReturnTypeIsCollectionLike() {
|
||||
|
||||
when(mongoOperationsMock.find(Matchers.any(Query.class), Matchers.any(Class.class), Matchers.anyString()))
|
||||
when(this.mongoOperationsMock.find(Matchers.any(Query.class), Matchers.any(Class.class), Matchers.anyString()))
|
||||
.thenReturn(Arrays.asList(new Person(new ObjectId(new Date()), "bar")));
|
||||
|
||||
createQueryForMethod("deleteByLastname", String.class).setDeleteQuery(true).execute(new Object[] { "booh" });
|
||||
|
||||
verify(mongoOperationsMock, times(1)).findAllAndRemove(Matchers.any(Query.class), eq(Person.class), eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(1)).findAllAndRemove(Matchers.any(Query.class), eq(Person.class),
|
||||
eq("persons"));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -140,14 +143,14 @@ public class AbstractMongoQueryUnitTests {
|
||||
public void testDeleteExecutionReturnsNrDocumentsDeletedFromWriteResult() {
|
||||
|
||||
when(writeResultMock.getN()).thenReturn(100);
|
||||
when(mongoOperationsMock.remove(Matchers.any(Query.class), eq(Person.class), eq("persons")))
|
||||
when(this.mongoOperationsMock.remove(Matchers.any(Query.class), eq(Person.class), eq("persons")))
|
||||
.thenReturn(writeResultMock);
|
||||
|
||||
MongoQueryFake query = createQueryForMethod("deletePersonByLastname", String.class);
|
||||
query.setDeleteQuery(true);
|
||||
|
||||
assertThat(query.execute(new Object[] { "fake" }), is((Object) 100L));
|
||||
verify(mongoOperationsMock, times(1)).remove(Matchers.any(Query.class), eq(Person.class), eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(1)).remove(Matchers.any(Query.class), eq(Person.class), eq("persons"));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -161,7 +164,7 @@ public class AbstractMongoQueryUnitTests {
|
||||
|
||||
ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
|
||||
|
||||
verify(mongoOperationsMock, times(1)).find(captor.capture(), eq(Person.class), eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(1)).find(captor.capture(), eq(Person.class), eq("persons"));
|
||||
|
||||
assertThat(captor.getValue().getMeta().getComment(), nullValue());
|
||||
}
|
||||
@@ -192,7 +195,7 @@ public class AbstractMongoQueryUnitTests {
|
||||
|
||||
ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
|
||||
|
||||
verify(mongoOperationsMock, times(1)).count(captor.capture(), eq(Person.class), eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(1)).count(captor.capture(), eq(Person.class), eq("persons"));
|
||||
assertThat(captor.getValue().getMeta().getComment(), is("comment"));
|
||||
}
|
||||
|
||||
@@ -226,7 +229,7 @@ public class AbstractMongoQueryUnitTests {
|
||||
|
||||
ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
|
||||
|
||||
verify(mongoOperationsMock, times(2)).find(captor.capture(), eq(Person.class), eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(2)).find(captor.capture(), eq(Person.class), eq("persons"));
|
||||
|
||||
assertThat(captor.getAllValues().get(0).getSkip(), is(0));
|
||||
assertThat(captor.getAllValues().get(1).getSkip(), is(10));
|
||||
@@ -247,7 +250,7 @@ public class AbstractMongoQueryUnitTests {
|
||||
|
||||
ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
|
||||
|
||||
verify(mongoOperationsMock, times(2)).find(captor.capture(), eq(Person.class), eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(2)).find(captor.capture(), eq(Person.class), eq("persons"));
|
||||
|
||||
assertThat(captor.getAllValues().get(0).getLimit(), is(11));
|
||||
assertThat(captor.getAllValues().get(1).getLimit(), is(11));
|
||||
@@ -268,7 +271,7 @@ public class AbstractMongoQueryUnitTests {
|
||||
|
||||
ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
|
||||
|
||||
verify(mongoOperationsMock, times(2)).find(captor.capture(), eq(Person.class), eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(2)).find(captor.capture(), eq(Person.class), eq("persons"));
|
||||
|
||||
DBObject expectedSortObject = new BasicDBObjectBuilder().add("bar", -1).get();
|
||||
assertThat(captor.getAllValues().get(0).getSortObject(), is(expectedSortObject));
|
||||
@@ -295,9 +298,7 @@ public class AbstractMongoQueryUnitTests {
|
||||
try {
|
||||
|
||||
Method method = Repo.class.getMethod(methodName, paramTypes);
|
||||
ProjectionFactory factory = new SpelAwareProxyProjectionFactory();
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(Repo.class), factory,
|
||||
mappingContextMock);
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadataMock, mappingContextMock);
|
||||
|
||||
return new MongoQueryFake(queryMethod, mongoOperationsMock);
|
||||
|
||||
|
||||
@@ -30,8 +30,6 @@ import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.query.TextCriteria;
|
||||
import org.springframework.data.mongodb.repository.Person;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
|
||||
import org.springframework.data.repository.Repository;
|
||||
import org.springframework.data.repository.core.RepositoryMetadata;
|
||||
import org.springframework.data.repository.core.support.DefaultRepositoryMetadata;
|
||||
@@ -44,16 +42,15 @@ import org.springframework.data.repository.core.support.DefaultRepositoryMetadat
|
||||
*/
|
||||
public class MongoParametersParameterAccessorUnitTests {
|
||||
|
||||
Distance DISTANCE = new Distance(2.5, Metrics.KILOMETERS);
|
||||
RepositoryMetadata metadata = new DefaultRepositoryMetadata(PersonRepository.class);
|
||||
MongoMappingContext context = new MongoMappingContext();
|
||||
ProjectionFactory factory = new SpelAwareProxyProjectionFactory();
|
||||
private static final Distance DISTANCE = new Distance(2.5, Metrics.KILOMETERS);
|
||||
private static final RepositoryMetadata metadata = new DefaultRepositoryMetadata(PersonRepository.class);
|
||||
private static final MongoMappingContext context = new MongoMappingContext();
|
||||
|
||||
@Test
|
||||
public void returnsNullForDistanceIfNoneAvailable() throws NoSuchMethodException, SecurityException {
|
||||
|
||||
Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class);
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context);
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, context);
|
||||
|
||||
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod,
|
||||
new Object[] { new Point(10, 20) });
|
||||
@@ -64,10 +61,10 @@ public class MongoParametersParameterAccessorUnitTests {
|
||||
public void returnsDistanceIfAvailable() throws NoSuchMethodException, SecurityException {
|
||||
|
||||
Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Distance.class);
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context);
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, context);
|
||||
|
||||
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod,
|
||||
new Object[] { new Point(10, 20), DISTANCE });
|
||||
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, new Object[] {
|
||||
new Point(10, 20), DISTANCE });
|
||||
assertThat(accessor.getDistanceRange().getUpperBound(), is(DISTANCE));
|
||||
}
|
||||
|
||||
@@ -78,10 +75,10 @@ public class MongoParametersParameterAccessorUnitTests {
|
||||
public void shouldReturnAsFullTextStringWhenNoneDefinedForMethod() throws NoSuchMethodException, SecurityException {
|
||||
|
||||
Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Distance.class);
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context);
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, context);
|
||||
|
||||
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod,
|
||||
new Object[] { new Point(10, 20), DISTANCE });
|
||||
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, new Object[] {
|
||||
new Point(10, 20), DISTANCE });
|
||||
assertThat(accessor.getFullText(), IsNull.nullValue());
|
||||
}
|
||||
|
||||
@@ -92,10 +89,10 @@ public class MongoParametersParameterAccessorUnitTests {
|
||||
public void shouldProperlyConvertTextCriteria() throws NoSuchMethodException, SecurityException {
|
||||
|
||||
Method method = PersonRepository.class.getMethod("findByFirstname", String.class, TextCriteria.class);
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context);
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, context);
|
||||
|
||||
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod,
|
||||
new Object[] { "spring", TextCriteria.forDefaultLanguage().matching("data") });
|
||||
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, new Object[] { "spring",
|
||||
TextCriteria.forDefaultLanguage().matching("data") });
|
||||
assertThat(accessor.getFullText().getCriteriaObject().toString(),
|
||||
equalTo("{ \"$text\" : { \"$search\" : \"data\"}}"));
|
||||
}
|
||||
@@ -107,13 +104,13 @@ public class MongoParametersParameterAccessorUnitTests {
|
||||
public void shouldDetectMinAndMaxDistance() throws NoSuchMethodException, SecurityException {
|
||||
|
||||
Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Range.class);
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context);
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, context);
|
||||
|
||||
Distance min = new Distance(10, Metrics.KILOMETERS);
|
||||
Distance max = new Distance(20, Metrics.KILOMETERS);
|
||||
|
||||
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod,
|
||||
new Object[] { new Point(10, 20), Distance.between(min, max) });
|
||||
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, new Object[] {
|
||||
new Point(10, 20), Distance.between(min, max) });
|
||||
|
||||
Range<Distance> range = accessor.getDistanceRange();
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Matchers.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
@@ -25,11 +26,16 @@ import static org.springframework.data.mongodb.repository.query.StubParameterAcc
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.types.ObjectId;
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.invocation.InvocationOnMock;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.mockito.stubbing.Answer;
|
||||
import org.springframework.data.domain.Range;
|
||||
import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.Metrics;
|
||||
@@ -37,28 +43,21 @@ import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.geo.Polygon;
|
||||
import org.springframework.data.geo.Shape;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.Person;
|
||||
import org.springframework.data.mongodb.core.Venue;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
|
||||
import org.springframework.data.mongodb.core.index.GeoSpatialIndexed;
|
||||
import org.springframework.data.mongodb.core.mapping.DBRef;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
|
||||
import org.springframework.data.repository.Repository;
|
||||
import org.springframework.data.repository.core.support.DefaultRepositoryMetadata;
|
||||
import org.springframework.data.repository.query.parser.PartTree;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
|
||||
/**
|
||||
* Unit test for {@link MongoQueryCreator}.
|
||||
@@ -67,12 +66,14 @@ import com.mongodb.DBObject;
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class MongoQueryCreatorUnitTests {

Method findByFirstname, findByFirstnameAndFriend, findByFirstnameNotNull;

MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context;
MongoConverter converter;
@Mock MongoConverter converter;

MappingContext<?, MongoPersistentProperty> context;

@Rule public ExpectedException expection = ExpectedException.none();

@@ -81,8 +82,11 @@ public class MongoQueryCreatorUnitTests {

context = new MongoMappingContext();

DbRefResolver resolver = new DefaultDbRefResolver(mock(MongoDbFactory.class));
converter = new MappingMongoConverter(resolver, context);
doAnswer(new Answer<Object>() {
public Object answer(InvocationOnMock invocation) throws Throwable {
return invocation.getArguments()[0];
}
}).when(converter).convertToMongoType(any(), Mockito.any(TypeInformation.class));
}

@Test
@@ -133,8 +137,8 @@ public class MongoQueryCreatorUnitTests {
Point point = new Point(10, 20);
Distance distance = new Distance(2.5, Metrics.KILOMETERS);

Query query = query(
where("location").nearSphere(point).maxDistance(distance.getNormalizedValue()).and("firstname").is("Dave"));
Query query = query(where("location").nearSphere(point).maxDistance(distance.getNormalizedValue()).and("firstname")
.is("Dave"));
assertBindsDistanceToQuery(point, distance, query);
}

@@ -144,8 +148,8 @@ public class MongoQueryCreatorUnitTests {
Point point = new Point(10, 20);
Distance distance = new Distance(2.5);

Query query = query(
where("location").near(point).maxDistance(distance.getNormalizedValue()).and("firstname").is("Dave"));
Query query = query(where("location").near(point).maxDistance(distance.getNormalizedValue()).and("firstname")
.is("Dave"));
assertBindsDistanceToQuery(point, distance, query);
}

@@ -237,13 +241,14 @@ public class MongoQueryCreatorUnitTests {
public void createsQueryReferencingADBRefCorrectly() {

User user = new User();
user.id = new ObjectId();
com.mongodb.DBRef dbref = new com.mongodb.DBRef("user", "id");
when(converter.toDBRef(eq(user), Mockito.any(MongoPersistentProperty.class))).thenReturn(dbref);

PartTree tree = new PartTree("findByCreator", User.class);
MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, user), context);
DBObject queryObject = creator.createQuery().getQueryObject();
Query query = creator.createQuery();

assertThat(queryObject.get("creator"), is((Object) user));
assertThat(query, is(query(where("creator").is(dbref))));
}

/**
@@ -287,14 +292,16 @@ public class MongoQueryCreatorUnitTests {

private void assertBindsDistanceToQuery(Point point, Distance distance, Query reference) throws Exception {

when(converter.convertToMongoType("Dave")).thenReturn("Dave");

PartTree tree = new PartTree("findByLocationNearAndFirstname",
org.springframework.data.mongodb.repository.Person.class);
Method method = PersonRepository.class.getMethod("findByLocationNearAndFirstname", Point.class, Distance.class,
String.class);
MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(PersonRepository.class),
new SpelAwareProxyProjectionFactory(), new MongoMappingContext());
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod,
new Object[] { point, distance, "Dave" });
new MongoMappingContext());
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, new Object[] { point, distance,
"Dave" });

Query query = new MongoQueryCreator(tree, new ConvertingParameterAccessor(converter, accessor), context)
.createQuery();
@@ -675,8 +682,6 @@ public class MongoQueryCreatorUnitTests {

class User {

ObjectId id;

@Field("foo") String username;

@DBRef User creator;
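The hunks above exercise derived geo-near queries such as findByLocationNearAndFirstname(Point, Distance, String). For orientation, a minimal sketch of how such a derived method is usually declared on a repository interface; the PersonGeoQueries name and the GeoResults return type are illustrative assumptions, not taken from this comparison:

import org.springframework.data.geo.Distance;
import org.springframework.data.geo.GeoResults;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.repository.Person;
import org.springframework.data.repository.Repository;

interface PersonGeoQueries extends Repository<Person, Long> {

	// The Near keyword binds the Point argument; the Distance argument caps the search radius.
	GeoResults<Person> findByLocationNearAndFirstname(Point location, Distance distance, String firstname);
}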
@@ -36,8 +36,6 @@ import org.springframework.data.mongodb.repository.Address;
import org.springframework.data.mongodb.repository.Contact;
import org.springframework.data.mongodb.repository.Meta;
import org.springframework.data.mongodb.repository.Person;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
import org.springframework.data.repository.Repository;
import org.springframework.data.repository.core.support.DefaultRepositoryMetadata;

@@ -59,7 +57,10 @@ public class MongoQueryMethodUnitTests {
@Test
public void detectsCollectionFromRepoTypeIfReturnTypeNotAssignable() throws Exception {

MongoQueryMethod queryMethod = queryMethod(SampleRepository.class, "method");
Method method = SampleRepository.class.getMethod("method");

MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class),
context);
MongoEntityMetadata<?> metadata = queryMethod.getEntityInformation();

assertThat(metadata.getJavaType(), is(typeCompatibleWith(Address.class)));
@@ -69,7 +70,10 @@ public class MongoQueryMethodUnitTests {
@Test
public void detectsCollectionFromReturnTypeIfReturnTypeAssignable() throws Exception {

MongoQueryMethod queryMethod = queryMethod(SampleRepository2.class, "method");
Method method = SampleRepository2.class.getMethod("method");

MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class),
context);
MongoEntityMetadata<?> entityInformation = queryMethod.getEntityInformation();

assertThat(entityInformation.getJavaType(), is(typeCompatibleWith(Person.class)));
@@ -79,44 +83,34 @@ public class MongoQueryMethodUnitTests {
@Test
public void discoversUserAsDomainTypeForGeoPageQueryMethod() throws Exception {

MongoQueryMethod queryMethod = queryMethod(PersonRepository.class, "findByLocationNear", Point.class,
Distance.class, Pageable.class);
MongoQueryMethod queryMethod = queryMethod("findByLocationNear", Point.class, Distance.class, Pageable.class);
assertThat(queryMethod.isGeoNearQuery(), is(true));
assertThat(queryMethod.isPageQuery(), is(true));

queryMethod = queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class);
queryMethod = queryMethod("findByFirstname", String.class, Point.class);
assertThat(queryMethod.isGeoNearQuery(), is(true));
assertThat(queryMethod.isPageQuery(), is(false));
assertThat(queryMethod.getEntityInformation().getJavaType(), is(typeCompatibleWith(User.class)));

assertThat(queryMethod(PersonRepository.class, "findByEmailAddress", String.class, Point.class).isGeoNearQuery(),
is(true));
assertThat(queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class).isGeoNearQuery(),
is(true));
assertThat(queryMethod(PersonRepository.class, "findByLastname", String.class, Point.class).isGeoNearQuery(),
is(true));
assertThat(queryMethod("findByEmailAddress", String.class, Point.class).isGeoNearQuery(), is(true));
assertThat(queryMethod("findByFirstname", String.class, Point.class).isGeoNearQuery(), is(true));
assertThat(queryMethod("findByLastname", String.class, Point.class).isGeoNearQuery(), is(true));
}

@Test(expected = IllegalArgumentException.class)
public void rejectsGeoPageQueryWithoutPageable() throws Exception {
queryMethod(PersonRepository.class, "findByLocationNear", Point.class, Distance.class);
queryMethod("findByLocationNear", Point.class, Distance.class);
}

@Test(expected = IllegalArgumentException.class)
public void rejectsNullMappingContext() throws Exception {

Method method = PersonRepository.class.getMethod("findByFirstname", String.class, Point.class);

new MongoQueryMethod(method, new DefaultRepositoryMetadata(PersonRepository.class),
new SpelAwareProxyProjectionFactory(), null);
new MongoQueryMethod(method, new DefaultRepositoryMetadata(PersonRepository.class), null);
}

@Test
public void considersMethodReturningGeoPageAsPagingMethod() throws Exception {

MongoQueryMethod method = queryMethod(PersonRepository.class, "findByLocationNear", Point.class, Distance.class,
Pageable.class);

MongoQueryMethod method = queryMethod("findByLocationNear", Point.class, Distance.class, Pageable.class);
assertThat(method.isPageQuery(), is(true));
assertThat(method.isCollectionQuery(), is(false));
}
@@ -124,7 +118,8 @@ public class MongoQueryMethodUnitTests {
@Test
public void createsMongoQueryMethodObjectForMethodReturningAnInterface() throws Exception {

queryMethod(SampleRepository2.class, "methodReturningAnInterface");
Method method = SampleRepository2.class.getMethod("methodReturningAnInterface");
new MongoQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository2.class), context);
}

/**
@@ -133,8 +128,7 @@ public class MongoQueryMethodUnitTests {
@Test
public void createsMongoQueryMethodWithEmptyMetaCorrectly() throws Exception {

MongoQueryMethod method = queryMethod(PersonRepository.class, "emptyMetaAnnotation");

MongoQueryMethod method = queryMethod("emptyMetaAnnotation");
assertThat(method.hasQueryMetaAttributes(), is(true));
assertThat(method.getQueryMetaAttributes().hasValues(), is(false));
}
@@ -145,8 +139,7 @@ public class MongoQueryMethodUnitTests {
@Test
public void createsMongoQueryMethodWithMaxExecutionTimeCorrectly() throws Exception {

MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithMaxExecutionTime");

MongoQueryMethod method = queryMethod("metaWithMaxExecutionTime");
assertThat(method.hasQueryMetaAttributes(), is(true));
assertThat(method.getQueryMetaAttributes().getMaxTimeMsec(), is(100L));
}
@@ -157,8 +150,7 @@ public class MongoQueryMethodUnitTests {
@Test
public void createsMongoQueryMethodWithMaxScanCorrectly() throws Exception {

MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithMaxScan");

MongoQueryMethod method = queryMethod("metaWithMaxScan");
assertThat(method.hasQueryMetaAttributes(), is(true));
assertThat(method.getQueryMetaAttributes().getMaxScan(), is(10L));
}
@@ -169,8 +161,7 @@ public class MongoQueryMethodUnitTests {
@Test
public void createsMongoQueryMethodWithCommentCorrectly() throws Exception {

MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithComment");

MongoQueryMethod method = queryMethod("metaWithComment");
assertThat(method.hasQueryMetaAttributes(), is(true));
assertThat(method.getQueryMetaAttributes().getComment(), is("foo bar"));
}
@@ -181,8 +172,7 @@ public class MongoQueryMethodUnitTests {
@Test
public void createsMongoQueryMethodWithSnapshotCorrectly() throws Exception {

MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithSnapshotUsage");

MongoQueryMethod method = queryMethod("metaWithSnapshotUsage");
assertThat(method.hasQueryMetaAttributes(), is(true));
assertThat(method.getQueryMetaAttributes().getSnapshot(), is(true));
}
@@ -193,16 +183,14 @@ public class MongoQueryMethodUnitTests {
@Test
public void fallsBackToRepositoryDomainTypeIfMethodDoesNotReturnADomainType() throws Exception {

MongoQueryMethod method = queryMethod(PersonRepository.class, "deleteByUserName", String.class);
MongoQueryMethod method = queryMethod("deleteByUserName", String.class);

assertThat(method.getEntityInformation().getJavaType(), is(typeCompatibleWith(User.class)));
}

private MongoQueryMethod queryMethod(Class<?> repository, String name, Class<?>... parameters) throws Exception {

Method method = repository.getMethod(name, parameters);
ProjectionFactory factory = new SpelAwareProxyProjectionFactory();
return new MongoQueryMethod(method, new DefaultRepositoryMetadata(repository), factory, context);
private MongoQueryMethod queryMethod(String name, Class<?>... parameters) throws Exception {
Method method = PersonRepository.class.getMethod(name, parameters);
return new MongoQueryMethod(method, new DefaultRepositoryMetadata(PersonRepository.class), context);
}

interface PersonRepository extends Repository<User, Long> {
@@ -27,6 +27,7 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.data.mongodb.MongoDbFactory;
@@ -41,12 +42,9 @@ import org.springframework.data.mongodb.core.query.TextCriteria;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.Person;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
import org.springframework.data.repository.core.support.DefaultRepositoryMetadata;
import org.springframework.data.repository.core.RepositoryMetadata;

import com.mongodb.BasicDBObjectBuilder;
import com.mongodb.DBObject;
import com.mongodb.util.JSONParseException;

/**
@@ -59,6 +57,7 @@ import com.mongodb.util.JSONParseException;
@RunWith(MockitoJUnitRunner.class)
public class PartTreeMongoQueryUnitTests {

@Mock RepositoryMetadata metadataMock;
@Mock MongoOperations mongoOperationsMock;

MongoMappingContext mappingContext;
@@ -66,8 +65,11 @@ public class PartTreeMongoQueryUnitTests {
public @Rule ExpectedException exception = ExpectedException.none();

@Before
@SuppressWarnings({ "unchecked", "rawtypes" })
public void setUp() {

when(metadataMock.getDomainType()).thenReturn((Class) Person.class);
when(metadataMock.getReturnedDomainClass(Matchers.any(Method.class))).thenReturn((Class) Person.class);
mappingContext = new MongoMappingContext();
DbRefResolver dbRefResolver = new DefaultDbRefResolver(mock(MongoDbFactory.class));
MongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
@@ -147,52 +149,7 @@ public class PartTreeMongoQueryUnitTests {
deriveQueryFromMethod("findByAge", new Object[] { 1 });
}

/**
* @see DATAMONGO-1345
*/
@Test
public void doesNotDeriveFieldSpecForNormalDomainType() {
assertThat(deriveQueryFromMethod("findPersonBy", new Object[0]).getFieldsObject(), is(nullValue()));
}

/**
* @see DATAMONGO-1345
*/
@Test
public void restrictsQueryToFieldsRequiredForProjection() {

DBObject fieldsObject = deriveQueryFromMethod("findPersonProjectedBy", new Object[0]).getFieldsObject();

assertThat(fieldsObject.get("firstname"), is((Object) 1));
assertThat(fieldsObject.get("lastname"), is((Object) 1));
}

/**
* @see DATAMONGO-1345
*/
@Test
public void restrictsQueryToFieldsRequiredForDto() {

DBObject fieldsObject = deriveQueryFromMethod("findPersonDtoByAge", new Object[] { 42 }).getFieldsObject();

assertThat(fieldsObject.get("firstname"), is((Object) 1));
assertThat(fieldsObject.get("lastname"), is((Object) 1));
}

/**
* @see DATAMONGO-1345
*/
@Test
public void usesDynamicProjection() {

DBObject fields = deriveQueryFromMethod("findDynamicallyProjectedBy", ExtendedProjection.class).getFieldsObject();

assertThat(fields.get("firstname"), is((Object) 1));
assertThat(fields.get("lastname"), is((Object) 1));
assertThat(fields.get("age"), is((Object) 1));
}

private org.springframework.data.mongodb.core.query.Query deriveQueryFromMethod(String method, Object... args) {
private org.springframework.data.mongodb.core.query.Query deriveQueryFromMethod(String method, Object[] args) {

Class<?>[] types = new Class<?>[args.length];

@@ -211,9 +168,7 @@ public class PartTreeMongoQueryUnitTests {
try {

Method method = Repo.class.getMethod(methodName, paramTypes);
ProjectionFactory factory = new SpelAwareProxyProjectionFactory();
MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(Repo.class), factory,
mappingContext);
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadataMock, mappingContext);

return new PartTreeMongoQuery(queryMethod, mongoOperationsMock);
} catch (NoSuchMethodException e) {
@@ -241,36 +196,5 @@ public class PartTreeMongoQueryUnitTests {

@Query(fields = "{ 'firstname }")
Person findByAge(Integer age);

Person findPersonBy();

PersonProjection findPersonProjectedBy();

PersonDto findPersonDtoByAge(Integer age);

<T> T findDynamicallyProjectedBy(Class<T> type);
}

interface PersonProjection {

String getFirstname();

String getLastname();
}

interface ExtendedProjection extends PersonProjection {

int getAge();
}

static class PersonDto {

public String firstname, lastname;

public PersonDto(String firstname, String lastname) {

this.firstname = firstname;
this.lastname = lastname;
}
}
}
@@ -43,10 +43,7 @@ import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.repository.Address;
import org.springframework.data.mongodb.repository.Person;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
import org.springframework.data.repository.Repository;
import org.springframework.data.repository.core.support.DefaultRepositoryMetadata;
import org.springframework.data.repository.core.RepositoryMetadata;
import org.springframework.data.repository.query.DefaultEvaluationContextProvider;
import org.springframework.expression.spel.standard.SpelExpressionParser;

@@ -68,6 +65,7 @@ public class StringBasedMongoQueryUnitTests {
SpelExpressionParser PARSER = new SpelExpressionParser();

@Mock MongoOperations operations;
@Mock RepositoryMetadata metadata;
@Mock DbRefResolver factory;

MongoConverter converter;
@@ -83,7 +81,10 @@ public class StringBasedMongoQueryUnitTests {
@Test
public void bindsSimplePropertyCorrectly() throws Exception {

StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastname", String.class);
Method method = SampleRepository.class.getMethod("findByLastname", String.class);
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, converter.getMappingContext());
StringBasedMongoQuery mongoQuery = new StringBasedMongoQuery(queryMethod, operations, PARSER,
DefaultEvaluationContextProvider.INSTANCE);
ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "Matthews");

org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor);
@@ -365,13 +366,11 @@ public class StringBasedMongoQueryUnitTests {
private StringBasedMongoQuery createQueryForMethod(String name, Class<?>... parameters) throws Exception {

Method method = SampleRepository.class.getMethod(name, parameters);
ProjectionFactory factory = new SpelAwareProxyProjectionFactory();
MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class),
factory, converter.getMappingContext());
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, converter.getMappingContext());
return new StringBasedMongoQuery(queryMethod, operations, PARSER, DefaultEvaluationContextProvider.INSTANCE);
}

private interface SampleRepository extends Repository<Person, Long> {
private interface SampleRepository {

@Query("{ 'lastname' : ?0 }")
Person findByLastname(String lastname);
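For context, a minimal sketch (not taken from this comparison) of the string-based query declaration that bindsSimplePropertyCorrectly() works against: the ?0 placeholder in the @Query value is replaced with the first method argument when the query is created. The PersonQueries name is an illustrative assumption.

import org.springframework.data.mongodb.repository.Person;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.data.repository.Repository;

interface PersonQueries extends Repository<Person, Long> {

	// ?0 is substituted with the lastname argument at query-creation time
	@Query("{ 'lastname' : ?0 }")
	Person findByLastname(String lastname);
}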
@@ -130,7 +130,7 @@ class StubParameterAccessor implements MongoParameterAccessor {
public TextCriteria getFullText() {
return null;
}

/* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getValues()
*/
@@ -138,13 +138,4 @@ class StubParameterAccessor implements MongoParameterAccessor {
public Object[] getValues() {
return this.values;
}

/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.ParameterAccessor#getDynamicProjection()
*/
@Override
public Class<?> getDynamicProjection() {
return null;
}
}
@@ -34,7 +34,7 @@ import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import com.querydsl.core.types.Predicate;
import com.mysema.query.types.Predicate;

/**
* Integration test for {@link QueryDslMongoRepository}.
@@ -31,6 +31,8 @@ import org.springframework.data.mongodb.repository.QPerson;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import com.mysema.query.mongodb.MongodbQuery;

/**
* Unit tests for {@link QuerydslRepositorySupport}.
*
@@ -57,8 +59,8 @@ public class QuerydslRepositorySupportTests {

QPerson p = QPerson.person;
QuerydslRepositorySupport support = new QuerydslRepositorySupport(operations) {};
SpringDataMongodbQuery<Person> query = support.from(p).where(p.lastname.eq("Matthews"));
assertThat(query.fetchOne(), is(person));
MongodbQuery<Person> query = support.from(p).where(p.lastname.eq("Matthews"));
assertThat(query.uniqueResult(), is(person));
}

/**
@@ -74,8 +76,8 @@ public class QuerydslRepositorySupportTests {
QPerson p = QPerson.person;
QuerydslRepositorySupport support = new QuerydslRepositorySupport(operations) {};

SpringDataMongodbQuery<Person> query = support.from(p).where(p.skills.any().in("guitarist"));
MongodbQuery<Person> query = support.from(p).where(p.skills.any().in("guitarist"));

assertThat(query.fetchOne(), is(person));
assertThat(query.uniqueResult(), is(person));
}
}
@@ -37,10 +37,10 @@ import org.springframework.data.mongodb.repository.QPerson;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.querydsl.core.types.dsl.BooleanOperation;
import com.querydsl.core.types.dsl.PathBuilder;
import com.querydsl.core.types.dsl.SimplePath;
import com.querydsl.core.types.dsl.StringPath;
import com.mysema.query.types.expr.BooleanOperation;
import com.mysema.query.types.path.PathBuilder;
import com.mysema.query.types.path.SimplePath;
import com.mysema.query.types.path.StringPath;

/**
* Unit tests for {@link SpringDataMongodbSerializer}.
@@ -73,7 +73,6 @@ public class SpringDataMongodbSerializerUnitTests {

@Test
public void buildsNestedKeyCorrectly() {

StringPath path = QPerson.person.address.street;
assertThat(serializer.getKeyForPath(path, path.getMetadata()), is("street"));
}
@@ -2,8 +2,6 @@ Bundle-SymbolicName: org.springframework.data.mongodb
Bundle-Name: Spring Data MongoDB Support
Bundle-Vendor: Pivotal Software, Inc.
Bundle-ManifestVersion: 2
Excluded-Imports:
lombok.*
Import-Package:
sun.reflect;version="0";resolution:=optional
Export-Template:
@@ -12,7 +10,7 @@ Import-Template:
com.fasterxml.jackson.*;version="${jackson:[=.=.=,+1.0.0)}";resolution:=optional,
com.google.common.base.*;version="[11.0.0,14.0.0)";resolution:=optional,
com.mongodb.*;version="${mongo.osgi:[=.=.=,+1.0.0)}",
com.querydsl.*;version="${querydsl:[=.=.=,+1.0.0)}";resolution:=optional,
com.mysema.query.*;version="[2.1.1, 3.0.0)";resolution:=optional,
javax.annotation.processing.*;version="0",
javax.enterprise.*;version="${cdi:[=.=.=,+1.0.0)}";resolution:=optional,
javax.tools.*;version="0",
@@ -1,6 +1,19 @@
Spring Data MongoDB Changelog
=============================

Changes in version 1.8.4.RELEASE (2016-02-23)
---------------------------------------------
* DATAMONGO-1381 - Release 1.8.4 (Gosling SR4).
* DATAMONGO-1380 - Improve logging in MongoChangeSetPersister.
* DATAMONGO-1378 - Update reference documentation: Change Query.sort() to Query.with(Sort sort).
* DATAMONGO-1377 - Update JavaDoc: Use @EnableMongoRepositories instead of @EnableJpaRepositories.
* DATAMONGO-1376 - Move away from SimpleTypeInformationMapper.INSTANCE.
* DATAMONGO-1375 - Fix typo in MongoOperations JavaDoc.
* DATAMONGO-1361 - geoNear() queries fail when the accompanying query returns no results.
* DATAMONGO-1360 - Cannot query with JSR310.
* DATAMONGO-1270 - Update documentation to reflect deprecation of MongoFactoryBean.


Changes in version 1.9.0.M1 (2016-02-12)
----------------------------------------
* DATAMONGO-1380 - Improve logging in MongoChangeSetPersister.
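As a quick illustration of the documentation change tracked by DATAMONGO-1378 above: sorting is attached to a Query via with(Sort) rather than a sort() method. A minimal sketch, assuming the 1.x Sort constructor; the class and method names are illustrative only.

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.query.Query;

class SortedQueryExample {

	Query byLastnameOrderedByFirstname() {
		// build the criteria first, then attach the sort via with(..)
		return query(where("lastname").is("Matthews")).with(new Sort(Sort.Direction.ASC, "firstname"));
	}
}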
@@ -1,4 +1,4 @@
Spring Data MongoDB 1.9 M1
Spring Data MongoDB 1.8.4
Copyright (c) [2010-2015] Pivotal Software, Inc.

This product is licensed to you under the Apache License, Version 2.0 (the "License").