Compare commits
309 Commits

| SHA1 |
|---|
| 4cdd59ffd4 |
| 5160cdadb6 |
| 296ca28efa |
| 86fa1d902a |
| 80834d8321 |
| 30b0ec8f57 |
| b157981507 |
| bfd96d529c |
| bf51bcd7ea |
| f9317b9f09 |
| 99bb92db4e |
| 87d609e1c4 |
| 8f223bf12f |
| a96253f9a4 |
| a0884b8227 |
| 2e6efae925 |
| 4e688574fc |
| 3abd38953c |
| 026f3fadd3 |
| 14e352aeae |
| 41aa498a1e |
| 74cead91fc |
| 559f160b21 |
| 62fdd3e519 |
| 1a414fa2b6 |
| 29a712e298 |
| 65a164874a |
| d6ff5162b3 |
| 28a84cafc5 |
| eb10b5c426 |
| b7b24215e3 |
| 2a27eb7404 |
| ad3ad1f65f |
| e7278888a7 |
| 374fa5a1f2 |
| be92489c58 |
| 88172c8674 |
| 2dc2072fce |
| a7b70d68d7 |
| 6dc2de03ad |
| dc0a03ee63 |
| 504c44cdf3 |
| 9ea8b6f6d1 |
| 37c9de6af9 |
| 9a1ee81f3f |
| c8331c3a1c |
| b794d536c5 |
| afa05122d2 |
| cf06a6527c |
| 0760a8b425 |
| f65cd1d235 |
| 6f86208111 |
| 0c33f7e01a |
| 047df58724 |
| 9716a93576 |
| 03e5ebb741 |
| 6ac908b72e |
| 5f1049f2de |
| 07c79e8ba9 |
| a94cca3494 |
| 90842844d4 |
| f4c27407ac |
| 00e1ebb880 |
| dd32479ad4 |
| ce5fe50550 |
| 3a04bcb5d3 |
| acfe6ccc2c |
| 3f3ec19364 |
| 24cbef9bc5 |
| b5f74444de |
| 1b7e077296 |
| 7ebaf935d4 |
| 295d7579cb |
| a99950d83e |
| 0bf4ee2711 |
| 5b8da8dd41 |
| b56ca97f68 |
| f183b5c7e6 |
| 1fdde8f0c3 |
| b7c3e69653 |
| e64d69b8f5 |
| d81ed203db |
| eae463622c |
| 6ba8144bca |
| 9d1c1a9fc5 |
| f552ca8073 |
| d7bd82c643 |
| 078cca83e3 |
| 93ae6815bd |
| 13dcb8cda1 |
| a4497bcf8a |
| 6a82c47a4d |
| 8f8f5b7ce4 |
| c683813a7a |
| dbe983c3cb |
| 8e11fe84df |
| 9eb2856840 |
| 828b379f1f |
| 161fd8c09d |
| dc037dfef6 |
| c41653f9da |
| 51607c5ed8 |
| e2cbd3ee28 |
| 5944e6b57e |
| efd46498ef |
| 3d705a737f |
| 996c57bccf |
| a31e72ff06 |
| f07d8fca8c |
| 69dbdee01f |
| dedb9f3dc0 |
| 7d69b840fe |
| 4eaef300cb |
| ec1a6b5edd |
| adc5485c09 |
| f622b2916d |
| 26be0cf948 |
| e27c01fe5b |
| d639e58fb9 |
| 0195c2cb48 |
| 068e2ec49b |
| a9306b99ec |
| 3597194742 |
| 6f06ccec8e |
| 6fe7f220f9 |
| 45e70d493d |
| ce71ab83f2 |
| bf85d8facd |
| c5ff7cdb2b |
| f9ccf4f532 |
| ab731f40a7 |
| d8434fffa8 |
| 151b1d4510 |
| 168cf3e1f6 |
| 52dab0fa20 |
| 9257bab06e |
| 27f0a6f27a |
| 5bedbef2f2 |
| 51e7be8aa0 |
| 6c85bb39a3 |
| 07f7247707 |
| f669711670 |
| 5f3671f349 |
| 1335cb699b |
| 84414b87c0 |
| a1ecd4a501 |
| d7e6f2ee41 |
| 04870fb8b3 |
| 9d196b78f7 |
| 4229525928 |
| d861fecdb8 |
| f280e23095 |
| ed0e1d92c0 |
| d82fc22659 |
| 6616d6788c |
| 322a7cf033 |
| 0f487c10ba |
| 11417144bd |
| dafc59b163 |
| 566f9a80c4 |
| 89a42c5648 |
| 83ffbb00e8 |
| 84913cecab |
| 998bb09a92 |
| cd68a8db54 |
| df8477d180 |
| 244fbae0ce |
| 19e08a52c0 |
| 6389b1bb73 |
| cadcbf6106 |
| 118f007ca6 |
| cbb32bd29d |
| 9858dcd740 |
| 1fb76d135b |
| bb62c8b2f1 |
| 2cbe7bf885 |
| 6043f6b74d |
| ef1366592a |
| 01cf9fb8f3 |
| 285c406d5d |
| ad29e52a57 |
| 3cfe207c83 |
| c7e65cbc40 |
| b8e02efb04 |
| c7f20fb836 |
| 28a0202ef4 |
| 164e947045 |
| 7e65c0c87d |
| 9c1f753f17 |
| 0f821eb52d |
| e3aadd63ab |
| aa06d520df |
| 4fa1d4ba97 |
| ba9f11b345 |
| 4777dd2e5e |
| 64b4591b72 |
| bac5961fd8 |
| b3cac862d8 |
| b8ab2ad539 |
| 618d5bd5e9 |
| 096c3278b3 |
| c9d9976c22 |
| 65497f93d4 |
| af8a53bef6 |
| 916b856e97 |
| 7848da63f2 |
| f359a1d31a |
| 72d645feae |
| 72fe382bba |
| d25e840cf5 |
| df1775572a |
| 86670cd49f |
| 31a4bf906e |
| 599291e8b7 |
| f5a04fb9fb |
| 88558b67c3 |
| d52cb255e0 |
| 6c214cbc37 |
| 01012c1448 |
| 4c7befb910 |
| b62669ec8f |
| 0fb74caf9b |
| 9623dac01f |
| 695b27968c |
| 22933e4493 |
| 40aa6bbdd5 |
| 5e43f5846a |
| 2cfd4781bc |
| 031ab0c07b |
| 10f69f6623 |
| d7b03915a7 |
| ed55d48a53 |
| d5ed4e0ac2 |
| 75194730e9 |
| a09183d2eb |
| 45dd3cd988 |
| b24e34c360 |
| fa9b5efdab |
| 8f2ced8ada |
| ff92cf1429 |
| ba48290a3e |
| 70e5efd0d9 |
| 4eae229bff |
| 47f0607c49 |
| 753e794194 |
| d27bec8ed5 |
| c63f7f75dc |
| 84040518cf |
| c66b9a538c |
| a0c6b9aa64 |
| 9370c1ee01 |
| 2839e9491f |
| 6963f9e07a |
| 8dd08a36a0 |
| a908e89ef7 |
| 5ace4032ed |
| 621b299f6f |
| a2628d1b74 |
| 294616432d |
| 47dd512f95 |
| f16e8d85e5 |
| eb03ae61f2 |
| 5be66a3fee |
| d88e4c0e3e |
| 57d1449008 |
| 8d00a0d926 |
| e3fa844488 |
| 58bee75a6b |
| a402395f5c |
| 9d5f8f3ba0 |
| 7ebf953063 |
| 617ebe0ca7 |
| 7f76789664 |
| 81e5919ace |
| efd74956dc |
| 49eee40f7e |
| 8e93b844c7 |
| 3e64432f1a |
| 88c968ad36 |
| 99eefe0773 |
| 3d4569be14 |
| 57455c4a26 |
| f9e20d12b2 |
| 4d6152c65e |
| d81cc53c12 |
| af4b84ea43 |
| f9110828bc |
| f301837be5 |
| 4d29d937eb |
| 86c11bc614 |
| be34b4e503 |
| ebfa2c5689 |
| b245ef2d9e |
| 5ef40d54bc |
| c679dba438 |
| fd6e4000b5 |
| c12a27a8f8 |
| df2184f204 |
| e9c8644d23 |
| c730b8f479 |
| f3b31fc467 |
| f778b2554c |
| 9d292f64b9 |
| 8ab038f83c |
| 689552c28e |
| 9ea9912b23 |
| a952ce5d2b |
| b88d960893 |
| e44d1f5f9a |

**CONTRIBUTING.MD** (1 line, new file)

```diff
@@ -0,0 +1 @@
You find the contribution guidelines for Spring Data projects [here](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.md).
```

**README.md** (30 changed lines)

````diff
@@ -1,6 +1,6 @@
# Spring Data MongoDB

The primary goal of the [Spring Data](http://www.springsource.org/spring-data) project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.
The primary goal of the [Spring Data](http://projects.spring.io/spring-data) project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.

The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities. The Spring Data MongoDB project provides integration with the MongoDB document database. Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB DBCollection and easily writing a repository style data access layer.

@@ -8,12 +8,12 @@ The Spring Data MongoDB project aims to provide a familiar and consistent Spring

For a comprehensive treatment of all the Spring Data MongoDB features, please refer to:

* the [User Guide](http://static.springsource.org/spring-data/data-mongodb/docs/current/reference/html/)
* the [JavaDocs](http://static.springsource.org/spring-data/data-mongodb/docs/current/api/) have extensive comments in them as well.
* the home page of [Spring Data MongoDB](http://www.springsource.org/spring-data/mongodb) contains links to articles and other resources.
* for more detailed questions, use the [forum](http://forum.springsource.org/forumdisplay.php?f=80).
* the [User Guide](http://docs.spring.io/spring-data/mongodb/docs/current/reference/html/)
* the [JavaDocs](http://docs.spring.io/spring-data/mongodb/docs/current/api/) have extensive comments in them as well.
* the home page of [Spring Data MongoDB](http://projects.spring.io/spring-data-mongodb) contains links to articles and other resources.
* for more detailed questions, use [Spring Data Mongodb on Stackoverflow](http://stackoverflow.com/questions/tagged/spring-data-mongodb).

If you are new to Spring as well as to Spring Data, look for information about [Spring projects](http://www.springsource.org/projects).
If you are new to Spring as well as to Spring Data, look for information about [Spring projects](http://projects.spring.io/).


## Quick Start

@@ -26,7 +26,7 @@ Add the Maven dependency:
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.3.2.RELEASE</version>
<version>1.5.0.RELEASE</version>
</dependency>
```

@@ -36,13 +36,13 @@ If you'd rather like the latest snapshots of the upcoming major version, use our
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.4.0.BUILD-SNAPSHOT</version>
<version>1.6.0.BUILD-SNAPSHOT</version>
</dependency>

<repository>
<id>spring-libs-snapshot</id>
<name>Spring Snapshot Repository</name>
<url>http://repo.springsource.org/libs-snapshot</url>
<url>http://repo.spring.io/libs-snapshot</url>
</repository>
```

@@ -53,7 +53,7 @@ MongoTemplate is the central support class for Mongo database operations. It pro
* Basic POJO mapping support to and from BSON
* Convenience methods to interact with the store (insert object, update objects) and MongoDB specific ones (geo-spatial operations, upserts, map-reduce etc.)
* Connection affinity callback
* Exception translation into Spring's [technology agnostic DAO exception hierarchy](http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/dao.html#dao-exceptions).
* Exception translation into Spring's [technology agnostic DAO exception hierarchy](http://docs.spring.io/spring/docs/current/spring-framework-reference/html/dao.html#dao-exceptions).

### Spring Data repositories

@@ -81,7 +81,7 @@ class ApplicationConfig extends AbstractMongoConfiguration {

@Override
public Mongo mongo() throws Exception {
return new Mongo();
return new MongoClient();
}

@Override
@@ -94,9 +94,9 @@ class ApplicationConfig extends AbstractMongoConfiguration {
This sets up a connection to a local MongoDB instance and enables the detection of Spring Data repositories (through `@EnableMongoRepositories`). The same configuration would look like this in XML:

```xml
<bean id="template" class="org.springframework.data.document.mongodb.MongoTemplate">
<bean id="template" class="org.springframework.data.mongodb.core.MongoTemplate">
<constructor-arg>
<bean class="com.mongodb.Mongo">
<bean class="com.mongodb.MongoClient">
<constructor-arg value="localhost" />
<constructor-arg value="27017" />
</bean>
@@ -139,9 +139,9 @@ public class MyService {

Here are some ways for you to get involved in the community:

* Get involved with the Spring community on the Spring Community Forums. Please help out on the [forum](http://forum.springsource.org/forumdisplay.php?f=80) by responding to questions and joining the debate.
* Get involved with the Spring community on Stackoverflow and help out on the [spring-data-mongodb](http://stackoverflow.com/questions/tagged/spring-data-mongodb) tag by responding to questions and joining the debate.
* Create [JIRA](https://jira.springframework.org/browse/DATADOC) tickets for bugs and new features and comment and vote on the ones that you are interested in.
* Github is for social coding: if you want to write code, we encourage contributions through pull requests from [forks of this repository](http://help.github.com/forking/). If you want to contribute code this way, please reference a JIRA ticket as well covering the specific issue you are addressing.
* Watch for upcoming articles on Spring by [subscribing](http://www.springsource.org/node/feed) to springframework.org
* Watch for upcoming articles on Spring by [subscribing](http://spring.io/blog) to spring.io.

Before we accept a non-trivial patch or pull request we will need you to sign the [contributor's agreement](https://support.springsource.com/spring_committer_signup). Signing the contributor's agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. Active contributors might be asked to join the core team, and given the ability to merge pull requests.
````
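
The README's Quick Start above switches the example configuration from `Mongo` to `MongoClient`. As a rough usage sketch of the `MongoTemplate` features the README lists (POJO mapping, insert and query convenience methods), the following assumes a hypothetical `Person` document class and an illustrative database name; it is not part of the diff itself.

```java
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.MongoClient;

public class QuickStartSketch {

	// Hypothetical document class, used only for illustration.
	@Document
	static class Person {
		String id;
		String name;

		Person(String name) {
			this.name = name;
		}
	}

	public static void main(String[] args) throws Exception {
		// Connects to a local MongoDB instance, mirroring the Java config shown in the README.
		MongoTemplate template = new MongoTemplate(new MongoClient(), "quickstart");

		template.save(new Person("Joe"));

		Person joe = template.findOne(Query.query(Criteria.where("name").is("Joe")), Person.class);
		System.out.println(joe.name);
	}
}
```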

**pom.xml** (87 changed lines)

```diff
@@ -1,24 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

<modelVersion>4.0.0</modelVersion>


<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.4.0.M1</version>
<version>1.6.5.BUILD-SNAPSHOT</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>
<description>MongoDB support for Spring Data</description>
<url>http://www.springsource.org/spring-data/mongodb</url>
<url>http://projects.spring.io/spring-data-mongodb</url>

<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>1.3.0.M1</version>
<relativePath>../spring-data-build/parent/pom.xml</relativePath>
<version>1.5.5.BUILD-SNAPSHOT</version>
</parent>


<modules>
<module>spring-data-mongodb</module>
<module>spring-data-mongodb-cross-store</module>
@@ -29,10 +28,11 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>1.7.0.M1</springdata.commons>
<mongo>2.11.3</mongo>
<springdata.commons>1.9.5.BUILD-SNAPSHOT</springdata.commons>
<mongo>2.12.5</mongo>
<mongo.osgi>2.12.5</mongo.osgi>
</properties>


<developers>
<developer>
<id>ogierke</id>
@@ -89,8 +89,62 @@
</roles>
<timezone>+1</timezone>
</developer>
<developer>
<id>cstrobl</id>
<name>Christoph Strobl</name>
<email>cstrobl at gopivotal.com</email>
<organization>Pivotal</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Developer</role>
</roles>
<timezone>+1</timezone>
</developer>
</developers>

<profiles>
<profile>

<id>mongo-next</id>
<properties>
<mongo>2.14.0-SNAPSHOT</mongo>
</properties>

<repositories>
<repository>
<id>mongo-snapshots</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
</repository>
</repositories>

</profile>

<profile>

<id>mongo3</id>
<properties>
<mongo>3.0.2</mongo>
</properties>

</profile>

<profile>

<id>mongo-3-next</id>
<properties>
<mongo>3.0.0-SNAPSHOT</mongo>
</properties>

<repositories>
<repository>
<id>mongo-snapshots</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
</repository>
</repositories>

</profile>
</profiles>

<dependencies>
<!-- MongoDB -->
<dependency>
@@ -99,12 +153,19 @@
<version>${mongo}</version>
</dependency>
</dependencies>


<repositories>
<repository>
<id>spring-libs-milestone</id>
<url>http://repo.springsource.org/libs-milestone-local</url>
<id>spring-libs-snapshot</id>
<url>https://repo.spring.io/libs-snapshot</url>
</repository>
</repositories>

<pluginRepositories>
<pluginRepository>
<id>spring-plugins-release</id>
<url>http://repo.spring.io/plugins-release</url>
</pluginRepository>
</pluginRepositories>

</project>
```

**spring-data-mongodb-cross-store/aop.xml** (7 lines, new file)

```diff
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<aspectj>
<aspects>
<aspect name="org.springframework.beans.factory.aspectj.AnnotationBeanConfigurerAspect" />
<aspect name="org.springframework.data.mongodb.crossstore.MongoDocumentBacking" />
</aspects>
</aspectj>
```

```diff
@@ -2,29 +2,28 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">

<modelVersion>4.0.0</modelVersion>


<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.4.0.M1</version>
<version>1.6.5.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>


<artifactId>spring-data-mongodb-cross-store</artifactId>
<name>Spring Data MongoDB - Cross-Store Persistence Support</name>

<name>Spring Data MongoDB - Cross-Store Support</name>

<properties>
<jpa>1.0.0.Final</jpa>
<jpa>2.0.0</jpa>
<hibernate>3.6.10.Final</hibernate>
</properties>


<dependencies>

<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring}</version>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
@@ -35,24 +34,21 @@
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-aspects</artifactId>
<version>${spring}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
<version>${spring}</version>
</dependency>

<!-- Spring Data -->
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.4.0.M1</version>
<version>1.6.5.BUILD-SNAPSHOT</version>
</dependency>

<dependency>
@@ -60,17 +56,13 @@
<artifactId>aspectjrt</artifactId>
<version>${aspectj}</version>
</dependency>
<dependency>
<groupId>cglib</groupId>
<artifactId>cglib</artifactId>
<version>2.2</version>
</dependency>

<!-- JPA -->
<dependency>
<groupId>org.hibernate.javax.persistence</groupId>
<artifactId>hibernate-jpa-2.0-api</artifactId>
<groupId>org.eclipse.persistence</groupId>
<artifactId>javax.persistence</artifactId>
<version>${jpa}</version>
<optional>true</optional>
</dependency>

<!-- For Tests -->
@@ -106,7 +98,7 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>aspectj-maven-plugin</artifactId>
<version>1.4</version>
<version>1.6</version>
<dependencies>
<dependency>
<groupId>org.aspectj</groupId>
@@ -135,8 +127,10 @@
<artifactId>spring-aspects</artifactId>
</aspectLibrary>
</aspectLibraries>
<complianceLevel>${source.level}</complianceLevel>
<source>${source.level}</source>
<target>${source.level}</target>
<xmlConfigured>aop.xml</xmlConfigured>
</configuration>
</plugin>
</plugins>
```

```diff
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -41,17 +41,13 @@ import com.mongodb.MongoException;
public class MongoChangeSetPersister implements ChangeSetPersister<Object> {

private static final String ENTITY_CLASS = "_entity_class";

private static final String ENTITY_ID = "_entity_id";

private static final String ENTITY_FIELD_NAME = "_entity_field_name";

private static final String ENTITY_FIELD_CLASS = "_entity_field_class";

protected final Logger log = LoggerFactory.getLogger(getClass());

private MongoTemplate mongoTemplate;

private EntityManagerFactory entityManagerFactory;

public void setMongoTemplate(MongoTemplate mongoTemplate) {
@@ -113,12 +109,14 @@
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
*/
public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {

log.debug("getPersistentId called on " + entity);

if (entityManagerFactory == null) {
throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null");
}
Object o = entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity);
return o;

return entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity);
}

/*
```

```diff
@@ -1,13 +1,13 @@
Bundle-SymbolicName: org.springframework.data.mongodb.crossstore
Bundle-Name: Spring Data MongoDB Cross Store Support
Bundle-Vendor: SpringSource
Bundle-Vendor: Pivotal Software, Inc.
Bundle-ManifestVersion: 2
Import-Package:
Import-Package:
sun.reflect;version="0";resolution:=optional
Export-Template:
org.springframework.data.mongodb.crossstore.*;version="${project.version}"
Import-Template:
com.mongodb.*;version="0",
Import-Template:
com.mongodb.*;version="${mongo.osgi:[=.=.=,+1.0.0)}",
javax.persistence.*;version="${jpa:[=.=.=,+1.0.0)}",
org.aspectj.*;version="${aspectj:[1.0.0, 2.0.0)}",
org.bson.*;version="0",
```

```diff
@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

<modelVersion>4.0.0</modelVersion>


<artifactId>spring-data-mongodb-distribution</artifactId>

<packaging>pom</packaging>
@@ -13,10 +13,10 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.4.0.M1</version>
<version>1.6.5.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>


<properties>
<project.root>${basedir}/..</project.root>
<dist.key>SDMONGO</dist.key>
@@ -32,6 +32,10 @@
<groupId>org.codehaus.mojo</groupId>
<artifactId>wagon-maven-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.asciidoctor</groupId>
<artifactId>asciidoctor-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
```

```diff
@@ -1,11 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>


<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.4.0.M1</version>
<version>1.6.5.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
```

```diff
@@ -1,9 +1,9 @@
Bundle-SymbolicName: org.springframework.data.mongodb.log4j
Bundle-Name: Spring Data Mongo DB Log4J Appender
Bundle-Vendor: SpringSource
Bundle-Vendor: Pivotal Software, Inc.
Bundle-ManifestVersion: 2
Import-Package:
sun.reflect;version="0";resolution:=optional
Import-Template:
com.mongodb.*;version="${mongo:[=.=,+1.0.0)}",
com.mongodb.*;version="${mongo.osgi:[=.=.=,+1.0.0)}",
org.apache.log4j.*;version="${log4j:[=.=.=,+1.0.0)}"
```

```diff
@@ -1,19 +1,10 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<context version="7.1.9.205">
<context version="7.1.10.209">
<scope type="Project" name="spring-data-mongodb">
<element type="TypeFilterReferenceOverridden" name="Filter">
<element type="IncludeTypePattern" name="org.springframework.data.mongodb.**"/>
</element>
<architecture>
<element type="Layer" name="Config">
<element type="TypeFilter" name="Assignment">
<element type="WeakTypePattern" name="**.config.**"/>
</element>
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|GridFS" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Monitoring" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Repositories" type="AllowedDependency"/>
</element>
<element type="Layer" name="Repositories">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.repository.**"/>
@@ -40,10 +31,21 @@
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.config.**"/>
</element>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|API" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|Implementation" type="AllowedDependency"/>
</element>
<dependency toName="Project|spring-data-mongodb::Layer|Config" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
</element>
<element type="Layer" name="Config">
<element type="TypeFilter" name="Assignment">
<element type="WeakTypePattern" name="**.config.**"/>
</element>
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|GridFS" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Monitoring" type="AllowedDependency"/>
</element>
<element type="Layer" name="Monitoring">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.monitor.**"/>
@@ -57,41 +59,39 @@
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
</element>
<element type="Layer" name="Core">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.core.**"/>
</element>
<element type="TypeFilter" name="Assignment"/>
<element type="Subsystem" name="Mapping">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.mapping.**"/>
<element type="IncludeTypePattern" name="**.core.mapping.**"/>
</element>
</element>
<element type="Subsystem" name="Geospatial">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.geo.**"/>
<element type="IncludeTypePattern" name="**.core.geo.**"/>
</element>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
</element>
<element type="Subsystem" name="Query">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.query.**"/>
<element type="IncludeTypePattern" name="**.core.query.**"/>
</element>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
</element>
<element type="Subsystem" name="Conversion">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.convert.**"/>
<element type="IncludeTypePattern" name="**.core.convert.**"/>
</element>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
</element>
<element type="Subsystem" name="SpEL">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.spel.**"/>
<element type="IncludeTypePattern" name="**.core.spel.**"/>
</element>
</element>
<element type="Subsystem" name="Aggregation">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.aggregation.**"/>
<element type="IncludeTypePattern" name="**.core.aggregation.**"/>
</element>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Conversion" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
@@ -100,7 +100,7 @@
</element>
<element type="Subsystem" name="Index">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.index.**"/>
<element type="IncludeTypePattern" name="**.core.index.**"/>
</element>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
@@ -116,6 +116,13 @@
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
</element>
<element type="Subsystem" name="Util">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.util.**"/>
</element>
<stereotype name="Unrestricted"/>
<stereotype name="Public"/>
</element>
</element>
<element type="Subsystem" name="API">
<element type="TypeFilter" name="Assignment">
```

```diff
@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

<modelVersion>4.0.0</modelVersion>


<artifactId>spring-data-mongodb</artifactId>

<name>Spring Data MongoDB - Core</name>
@@ -11,37 +11,34 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.4.0.M1</version>
<version>1.6.5.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

<properties>
<validation>1.0.0.GA</validation>
<objenesis>1.3</objenesis>
<equalsverifier>1.5</equalsverifier>
</properties>

<dependencies>

<!-- Spring -->

<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring}</version>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
@@ -52,10 +49,9 @@
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-expression</artifactId>
<version>${spring}</version>
</dependency>

<!-- Spring Data -->
<!-- Spring Data -->
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>spring-data-commons</artifactId>
@@ -82,7 +78,7 @@
<version>1.0</version>
<optional>true</optional>
</dependency>


<!-- CDI -->
<dependency>
<groupId>javax.enterprise</groupId>
@@ -91,21 +87,21 @@
<scope>provided</scope>
<optional>true</optional>
</dependency>


<dependency>
<groupId>javax.el</groupId>
<artifactId>el-api</artifactId>
<version>${cdi}</version>
<scope>test</scope>
</dependency>


<dependency>
<groupId>org.apache.openwebbeans.test</groupId>
<artifactId>cditest-owb</artifactId>
<version>${webbeans}</version>
<scope>test</scope>
</dependency>


<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
@@ -120,7 +116,7 @@
<version>${validation}</version>
<optional>true</optional>
</dependency>


<dependency>
<groupId>org.objenesis</groupId>
<artifactId>objenesis</artifactId>
@@ -134,23 +130,36 @@
<version>4.2.0.Final</version>
<scope>test</scope>
</dependency>


<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>${jodatime}</version>
<scope>test</scope>
</dependency>


<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jul-to-slf4j</artifactId>
<version>${slf4j}</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>nl.jqno.equalsverifier</groupId>
<artifactId>equalsverifier</artifactId>
<version>${equalsverifier}</version>
<scope>test</scope>
</dependency>
</dependencies>


<build>
<plugins>

<plugin>
<groupId>com.mysema.maven</groupId>
<artifactId>apt-maven-plugin</artifactId>
<version>1.0.8</version>
<version>${apt}</version>
<dependencies>
<dependency>
<groupId>com.mysema.querydsl</groupId>
@@ -187,9 +196,14 @@
<systemPropertyVariables>
<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
</systemPropertyVariables>
<properties>
<property>
<name>listener</name>
<value>org.springframework.data.mongodb.test.util.CleanMongoDBJunitRunListener</value>
</property>
</properties>
</configuration>
</plugin>
</plugins>
</build>

</project>
```

```diff
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -28,13 +28,16 @@ import org.springframework.core.type.filter.AnnotationTypeFilter;
import org.springframework.data.annotation.Persistent;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mapping.model.FieldNamingStrategy;
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.support.CachingIsNewStrategyFactory;
@@ -50,6 +53,7 @@ import com.mongodb.Mongo;
* @author Mark Pollack
* @author Oliver Gierke
* @author Thomas Darimont
* @author Ryan Tenney
*/
@Configuration
public abstract class AbstractMongoConfiguration {
@@ -101,7 +105,7 @@
* @throws Exception
*/
@Bean
public SimpleMongoDbFactory mongoDbFactory() throws Exception {
public MongoDbFactory mongoDbFactory() throws Exception {
return new SimpleMongoDbFactory(mongo(), getDatabaseName(), getUserCredentials(), getAuthenticationDatabaseName());
}

@@ -115,7 +119,9 @@
* entities.
*/
protected String getMappingBasePackage() {
return getClass().getPackage().getName();

Package mappingBasePackage = getClass().getPackage();
return mappingBasePackage == null ? null : mappingBasePackage.getName();
}

/**
@@ -141,10 +147,7 @@
MongoMappingContext mappingContext = new MongoMappingContext();
mappingContext.setInitialEntitySet(getInitialEntitySet());
mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());

if (abbreviateFieldNames()) {
mappingContext.setFieldNamingStrategy(new CamelCaseAbbreviatingFieldNamingStrategy());
}
mappingContext.setFieldNamingStrategy(fieldNamingStrategy());

return mappingContext;
}
@@ -229,4 +232,15 @@
protected boolean abbreviateFieldNames() {
return false;
}

/**
* Configures a {@link FieldNamingStrategy} on the {@link MongoMappingContext} instance created.
*
* @return
* @since 1.5
*/
protected FieldNamingStrategy fieldNamingStrategy() {
return abbreviateFieldNames() ? new CamelCaseAbbreviatingFieldNamingStrategy()
: PropertyNameFieldNamingStrategy.INSTANCE;
}
}
```
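
The `AbstractMongoConfiguration` change above replaces the internal `abbreviateFieldNames()` switch with an overridable `fieldNamingStrategy()` hook. A minimal sketch of a configuration class using the new hook could look like the following; the database name is illustrative, and the two overridden abstract methods (`getDatabaseName()`, `mongo()`) are the ones the base class already requires.

```java
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mapping.model.FieldNamingStrategy;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;

import com.mongodb.Mongo;
import com.mongodb.MongoClient;

@Configuration
class FieldNamingConfig extends AbstractMongoConfiguration {

	@Override
	protected String getDatabaseName() {
		return "database"; // illustrative database name
	}

	@Override
	public Mongo mongo() throws Exception {
		return new MongoClient();
	}

	// New extension point from the diff: the returned strategy is applied to the
	// MongoMappingContext created by this configuration class.
	@Override
	protected FieldNamingStrategy fieldNamingStrategy() {
		return new CamelCaseAbbreviatingFieldNamingStrategy();
	}
}
```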

```diff
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -24,13 +24,14 @@ package org.springframework.data.mongodb.config;
*/
public abstract class BeanNames {

static final String MAPPING_CONTEXT = "mappingContext";
static final String INDEX_HELPER = "indexCreationHelper";
static final String MONGO = "mongo";
static final String DB_FACTORY = "mongoDbFactory";
static final String VALIDATING_EVENT_LISTENER = "validatingMongoEventListener";
static final String IS_NEW_STRATEGY_FACTORY = "isNewStrategyFactory";
public static final String MAPPING_CONTEXT_BEAN_NAME = "mongoMappingContext";

static final String INDEX_HELPER_BEAN_NAME = "indexCreationHelper";
static final String MONGO_BEAN_NAME = "mongo";
static final String DB_FACTORY_BEAN_NAME = "mongoDbFactory";
static final String VALIDATING_EVENT_LISTENER_BEAN_NAME = "validatingMongoEventListener";
static final String IS_NEW_STRATEGY_FACTORY_BEAN_NAME = "isNewStrategyFactory";
static final String DEFAULT_CONVERTER_BEAN_NAME = "mappingConverter";
static final String MONGO_TEMPLATE = "mongoTemplate";
static final String GRID_FS_TEMPLATE = "gridFsTemplate";
static final String MONGO_TEMPLATE_BEAN_NAME = "mongoTemplate";
static final String GRID_FS_TEMPLATE_BEAN_NAME = "gridFsTemplate";
}
```

```diff
@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -43,7 +43,7 @@ class GridFsTemplateParser extends AbstractBeanDefinitionParser {
throws BeanDefinitionStoreException {

String id = super.resolveId(element, definition, parserContext);
return StringUtils.hasText(id) ? id : BeanNames.GRID_FS_TEMPLATE;
return StringUtils.hasText(id) ? id : BeanNames.GRID_FS_TEMPLATE_BEAN_NAME;
}

/*
@@ -57,13 +57,14 @@

String converterRef = element.getAttribute("converter-ref");
String dbFactoryRef = element.getAttribute("db-factory-ref");
String bucket = element.getAttribute("bucket");

BeanDefinitionBuilder gridFsTemplateBuilder = BeanDefinitionBuilder.genericBeanDefinition(GridFsTemplate.class);

if (StringUtils.hasText(dbFactoryRef)) {
gridFsTemplateBuilder.addConstructorArgReference(dbFactoryRef);
} else {
gridFsTemplateBuilder.addConstructorArgReference(BeanNames.DB_FACTORY);
gridFsTemplateBuilder.addConstructorArgReference(BeanNames.DB_FACTORY_BEAN_NAME);
}

if (StringUtils.hasText(converterRef)) {
@@ -72,7 +73,11 @@
gridFsTemplateBuilder.addConstructorArgReference(BeanNames.DEFAULT_CONVERTER_BEAN_NAME);
}

return (AbstractBeanDefinition) helper.getComponentIdButFallback(gridFsTemplateBuilder, BeanNames.GRID_FS_TEMPLATE)
if (StringUtils.hasText(bucket)) {
gridFsTemplateBuilder.addConstructorArgValue(bucket);
}

return (AbstractBeanDefinition) helper.getComponentIdButFallback(gridFsTemplateBuilder, BeanNames.GRID_FS_TEMPLATE_BEAN_NAME)
.getBeanDefinition();
}
}
```
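
The `GridFsTemplateParser` change above adds support for a `bucket` attribute that is handed to `GridFsTemplate` as an extra constructor argument. A sketch of constructing the template programmatically with a custom bucket follows; the database name, the bucket name, and the externally supplied `MongoConverter` are assumptions made to keep the example short.

```java
import java.io.ByteArrayInputStream;
import java.io.InputStream;

import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.gridfs.GridFsTemplate;

import com.mongodb.MongoClient;

public class GridFsBucketSketch {

	// The converter would normally be the MappingMongoConverter created by the XML namespace
	// or by AbstractMongoConfiguration; it is passed in here to keep the sketch self-contained.
	static GridFsTemplate gridFsTemplate(MongoConverter converter) throws Exception {
		MongoDbFactory factory = new SimpleMongoDbFactory(new MongoClient(), "database");
		// The third constructor argument is the GridFS bucket, matching the new "bucket" attribute.
		return new GridFsTemplate(factory, converter, "attachments");
	}

	static void storeSample(GridFsTemplate gridFs) {
		InputStream content = new ByteArrayInputStream("hello".getBytes());
		gridFs.store(content, "hello.txt");
	}
}
```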

```diff
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -30,6 +30,7 @@ import org.springframework.beans.factory.config.BeanDefinitionHolder;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.parsing.BeanComponentDefinition;
import org.springframework.beans.factory.parsing.CompositeComponentDefinition;
import org.springframework.beans.factory.parsing.ReaderContext;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
@@ -51,10 +52,10 @@ import org.springframework.core.type.filter.TypeFilter;
import org.springframework.data.annotation.Persistent;
import org.springframework.data.config.BeanComponentDefinitionBuilder;
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator;
import org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener;
@@ -71,11 +72,12 @@ import org.w3c.dom.Element;
* @author Oliver Gierke
* @author Maciej Walkowiak
* @author Thomas Darimont
* @author Christoph Strobl
*/
public class MappingMongoConverterParser implements BeanDefinitionParser {

private static final String BASE_PACKAGE = "base-package";
private static final boolean jsr303Present = ClassUtils.isPresent("javax.validation.Validator",
private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("javax.validation.Validator",
MappingMongoConverterParser.class.getClassLoader());

/* (non-Javadoc)
@@ -83,10 +85,13 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
*/
public BeanDefinition parse(Element element, ParserContext parserContext) {

BeanDefinitionRegistry registry = parserContext.getRegistry();
if (parserContext.isNested()) {
parserContext.getReaderContext().error("Mongo Converter must not be defined as nested bean.", element);
}

BeanDefinitionRegistry registry = parserContext.getRegistry();
String id = element.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE);
id = StringUtils.hasText(id) ? id : "mappingConverter";
id = StringUtils.hasText(id) ? id : DEFAULT_CONVERTER_BEAN_NAME;

parserContext.pushContainingComponent(new CompositeComponentDefinition("Mapping Mongo Converter", element));

@@ -98,7 +103,7 @@
// Need a reference to a Mongo instance
String dbFactoryRef = element.getAttribute("db-factory-ref");
if (!StringUtils.hasText(dbFactoryRef)) {
dbFactoryRef = DB_FACTORY;
dbFactoryRef = DB_FACTORY_BEAN_NAME;
}

// Converter
@@ -116,10 +121,10 @@
}

try {
registry.getBeanDefinition(INDEX_HELPER);
registry.getBeanDefinition(INDEX_HELPER_BEAN_NAME);
} catch (NoSuchBeanDefinitionException ignored) {
if (!StringUtils.hasText(dbFactoryRef)) {
dbFactoryRef = DB_FACTORY;
dbFactoryRef = DB_FACTORY_BEAN_NAME;
}
BeanDefinitionBuilder indexHelperBuilder = BeanDefinitionBuilder
.genericBeanDefinition(MongoPersistentEntityIndexCreator.class);
@@ -128,14 +133,14 @@
indexHelperBuilder.addDependsOn(ctxRef);

parserContext.registerBeanComponent(new BeanComponentDefinition(indexHelperBuilder.getBeanDefinition(),
INDEX_HELPER));
INDEX_HELPER_BEAN_NAME));
}

BeanDefinition validatingMongoEventListener = potentiallyCreateValidatingMongoEventListener(element, parserContext);

if (validatingMongoEventListener != null) {
parserContext.registerBeanComponent(new BeanComponentDefinition(validatingMongoEventListener,
VALIDATING_EVENT_LISTENER));
VALIDATING_EVENT_LISTENER_BEAN_NAME));
}

parserContext.registerBeanComponent(new BeanComponentDefinition(converterBuilder.getBeanDefinition(), id));
@@ -166,7 +171,7 @@

private RuntimeBeanReference getValidator(Object source, ParserContext parserContext) {

if (!jsr303Present) {
if (!JSR_303_PRESENT) {
return null;
}

@@ -180,7 +185,7 @@
return new RuntimeBeanReference(validatorName);
}

static String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
BeanDefinition conversionsDefinition, String converterId) {

String ctxRef = element.getAttribute("mapping-context-ref");
@@ -195,7 +200,8 @@
BeanDefinitionBuilder mappingContextBuilder = BeanDefinitionBuilder
.genericBeanDefinition(MongoMappingContext.class);

Set<String> classesToAdd = getInititalEntityClasses(element, mappingContextBuilder);
Set<String> classesToAdd = getInititalEntityClasses(element);

if (classesToAdd != null) {
mappingContextBuilder.addPropertyValue("initialEntitySet", classesToAdd);
}
@@ -208,18 +214,43 @@
mappingContextBuilder.addPropertyValue("simpleTypeHolder", simpleTypesDefinition);
}

String abbreviateFieldNames = element.getAttribute("abbreviate-field-names");
if ("true".equals(abbreviateFieldNames)) {
mappingContextBuilder.addPropertyValue("fieldNamingStrategy", new RootBeanDefinition(
CamelCaseAbbreviatingFieldNamingStrategy.class));
}
parseFieldNamingStrategy(element, parserContext.getReaderContext(), mappingContextBuilder);

ctxRef = converterId + "." + MAPPING_CONTEXT;
ctxRef = converterId == null || DEFAULT_CONVERTER_BEAN_NAME.equals(converterId) ? MAPPING_CONTEXT_BEAN_NAME
: converterId + "." + MAPPING_CONTEXT_BEAN_NAME;

parserContext.registerBeanComponent(componentDefinitionBuilder.getComponent(mappingContextBuilder, ctxRef));
return ctxRef;
}

private static void parseFieldNamingStrategy(Element element, ReaderContext context, BeanDefinitionBuilder builder) {

String abbreviateFieldNames = element.getAttribute("abbreviate-field-names");
String fieldNamingStrategy = element.getAttribute("field-naming-strategy-ref");

boolean fieldNamingStrategyReferenced = StringUtils.hasText(fieldNamingStrategy);
boolean abbreviationActivated = StringUtils.hasText(abbreviateFieldNames)
&& Boolean.parseBoolean(abbreviateFieldNames);

if (fieldNamingStrategyReferenced && abbreviationActivated) {
context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured!",
element);
return;
}

Object value = null;

if ("true".equals(abbreviateFieldNames)) {
value = new RootBeanDefinition(CamelCaseAbbreviatingFieldNamingStrategy.class);
} else if (fieldNamingStrategyReferenced) {
value = new RuntimeBeanReference(fieldNamingStrategy);
}

if (value != null) {
builder.addPropertyValue("fieldNamingStrategy", value);
}
}

private BeanDefinition getCustomConversions(Element element, ParserContext parserContext) {

List<Element> customConvertersElements = DomUtils.getChildElementsByTagName(element, "custom-converters");
@@ -262,7 +293,7 @@
return null;
}

private static Set<String> getInititalEntityClasses(Element element, BeanDefinitionBuilder builder) {
private static Set<String> getInititalEntityClasses(Element element) {

String basePackage = element.getAttribute(BASE_PACKAGE);

@@ -309,9 +340,10 @@
mappingContextStrategyFactoryBuilder.addConstructorArgReference(mappingContextRef);

BeanComponentDefinitionBuilder builder = new BeanComponentDefinitionBuilder(element, context);
context.registerBeanComponent(builder.getComponent(mappingContextStrategyFactoryBuilder, IS_NEW_STRATEGY_FACTORY));
context.registerBeanComponent(builder.getComponent(mappingContextStrategyFactoryBuilder,
IS_NEW_STRATEGY_FACTORY_BEAN_NAME));

return IS_NEW_STRATEGY_FACTORY;
return IS_NEW_STRATEGY_FACTORY_BEAN_NAME;
}

/**
```
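
The new `field-naming-strategy-ref` handling above lets an XML configuration reference a custom `FieldNamingStrategy` bean instead of the camel-case abbreviation. A hypothetical implementation of that interface (from Spring Data Commons) could be as small as this; the prefixing rule is purely illustrative.

```java
import org.springframework.data.mapping.PersistentProperty;
import org.springframework.data.mapping.model.FieldNamingStrategy;

// Illustrative strategy: prefix every mapped field name with an underscore.
public class UnderscorePrefixingFieldNamingStrategy implements FieldNamingStrategy {

	@Override
	public String getFieldName(PersistentProperty<?> property) {
		return "_" + property.getName();
	}
}
```

Such a class would be declared as a regular bean and referenced via the `field-naming-strategy-ref` attribute; per the parser above it cannot be combined with `abbreviate-field-names="true"`.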

```diff
@@ -1,5 +1,5 @@
/*
* Copyright 2012 the original author or authors.
* Copyright 2012-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,14 +15,19 @@
*/
package org.springframework.data.mongodb.config;

import org.springframework.beans.factory.config.BeanDefinition;
import static org.springframework.data.config.ParsingUtils.*;
import static org.springframework.data.mongodb.config.BeanNames.*;

import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser;
import org.springframework.beans.factory.xml.BeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.data.config.IsNewAwareAuditingHandlerBeanDefinitionParser;
import org.springframework.data.auditing.config.IsNewAwareAuditingHandlerBeanDefinitionParser;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
import org.springframework.util.StringUtils;
import org.w3c.dom.Element;

/**
@@ -58,23 +63,24 @@ public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinit
@Override
protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) {

BeanDefinitionRegistry registry = parserContext.getRegistry();
String mappingContextRef = element.getAttribute("mapping-context-ref");

if (!registry.containsBeanDefinition(BeanNames.IS_NEW_STRATEGY_FACTORY)) {
if (!StringUtils.hasText(mappingContextRef)) {

String mappingContextName = BeanNames.MAPPING_CONTEXT;
BeanDefinitionRegistry registry = parserContext.getRegistry();

if (!registry.containsBeanDefinition(BeanNames.MAPPING_CONTEXT)) {
mappingContextName = MappingMongoConverterParser.potentiallyCreateMappingContext(element, parserContext, null,
BeanNames.DEFAULT_CONVERTER_BEAN_NAME);
if (!registry.containsBeanDefinition(MAPPING_CONTEXT_BEAN_NAME)) {
registry.registerBeanDefinition(MAPPING_CONTEXT_BEAN_NAME, new RootBeanDefinition(MongoMappingContext.class));
}

MappingMongoConverterParser.createIsNewStrategyFactoryBeanDefinition(mappingContextName, parserContext, element);
mappingContextRef = MAPPING_CONTEXT_BEAN_NAME;
}

BeanDefinitionParser parser = new IsNewAwareAuditingHandlerBeanDefinitionParser(BeanNames.IS_NEW_STRATEGY_FACTORY);
BeanDefinition handlerBeanDefinition = parser.parse(element, parserContext);
IsNewAwareAuditingHandlerBeanDefinitionParser parser = new IsNewAwareAuditingHandlerBeanDefinitionParser(
mappingContextRef);
parser.parse(element, parserContext);

builder.addConstructorArgValue(handlerBeanDefinition);
builder.addConstructorArgValue(getObjectFactoryBeanDefinition(parser.getResolvedBeanName(),
parserContext.extractSource(element)));
}
}
```
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,17 +15,22 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import static org.springframework.beans.factory.config.BeanDefinition.*;
|
||||
import static org.springframework.data.mongodb.config.BeanNames.*;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.beans.factory.support.RootBeanDefinition;
|
||||
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
|
||||
import org.springframework.core.type.AnnotationMetadata;
|
||||
import org.springframework.data.auditing.IsNewAwareAuditingHandler;
|
||||
import org.springframework.data.auditing.config.AnnotationAuditingConfiguration;
|
||||
import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
|
||||
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
|
||||
import org.springframework.data.auditing.config.AuditingConfiguration;
|
||||
import org.springframework.data.config.ParsingUtils;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
|
||||
import org.springframework.data.support.IsNewStrategyFactory;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -47,6 +52,15 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
return EnableMongoAuditing.class;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName()
|
||||
*/
|
||||
@Override
|
||||
protected String getAuditingHandlerBeanName() {
|
||||
return "mongoAuditingHandler";
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerBeanDefinitions(org.springframework.core.type.AnnotationMetadata, org.springframework.beans.factory.support.BeanDefinitionRegistry)
|
||||
@@ -57,22 +71,22 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
Assert.notNull(annotationMetadata, "AnnotationMetadata must not be null!");
|
||||
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
|
||||
|
||||
registerIsNewStrategyFactoryIfNecessary(registry);
|
||||
defaultDependenciesIfNecessary(registry, annotationMetadata);
|
||||
super.registerBeanDefinitions(annotationMetadata, registry);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AnnotationAuditingConfiguration)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration)
|
||||
*/
|
||||
@Override
|
||||
protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AnnotationAuditingConfiguration configuration) {
|
||||
protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) {
|
||||
|
||||
Assert.notNull(configuration, "AnnotationAuditingConfiguration must not be null!");
|
||||
Assert.notNull(configuration, "AuditingConfiguration must not be null!");
|
||||
|
||||
return configureDefaultAuditHandlerAttributes(configuration,
|
||||
BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class)).addConstructorArgReference(
|
||||
BeanNames.IS_NEW_STRATEGY_FACTORY);
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class);
|
||||
builder.addConstructorArgReference(MAPPING_CONTEXT_BEAN_NAME);
|
||||
return configureDefaultAuditHandlerAttributes(configuration, builder);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -86,20 +100,31 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!");
|
||||
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
|
||||
|
||||
registerInfrastructureBeanWithId(BeanDefinitionBuilder.rootBeanDefinition(AuditingEventListener.class)
|
||||
.addConstructorArgValue(auditingHandlerDefinition).getRawBeanDefinition(),
|
||||
BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder
|
||||
.rootBeanDefinition(AuditingEventListener.class);
|
||||
listenerBeanDefinitionBuilder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(
|
||||
getAuditingHandlerBeanName(), registry));
|
||||
|
||||
registerInfrastructureBeanWithId(listenerBeanDefinitionBuilder.getBeanDefinition(),
|
||||
AuditingEventListener.class.getName(), registry);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param registry, the {@link BeanDefinitionRegistry} to use to register an {@link IsNewStrategyFactory} to.
|
||||
* Register default bean definitions for a {@link MongoMappingContext} and an {@link IsNewStrategyFactory} in case we
|
||||
* don't find beans with the assumed names in the registry.
|
||||
*
|
||||
* @param registry the {@link BeanDefinitionRegistry} to use to register the components into.
|
||||
* @param source the source which the registered components shall be registered with
|
||||
*/
|
||||
private void registerIsNewStrategyFactoryIfNecessary(BeanDefinitionRegistry registry) {
|
||||
private void defaultDependenciesIfNecessary(BeanDefinitionRegistry registry, Object source) {
|
||||
|
||||
if (!registry.containsBeanDefinition(BeanNames.IS_NEW_STRATEGY_FACTORY)) {
|
||||
registry.registerBeanDefinition(BeanNames.IS_NEW_STRATEGY_FACTORY,
|
||||
BeanDefinitionBuilder.rootBeanDefinition(MappingContextIsNewStrategyFactory.class)
|
||||
.addConstructorArgReference(BeanNames.MAPPING_CONTEXT).getBeanDefinition());
|
||||
if (!registry.containsBeanDefinition(MAPPING_CONTEXT_BEAN_NAME)) {
|
||||
|
||||
RootBeanDefinition definition = new RootBeanDefinition(MongoMappingContext.class);
|
||||
definition.setRole(ROLE_INFRASTRUCTURE);
|
||||
definition.setSource(source);
|
||||
|
||||
registry.registerBeanDefinition(MAPPING_CONTEXT_BEAN_NAME, definition);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 by the original author(s).
* Copyright 2011-2014 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -54,7 +54,7 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
throws BeanDefinitionStoreException {

String id = super.resolveId(element, definition, parserContext);
return StringUtils.hasText(id) ? id : BeanNames.DB_FACTORY;
return StringUtils.hasText(id) ? id : BeanNames.DB_FACTORY_BEAN_NAME;
}

/*
@@ -103,7 +103,7 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
BeanComponentDefinition component = helper.getComponent(writeConcernPropertyEditorBuilder);
parserContext.registerBeanComponent(component);

return (AbstractBeanDefinition) helper.getComponentIdButFallback(dbFactoryBuilder, BeanNames.DB_FACTORY)
return (AbstractBeanDefinition) helper.getComponentIdButFallback(dbFactoryBuilder, BeanNames.DB_FACTORY_BEAN_NAME)
.getBeanDefinition();
}
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,9 +16,6 @@
package org.springframework.data.mongodb.config;

import org.springframework.beans.factory.xml.NamespaceHandlerSupport;
import org.springframework.data.mongodb.repository.config.MongoRepositoryConfigurationExtension;
import org.springframework.data.repository.config.RepositoryBeanDefinitionParser;
import org.springframework.data.repository.config.RepositoryConfigurationExtension;

/**
* {@link org.springframework.beans.factory.xml.NamespaceHandler} for Mongo DB configuration.
@@ -34,10 +31,6 @@ public class MongoNamespaceHandler extends NamespaceHandlerSupport {
*/
public void init() {

RepositoryConfigurationExtension extension = new MongoRepositoryConfigurationExtension();
RepositoryBeanDefinitionParser repositoryBeanDefinitionParser = new RepositoryBeanDefinitionParser(extension);

registerBeanDefinitionParser("repositories", repositoryBeanDefinitionParser);
registerBeanDefinitionParser("mapping-converter", new MappingMongoConverterParser());
registerBeanDefinitionParser("mongo", new MongoParser());
registerBeanDefinitionParser("db-factory", new MongoDbFactoryParser());
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -58,7 +58,7 @@ public class MongoParser implements BeanDefinitionParser {
MongoParsingUtils.parseMongoOptions(element, builder);
MongoParsingUtils.parseReplicaSet(element, builder);

String defaultedId = StringUtils.hasText(id) ? id : BeanNames.MONGO;
String defaultedId = StringUtils.hasText(id) ? id : BeanNames.MONGO_BEAN_NAME;

parserContext.pushContainingComponent(new CompositeComponentDefinition("Mongo", source));
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -35,6 +35,7 @@ import org.w3c.dom.Element;
|
||||
* {@link BeanDefinitionParser} to parse {@code template} elements into {@link BeanDefinition}s.
|
||||
*
|
||||
* @author Martin Baumgartner
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class MongoTemplateParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
@@ -47,7 +48,7 @@ class MongoTemplateParser extends AbstractBeanDefinitionParser {
|
||||
throws BeanDefinitionStoreException {
|
||||
|
||||
String id = super.resolveId(element, definition, parserContext);
|
||||
return StringUtils.hasText(id) ? id : BeanNames.MONGO_TEMPLATE;
|
||||
return StringUtils.hasText(id) ? id : BeanNames.MONGO_TEMPLATE_BEAN_NAME;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -68,7 +69,7 @@ class MongoTemplateParser extends AbstractBeanDefinitionParser {
|
||||
if (StringUtils.hasText(dbFactoryRef)) {
|
||||
mongoTemplateBuilder.addConstructorArgReference(dbFactoryRef);
|
||||
} else {
|
||||
mongoTemplateBuilder.addConstructorArgReference(BeanNames.DB_FACTORY);
|
||||
mongoTemplateBuilder.addConstructorArgReference(BeanNames.DB_FACTORY_BEAN_NAME);
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(converterRef)) {
|
||||
@@ -80,7 +81,7 @@ class MongoTemplateParser extends AbstractBeanDefinitionParser {
|
||||
BeanComponentDefinition component = helper.getComponent(writeConcernPropertyEditorBuilder);
|
||||
parserContext.registerBeanComponent(component);
|
||||
|
||||
return (AbstractBeanDefinition) helper.getComponentIdButFallback(mongoTemplateBuilder, BeanNames.MONGO_TEMPLATE)
|
||||
.getBeanDefinition();
|
||||
return (AbstractBeanDefinition) helper.getComponentIdButFallback(mongoTemplateBuilder,
|
||||
BeanNames.MONGO_TEMPLATE_BEAN_NAME).getBeanDefinition();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,12 +16,14 @@
package org.springframework.data.mongodb.config;

import java.beans.PropertyEditorSupport;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashSet;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;

import com.mongodb.ServerAddress;
@@ -35,6 +37,11 @@ import com.mongodb.ServerAddress;
*/
public class ServerAddressPropertyEditor extends PropertyEditorSupport {

/**
* A port is a number without a leading 0 at the end of the address that is proceeded by just a single :.
*/
private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";
private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address {} '{}'. Check your replica set configuration!";
private static final Logger LOG = LoggerFactory.getLogger(ServerAddressPropertyEditor.class);

/*
@@ -77,22 +84,53 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
*/
private ServerAddress parseServerAddress(String source) {

String[] hostAndPort = StringUtils.delimitedListToStringArray(source.trim(), ":");
if (!StringUtils.hasText(source)) {
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
return null;
}

if (!StringUtils.hasText(source) || hostAndPort.length > 2) {
LOG.warn("Could not parse address source '{}'. Check your replica set configuration!", source);
String[] hostAndPort = extractHostAddressAndPort(source.trim());

if (hostAndPort.length > 2) {
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
return null;
}

try {
return hostAndPort.length == 1 ? new ServerAddress(hostAndPort[0]) : new ServerAddress(hostAndPort[0],
Integer.parseInt(hostAndPort[1]));
InetAddress hostAddress = InetAddress.getByName(hostAndPort[0]);
Integer port = hostAndPort.length == 1 ? null : Integer.parseInt(hostAndPort[1]);

return port == null ? new ServerAddress(hostAddress) : new ServerAddress(hostAddress, port);
} catch (UnknownHostException e) {
LOG.warn("Could not parse host '{}'. Check your replica set configuration!", hostAndPort[0]);
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]);
} catch (NumberFormatException e) {
LOG.warn("Could not parse port '{}'. Check your replica set configuration!", hostAndPort[1]);
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]);
}

return null;
}

/**
* Extract the host and port from the given {@link String}.
*
* @param addressAndPortSource must not be {@literal null}.
* @return
*/
private String[] extractHostAddressAndPort(String addressAndPortSource) {

Assert.notNull(addressAndPortSource, "Address and port source must not be null!");

String[] hostAndPort = addressAndPortSource.split(HOST_PORT_SPLIT_PATTERN);
String hostAddress = hostAndPort[0];

if (isHostAddressInIPv6BracketNotation(hostAddress)) {
hostAndPort[0] = hostAddress.substring(1, hostAddress.length() - 1);
}

return hostAndPort;
}

private boolean isHostAddressInIPv6BracketNotation(String hostAddress) {
return hostAddress.startsWith("[") && hostAddress.endsWith("]");
}
}
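The split pattern above is easiest to follow with a couple of inputs. A minimal, hedged sketch using only the JDK; the sample addresses are made up for illustration and the constant is copied from the change above:

```java
import java.util.Arrays;

class HostPortSplitSketch {

	// Same pattern as in ServerAddressPropertyEditor: split on a ':' that is not preceded by another ':'
	// and that is followed only by a port number without a leading zero.
	private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";

	public static void main(String[] args) {

		// -> [localhost, 27017]
		System.out.println(Arrays.toString("localhost:27017".split(HOST_PORT_SPLIT_PATTERN)));

		// -> [[::1], 27017] -- the surrounding brackets are stripped by extractHostAddressAndPort(...)
		System.out.println(Arrays.toString("[::1]:27017".split(HOST_PORT_SPLIT_PATTERN)));

		// -> [::1] -- no split, so the whole value is treated as a host without a port
		System.out.println(Arrays.toString("::1".split(HOST_PORT_SPLIT_PATTERN)));
	}
}
```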
@@ -25,7 +25,7 @@ import com.mongodb.DBCursor;
interface CursorPreparer {

/**
* Prepare the given cursor (apply limits, skips and so on). Returns th eprepared cursor.
* Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
*
* @param cursor
*/
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,6 +18,8 @@ package org.springframework.data.mongodb.core;
|
||||
import static org.springframework.data.domain.Sort.Direction.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.dao.DataAccessException;
|
||||
@@ -36,11 +38,13 @@ import com.mongodb.MongoException;
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Komi Innocent
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
private static final Double ONE = Double.valueOf(1);
|
||||
private static final Double MINUS_ONE = Double.valueOf(-1);
|
||||
private static final Collection<String> TWO_D_IDENTIFIERS = Arrays.asList("2d", "2dsphere");
|
||||
|
||||
private final MongoOperations mongoOperations;
|
||||
private final String collectionName;
|
||||
@@ -140,8 +144,15 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
Object value = keyDbObject.get(key);
|
||||
|
||||
if ("2d".equals(value)) {
|
||||
if (TWO_D_IDENTIFIERS.contains(value)) {
|
||||
indexFields.add(IndexField.geo(key));
|
||||
} else if ("text".equals(value)) {
|
||||
|
||||
DBObject weights = (DBObject) ix.get("weights");
|
||||
for (String fieldName : weights.keySet()) {
|
||||
indexFields.add(IndexField.text(fieldName, Float.valueOf(weights.get(fieldName).toString())));
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
Double keyValue = new Double(value.toString());
|
||||
@@ -159,8 +170,8 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
boolean unique = ix.containsField("unique") ? (Boolean) ix.get("unique") : false;
|
||||
boolean dropDuplicates = ix.containsField("dropDups") ? (Boolean) ix.get("dropDups") : false;
|
||||
boolean sparse = ix.containsField("sparse") ? (Boolean) ix.get("sparse") : false;
|
||||
|
||||
indexInfoList.add(new IndexInfo(indexFields, name, unique, dropDuplicates, sparse));
|
||||
String language = ix.containsField("default_language") ? (String) ix.get("default_language") : "";
|
||||
indexInfoList.add(new IndexInfo(indexFields, name, unique, dropDuplicates, sparse, language));
|
||||
}
|
||||
|
||||
return indexInfoList;
|
||||
|
||||
@@ -49,7 +49,7 @@ public class MongoAction {
* @param collectionName the collection name, must not be {@literal null} or empty.
* @param entityType the POJO that is being operated against
* @param document the converted DBObject from the POJO or Spring Update object
* @param query the converted DBOjbect from the Spring Query object
* @param query the converted DBObject from the Spring Query object
*/
public MongoAction(WriteConcern defaultWriteConcern, MongoActionOperation mongoActionOperation,
String collectionName, Class<?> entityType, DBObject document, DBObject query) {
@@ -1,16 +1,34 @@
/*
* Copyright 2011-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;

import org.springframework.jmx.export.annotation.ManagedOperation;

/**
* @author Mark Pollack
* @author Oliver Gierke
*/
public interface MongoAdminOperations {

@ManagedOperation
public abstract void dropDatabase(String databaseName);
void dropDatabase(String databaseName);

@ManagedOperation
public abstract void createDatabase(String databaseName);
void createDatabase(String databaseName);

@ManagedOperation
public abstract String getDatabaseStats(String databaseName);

}
String getDatabaseStats(String databaseName);
}
@@ -23,11 +23,15 @@ import org.springframework.dao.InvalidDataAccessResourceUsageException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.UncategorizedMongoDbException;

import com.mongodb.MongoCursorNotFoundException;
import com.mongodb.MongoException;
import com.mongodb.MongoException.CursorNotFound;
import com.mongodb.MongoException.DuplicateKey;
import com.mongodb.MongoException.Network;
import com.mongodb.MongoInternalException;
import com.mongodb.MongoServerSelectionException;
import com.mongodb.MongoSocketException;
import com.mongodb.MongoTimeoutException;

/**
* Simple {@link PersistenceExceptionTranslator} for Mongo. Convert the given runtime exception to an appropriate
@@ -47,16 +51,23 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator

// Check for well-known MongoException subclasses.

// All other MongoExceptions
if (ex instanceof DuplicateKey) {
if (ex instanceof DuplicateKey || ex instanceof DuplicateKeyException) {
return new DuplicateKeyException(ex.getMessage(), ex);
}

if (ex instanceof Network) {
if (ex instanceof Network || ex instanceof MongoSocketException) {
return new DataAccessResourceFailureException(ex.getMessage(), ex);
}

if (ex instanceof CursorNotFound) {
if (ex instanceof CursorNotFound || ex instanceof MongoCursorNotFoundException) {
return new DataAccessResourceFailureException(ex.getMessage(), ex);
}

if (ex instanceof MongoServerSelectionException) {
return new DataAccessResourceFailureException(ex.getMessage(), ex);
}

if (ex instanceof MongoTimeoutException) {
return new DataAccessResourceFailureException(ex.getMessage(), ex);
}

@@ -64,6 +75,7 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
return new InvalidDataAccessResourceUsageException(ex.getMessage(), ex);
}

// All other MongoExceptions
if (ex instanceof MongoException) {

int code = ((MongoException) ex).getCode();
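For context, a hedged usage sketch of the translator after this change. The exception types mirror the imports above; the String constructor of MongoTimeoutException is assumed from the 2.x driver and not confirmed by this change set:

```java
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;

import com.mongodb.MongoTimeoutException;

class ExceptionTranslationSketch {

	public static void main(String[] args) {

		PersistenceExceptionTranslator translator = new MongoExceptionTranslator();

		// With the branches added above, a driver-side timeout is translated into Spring's
		// DataAccessResourceFailureException rather than falling through to the generic handling.
		DataAccessException translated = translator
				.translateExceptionIfPossible(new MongoTimeoutException("Timed out while waiting for a server"));

		System.out.println(translated.getClass().getSimpleName()); // DataAccessResourceFailureException
	}
}
```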
@@ -1,6 +1,6 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
*
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
@@ -19,12 +19,11 @@ import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.geo.GeoResult;
|
||||
import org.springframework.data.mongodb.core.geo.GeoResults;
|
||||
import org.springframework.data.mongodb.core.mapreduce.GroupBy;
|
||||
import org.springframework.data.mongodb.core.mapreduce.GroupByResults;
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
@@ -50,6 +49,8 @@ import com.mongodb.WriteResult;
|
||||
* @author Oliver Gierke
|
||||
* @author Tobias Trelle
|
||||
* @author Chuong Ngo
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public interface MongoOperations {
|
||||
|
||||
@@ -413,8 +414,10 @@ public interface MongoOperations {
|
||||
MapReduceOptions mapReduceOptions, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Returns {@link GeoResult} for all entities matching the given {@link NearQuery}. Will consider entity mapping
|
||||
* information to determine the collection the query is ran against.
|
||||
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Will consider entity mapping
|
||||
* information to determine the collection the query is ran against. Note, that MongoDB limits the number of results
|
||||
* by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of
|
||||
* results.
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
@@ -423,7 +426,9 @@ public interface MongoOperations {
|
||||
<T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Returns {@link GeoResult} for all entities matching the given {@link NearQuery}.
|
||||
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the
|
||||
* number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
|
||||
* particular number of results.
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
@@ -468,10 +473,32 @@ public interface MongoOperations {
*/
<T> T findOne(Query query, Class<T> entityClass, String collectionName);

/**
* Determine result of given {@link Query} contains at least one element.
*
* @param query the {@link Query} class that specifies the criteria used to find a record.
* @param collectionName name of the collection to check for objects.
* @return
*/
boolean exists(Query query, String collectionName);

/**
* Determine result of given {@link Query} contains at least one element.
*
* @param query the {@link Query} class that specifies the criteria used to find a record.
* @param entityClass the parameterized type.
* @return
*/
boolean exists(Query query, Class<?> entityClass);

/**
* Determine result of given {@link Query} contains at least one element.
*
* @param query the {@link Query} class that specifies the criteria used to find a record.
* @param entityClass the parameterized type.
* @param collectionName name of the collection to check for objects.
* @return
*/
boolean exists(Query query, Class<?> entityClass, String collectionName);
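A short usage sketch of the exists(…) overloads introduced above. MongoOperations is the interface being changed; the Person type and the lastname field are illustrative assumptions only:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Query;

class ExistsUsageSketch {

	// Uses the overload that takes the entity class so property names in the query get mapped.
	static boolean hasMatthews(MongoOperations operations) {

		Query query = new Query(where("lastname").is("Matthews"));
		return operations.exists(query, Person.class);
	}

	static class Person {
		String lastname;
	}
}
```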
/**

@@ -529,12 +556,58 @@ public interface MongoOperations {
*/
<T> T findById(Object id, Class<T> entityClass, String collectionName);

/**
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification.
* @param update the {@link Update} to apply on matching documents.
* @param entityClass the parameterized type.
* @return
*/
<T> T findAndModify(Query query, Update update, Class<T> entityClass);

/**
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification.
* @param update the {@link Update} to apply on matching documents.
* @param entityClass the parameterized type.
* @param collectionName the collection to query.
* @return
*/
<T> T findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);

/**
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
* {@link FindAndModifyOptions} into account.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification.
* @param update the {@link Update} to apply on matching documents.
* @param options the {@link FindAndModifyOptions} holding additional information.
* @param entityClass the parameterized type.
* @return
*/
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);

/**
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
* {@link FindAndModifyOptions} into account.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification.
* @param update the {@link Update} to apply on matching documents.
* @param options the {@link FindAndModifyOptions} holding additional information.
* @param entityClass the parameterized type.
* @param collectionName the collection to query.
* @return
*/
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
String collectionName);
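A short usage sketch of the findAndModify(…) overloads documented above; the Account type and its fields are illustrative assumptions, not taken from this change set:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.FindAndModifyOptions;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

class FindAndModifySketch {

	// Atomically increments the balance and returns the state of the document *after* the update,
	// because returnNew(true) is set on the options.
	static Account credit(MongoOperations operations, String accountId, long amount) {

		Query query = new Query(where("id").is(accountId));
		Update update = new Update().inc("balance", amount);

		return operations.findAndModify(query, update, new FindAndModifyOptions().returnNew(true), Account.class);
	}

	static class Account {
		String id;
		long balance;
	}
}
```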
@@ -583,14 +656,28 @@ public interface MongoOperations {
|
||||
long count(Query query, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection.
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
|
||||
* must solely consist of document field references as we lack type information to map potential property references
|
||||
* onto document fields. TO make sure the query gets mapped, use {@link #count(Query, Class, String)}.
|
||||
*
|
||||
* @param query
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @return
|
||||
* @see #count(Query, Class, String)
|
||||
*/
|
||||
long count(Query query, String collectionName);
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}.
|
||||
*
|
||||
* @param query
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
long count(Query query, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Insert the object into the collection for the entity type of the object to save.
|
||||
* <p/>
|
||||
@@ -598,9 +685,9 @@ public interface MongoOperations {
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Spring 3.0's new Type Conversion API.
|
||||
* See <a href="http://static.springsource.org/spring/docs/3.0.x/reference/validation.html#core-convert">Spring 3 Type
|
||||
* Conversion"</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert"
|
||||
* >Spring's Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -655,9 +742,9 @@ public interface MongoOperations {
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Spring 3.0's new Type Conversion API.
|
||||
* See <a href="http://static.springsource.org/spring/docs/3.0.x/reference/validation.html#core-convert">Spring 3 Type
|
||||
* Conversion"</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert"
|
||||
* >Spring's Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection
|
||||
*/
|
||||
@@ -672,9 +759,9 @@ public interface MongoOperations {
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Spring 3.0's new Type Cobnversion API.
|
||||
* See <a href="http://static.springsource.org/spring/docs/3.0.x/reference/validation.html#core-convert">Spring 3 Type
|
||||
* Conversion"</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Cobnversion API. See <a
|
||||
* http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert">Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection
|
||||
* @param collectionName name of the collection to store the object in
|
||||
@@ -795,7 +882,7 @@ public interface MongoOperations {
|
||||
*
|
||||
* @param object
|
||||
*/
|
||||
void remove(Object object);
|
||||
WriteResult remove(Object object);
|
||||
|
||||
/**
|
||||
* Removes the given object from the given collection.
|
||||
@@ -803,7 +890,7 @@ public interface MongoOperations {
|
||||
* @param object
|
||||
* @param collection must not be {@literal null} or empty.
|
||||
*/
|
||||
void remove(Object object, String collection);
|
||||
WriteResult remove(Object object, String collection);
|
||||
|
||||
/**
|
||||
* Remove all documents that match the provided query document criteria from the the collection used to store the
|
||||
@@ -812,9 +899,17 @@ public interface MongoOperations {
|
||||
* @param query
|
||||
* @param entityClass
|
||||
*/
|
||||
void remove(Query query, Class<?> entityClass);
|
||||
WriteResult remove(Query query, Class<?> entityClass);
|
||||
|
||||
void remove(Query query, Class<?> entityClass, String collectionName);
|
||||
/**
|
||||
* Remove all documents that match the provided query document criteria from the the collection used to store the
|
||||
* entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query.
|
||||
*
|
||||
* @param query
|
||||
* @param entityClass
|
||||
* @param collectionName
|
||||
*/
|
||||
WriteResult remove(Query query, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Remove all documents from the specified collection that match the provided query document criteria. There is no
|
||||
@@ -823,7 +918,40 @@ public interface MongoOperations {
|
||||
* @param query the query document that specifies the criteria used to remove a record
|
||||
* @param collectionName name of the collection where the objects will removed
|
||||
*/
|
||||
void remove(Query query, String collectionName);
|
||||
WriteResult remove(Query query, String collectionName);
|
||||
|
||||
/**
|
||||
* Returns and removes all documents form the specified collection that match the provided query.
|
||||
*
|
||||
* @param query
|
||||
* @param collectionName
|
||||
* @return
|
||||
* @since 1.5
|
||||
*/
|
||||
<T> List<T> findAllAndRemove(Query query, String collectionName);
|
||||
|
||||
/**
|
||||
* Returns and removes all documents matching the given query form the collection used to store the entityClass.
|
||||
*
|
||||
* @param query
|
||||
* @param entityClass
|
||||
* @return
|
||||
* @since 1.5
|
||||
*/
|
||||
<T> List<T> findAllAndRemove(Query query, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Returns and removes all documents that match the provided query document criteria from the the collection used to
|
||||
* store the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in
|
||||
* the query.
|
||||
*
|
||||
* @param query
|
||||
* @param entityClass
|
||||
* @param collectionName
|
||||
* @return
|
||||
* @since 1.5
|
||||
*/
|
||||
<T> List<T> findAllAndRemove(Query query, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Returns the underlying {@link MongoConverter}.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -30,27 +30,28 @@ import com.mongodb.MongoOptions;
|
||||
* @author Mike Saavedra
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, InitializingBean {
|
||||
|
||||
@SuppressWarnings("deprecation")//
|
||||
private final MongoOptions MONGO_OPTIONS = new MongoOptions();
|
||||
private static final MongoOptions DEFAULT_MONGO_OPTIONS = new MongoOptions();
|
||||
|
||||
private int connectionsPerHost = MONGO_OPTIONS.connectionsPerHost;
|
||||
|
||||
private int threadsAllowedToBlockForConnectionMultiplier = MONGO_OPTIONS.threadsAllowedToBlockForConnectionMultiplier;
|
||||
private int maxWaitTime = MONGO_OPTIONS.maxWaitTime;
|
||||
private int connectTimeout = MONGO_OPTIONS.connectTimeout;
|
||||
private int socketTimeout = MONGO_OPTIONS.socketTimeout;
|
||||
private boolean socketKeepAlive = MONGO_OPTIONS.socketKeepAlive;
|
||||
private boolean autoConnectRetry = MONGO_OPTIONS.autoConnectRetry;
|
||||
private long maxAutoConnectRetryTime = MONGO_OPTIONS.maxAutoConnectRetryTime;
|
||||
private int writeNumber;
|
||||
private int writeTimeout;
|
||||
private boolean writeFsync;
|
||||
@SuppressWarnings("deprecation") private boolean slaveOk = MONGO_OPTIONS.slaveOk;
|
||||
private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.connectionsPerHost;
|
||||
private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS.threadsAllowedToBlockForConnectionMultiplier;
|
||||
private int maxWaitTime = DEFAULT_MONGO_OPTIONS.maxWaitTime;
|
||||
private int connectTimeout = DEFAULT_MONGO_OPTIONS.connectTimeout;
|
||||
private int socketTimeout = DEFAULT_MONGO_OPTIONS.socketTimeout;
|
||||
private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.socketKeepAlive;
|
||||
private boolean autoConnectRetry = DEFAULT_MONGO_OPTIONS.autoConnectRetry;
|
||||
private long maxAutoConnectRetryTime = DEFAULT_MONGO_OPTIONS.maxAutoConnectRetryTime;
|
||||
private int writeNumber = DEFAULT_MONGO_OPTIONS.w;
|
||||
private int writeTimeout = DEFAULT_MONGO_OPTIONS.wtimeout;
|
||||
private boolean writeFsync = DEFAULT_MONGO_OPTIONS.fsync;
|
||||
private boolean slaveOk = DEFAULT_MONGO_OPTIONS.slaveOk;
|
||||
private boolean ssl;
|
||||
private SSLSocketFactory sslSocketFactory;
|
||||
|
||||
private MongoOptions options;
|
||||
|
||||
/**
|
||||
* Configures the maximum number of connections allowed per host until we will block.
|
||||
*
|
||||
@@ -197,24 +198,28 @@ public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, Initi
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public void afterPropertiesSet() {
|
||||
|
||||
MONGO_OPTIONS.connectionsPerHost = connectionsPerHost;
|
||||
MONGO_OPTIONS.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier;
|
||||
MONGO_OPTIONS.maxWaitTime = maxWaitTime;
|
||||
MONGO_OPTIONS.connectTimeout = connectTimeout;
|
||||
MONGO_OPTIONS.socketTimeout = socketTimeout;
|
||||
MONGO_OPTIONS.socketKeepAlive = socketKeepAlive;
|
||||
MONGO_OPTIONS.autoConnectRetry = autoConnectRetry;
|
||||
MONGO_OPTIONS.maxAutoConnectRetryTime = maxAutoConnectRetryTime;
|
||||
MONGO_OPTIONS.slaveOk = slaveOk;
|
||||
MONGO_OPTIONS.w = writeNumber;
|
||||
MONGO_OPTIONS.wtimeout = writeTimeout;
|
||||
MONGO_OPTIONS.fsync = writeFsync;
|
||||
MongoOptions options = new MongoOptions();
|
||||
|
||||
options.connectionsPerHost = connectionsPerHost;
|
||||
options.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier;
|
||||
options.maxWaitTime = maxWaitTime;
|
||||
options.connectTimeout = connectTimeout;
|
||||
options.socketTimeout = socketTimeout;
|
||||
options.socketKeepAlive = socketKeepAlive;
|
||||
options.autoConnectRetry = autoConnectRetry;
|
||||
options.maxAutoConnectRetryTime = maxAutoConnectRetryTime;
|
||||
options.slaveOk = slaveOk;
|
||||
options.w = writeNumber;
|
||||
options.wtimeout = writeTimeout;
|
||||
options.fsync = writeFsync;
|
||||
|
||||
if (ssl) {
|
||||
MONGO_OPTIONS.setSocketFactory(sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory.getDefault());
|
||||
options.setSocketFactory(sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory.getDefault());
|
||||
}
|
||||
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -222,7 +227,7 @@ public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, Initi
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObject()
|
||||
*/
|
||||
public MongoOptions getObject() {
|
||||
return MONGO_OPTIONS;
|
||||
return this.options;
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -1,8 +1,26 @@
|
||||
/*
|
||||
* Copyright 2012-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.transaction.support.ResourceHolder;
|
||||
import org.springframework.transaction.support.ResourceHolderSynchronization;
|
||||
|
||||
/**
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class MongoSynchronization extends ResourceHolderSynchronization<ResourceHolder, Object> {
|
||||
|
||||
public MongoSynchronization(ResourceHolder resourceHolder, Object resourceKey) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -27,6 +27,7 @@ import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Scanner;
|
||||
import java.util.Set;
|
||||
|
||||
@@ -47,9 +48,13 @@ import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.OptimisticLockingFailureException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.data.convert.EntityReader;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.geo.Metric;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.BeanWrapper;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
@@ -67,10 +72,6 @@ import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoWriter;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.geo.Distance;
|
||||
import org.springframework.data.mongodb.core.geo.GeoResult;
|
||||
import org.springframework.data.mongodb.core.geo.GeoResults;
|
||||
import org.springframework.data.mongodb.core.geo.Metric;
|
||||
import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
@@ -95,6 +96,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.jca.cci.core.ConnectionCallback;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ResourceUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
@@ -127,7 +129,9 @@ import com.mongodb.util.JSONParseException;
|
||||
* @author Sebastian Herold
|
||||
* @author Thomas Darimont
|
||||
* @author Chuong Ngo
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoTemplate.class);
|
||||
@@ -331,7 +335,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
CommandResult result = execute(new DbCallback<CommandResult>() {
|
||||
public CommandResult doInDB(DB db) throws MongoException, DataAccessException {
|
||||
return db.command(command, options);
|
||||
return readPreference != null ? db.command(command, readPreference) : db.command(command);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -351,7 +355,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
public void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch) {
|
||||
executeQuery(query, collectionName, dch, new QueryCursorPreparer(query));
|
||||
executeQuery(query, collectionName, dch, new QueryCursorPreparer(query, null));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -369,12 +373,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
Assert.notNull(query);
|
||||
|
||||
DBObject queryObject = query.getQueryObject();
|
||||
DBObject queryObject = queryMapper.getMappedObject(query.getQueryObject(), null);
|
||||
DBObject sortObject = query.getSortObject();
|
||||
DBObject fieldsObject = query.getFieldsObject();
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Executing query: %s sort: %s fields: %s in collection: $s",
|
||||
LOGGER.debug(String.format("Executing query: %s sort: %s fields: %s in collection: %s",
|
||||
serializeToJsonSafely(queryObject), sortObject, fieldsObject, collectionName));
|
||||
}
|
||||
|
||||
@@ -529,7 +533,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass,
|
||||
new QueryCursorPreparer(query));
|
||||
new QueryCursorPreparer(query, entityClass));
|
||||
}
|
||||
|
||||
public <T> T findById(Object id, Class<T> entityClass) {
|
||||
@@ -562,7 +566,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
BasicDBObject command = new BasicDBObject("geoNear", collection);
|
||||
command.putAll(near.toDBObject());
|
||||
|
||||
CommandResult commandResult = executeCommand(command);
|
||||
CommandResult commandResult = executeCommand(command, getDb().getOptions());
|
||||
List<Object> results = (List<Object>) commandResult.get("results");
|
||||
results = results == null ? Collections.emptyList() : results;
|
||||
|
||||
@@ -611,8 +615,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
public <T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
String collectionName) {
|
||||
return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(), query.getSortObject(),
|
||||
entityClass, update, options);
|
||||
return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(),
|
||||
getMappedSortObject(query, entityClass), entityClass, update, options);
|
||||
}
|
||||
|
||||
// Find methods that take a Query to express the query and that return a single object that is also removed from the
|
||||
@@ -623,8 +627,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
public <T> T findAndRemove(Query query, Class<T> entityClass, String collectionName) {
|
||||
return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(), query.getSortObject(),
|
||||
entityClass);
|
||||
|
||||
return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(),
|
||||
getMappedSortObject(query, entityClass), entityClass);
|
||||
}
|
||||
|
||||
public long count(Query query, Class<?> entityClass) {
|
||||
@@ -636,7 +641,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return count(query, null, collectionName);
|
||||
}
|
||||
|
||||
private long count(Query query, Class<?> entityClass, String collectionName) {
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
|
||||
*/
|
||||
public long count(Query query, Class<?> entityClass, String collectionName) {
|
||||
|
||||
Assert.hasText(collectionName);
|
||||
final DBObject dbObject = query == null ? null : queryMapper.getMappedObject(query.getQueryObject(),
|
||||
@@ -740,8 +749,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
MongoPersistentEntity<?> mongoPersistentEntity = getPersistentEntity(entity.getClass());
|
||||
|
||||
if (mongoPersistentEntity != null && mongoPersistentEntity.hasVersionProperty()) {
|
||||
BeanWrapper<PersistentEntity<Object, ?>, Object> wrapper = BeanWrapper.create(entity,
|
||||
this.mongoConverter.getConversionService());
|
||||
BeanWrapper<Object> wrapper = BeanWrapper.create(entity, this.mongoConverter.getConversionService());
|
||||
wrapper.setProperty(mongoPersistentEntity.getVersionProperty(), 0);
|
||||
}
|
||||
}
|
||||
@@ -759,27 +767,33 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
protected <T> void doInsertAll(Collection<? extends T> listToSave, MongoWriter<T> writer) {
|
||||
Map<String, List<T>> objs = new HashMap<String, List<T>>();
|
||||
|
||||
for (T o : listToSave) {
|
||||
Map<String, List<T>> elementsByCollection = new HashMap<String, List<T>>();
|
||||
|
||||
for (T element : listToSave) {
|
||||
|
||||
if (element == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(element.getClass());
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(o.getClass());
|
||||
if (entity == null) {
|
||||
throw new InvalidDataAccessApiUsageException("No Persitent Entity information found for the class "
|
||||
+ o.getClass().getName());
|
||||
throw new InvalidDataAccessApiUsageException("No PersistentEntity information found for " + element.getClass());
|
||||
}
|
||||
|
||||
String collection = entity.getCollection();
|
||||
List<T> collectionElements = elementsByCollection.get(collection);
|
||||
|
||||
List<T> objList = objs.get(collection);
|
||||
if (null == objList) {
|
||||
objList = new ArrayList<T>();
|
||||
objs.put(collection, objList);
|
||||
if (null == collectionElements) {
|
||||
collectionElements = new ArrayList<T>();
|
||||
elementsByCollection.put(collection, collectionElements);
|
||||
}
|
||||
objList.add(o);
|
||||
|
||||
collectionElements.add(element);
|
||||
}
|
||||
|
||||
for (Map.Entry<String, List<T>> entry : objs.entrySet()) {
|
||||
for (Map.Entry<String, List<T>> entry : elementsByCollection.entrySet()) {
|
||||
doInsertBatch(entry.getKey(), entry.getValue(), this.mongoConverter);
|
||||
}
|
||||
}
|
||||
@@ -835,12 +849,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
private <T> void doSaveVersioned(T objectToSave, MongoPersistentEntity<?> entity, String collectionName) {
|
||||
|
||||
BeanWrapper<PersistentEntity<T, ?>, T> beanWrapper = BeanWrapper.create(objectToSave,
|
||||
this.mongoConverter.getConversionService());
|
||||
BeanWrapper<T> beanWrapper = BeanWrapper.create(objectToSave, this.mongoConverter.getConversionService());
|
||||
MongoPersistentProperty idProperty = entity.getIdProperty();
|
||||
MongoPersistentProperty versionProperty = entity.getVersionProperty();
|
||||
|
||||
Number version = beanWrapper.getProperty(versionProperty, Number.class, !versionProperty.usePropertyAccess());
|
||||
Number version = beanWrapper.getProperty(versionProperty, Number.class);
|
||||
|
||||
// Fresh instance -> initialize version property
|
||||
if (version == null) {
|
||||
@@ -854,7 +867,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
Query query = new Query(Criteria.where(idProperty.getName()).is(id).and(versionProperty.getName()).is(version));
|
||||
|
||||
// Bump version number
|
||||
Number number = beanWrapper.getProperty(versionProperty, Number.class, false);
|
||||
Number number = beanWrapper.getProperty(versionProperty, Number.class);
|
||||
beanWrapper.setProperty(versionProperty, number.longValue() + 1);
|
||||
|
||||
BasicDBObject dbObject = new BasicDBObject();
|
||||
@@ -996,14 +1009,16 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
MongoPersistentEntity<?> entity = entityClass == null ? null : getPersistentEntity(entityClass);
|
||||
|
||||
increaseVersionForUpdateIfNecessary(entity, update);
|
||||
|
||||
DBObject queryObj = query == null ? new BasicDBObject() : queryMapper.getMappedObject(query.getQueryObject(),
|
||||
entity);
|
||||
DBObject updateObj = update == null ? new BasicDBObject() : updateMapper.getMappedObject(
|
||||
update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Calling update using query: " + queryObj + " and update: " + updateObj + " in collection: "
|
||||
+ collectionName);
|
||||
LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s",
|
||||
serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName));
|
||||
}
|
||||
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName,
|
||||
@@ -1013,7 +1028,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
: collection.update(queryObj, updateObj, upsert, multi, writeConcernToUse);
|
||||
|
||||
if (entity != null && entity.hasVersionProperty() && !multi) {
|
||||
if (writeResult.getN() == 0) {
|
||||
if (writeResult.getN() == 0 && dbObjectContainsVersionProperty(queryObj, entity)) {
|
||||
throw new OptimisticLockingFailureException("Optimistic lock exception on saving entity: "
|
||||
+ updateObj.toMap().toString() + " to collection " + collectionName);
|
||||
}
|
||||
@@ -1025,24 +1040,72 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
});
|
||||
}
|
||||
|
||||
public void remove(Object object) {
|
||||
private void increaseVersionForUpdateIfNecessary(MongoPersistentEntity<?> persistentEntity, Update update) {
|
||||
|
||||
if (object == null) {
|
||||
return;
|
||||
if (persistentEntity != null && persistentEntity.hasVersionProperty()) {
|
||||
String versionFieldName = persistentEntity.getVersionProperty().getFieldName();
|
||||
if (!update.modifies(versionFieldName)) {
|
||||
update.inc(versionFieldName, 1L);
|
||||
}
|
||||
}
|
||||
|
||||
remove(getIdQueryFor(object), object.getClass());
|
||||
}
|
||||
|
||||
public void remove(Object object, String collection) {
|
||||
private boolean dbObjectContainsVersionProperty(DBObject dbObject, MongoPersistentEntity<?> persistentEntity) {
|
||||
|
||||
if (persistentEntity == null || !persistentEntity.hasVersionProperty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return dbObject.containsField(persistentEntity.getVersionProperty().getFieldName());
|
||||
}
|
||||
|
||||
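The hunks above initialize a fresh entity's version to 0, bump it on every versioned save or update, and only raise `OptimisticLockingFailureException` when the update query actually carried the version field. A rough sketch of what that looks like from the caller's side; `Account`, the field names, and the retry handling are assumptions, and a running MongoDB plus configured template are required.

```java
import org.springframework.dao.OptimisticLockingFailureException;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;
import org.springframework.data.mongodb.core.MongoOperations;

class OptimisticLockingSketch {

	// Hypothetical versioned document.
	static class Account {
		@Id String id;
		@Version Long version; // set to 0 on first save, incremented on every subsequent save/update
		double balance;
	}

	static void deposit(MongoOperations template, String accountId, double amount) {
		Account account = template.findById(accountId, Account.class);
		account.balance += amount;
		try {
			// save() issues an update keyed on id *and* version; if another writer saved the same
			// document in between, getN() is 0 and the template raises the exception handled below.
			template.save(account);
		} catch (OptimisticLockingFailureException e) {
			// reload and retry, or surface the conflict to the caller
		}
	}
}
```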
public WriteResult remove(Object object) {
|
||||
|
||||
if (object == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return remove(getIdQueryFor(object), object.getClass());
|
||||
}
|
||||
|
||||
public WriteResult remove(Object object, String collection) {
|
||||
|
||||
Assert.hasText(collection);
|
||||
|
||||
if (object == null) {
|
||||
return;
|
||||
return null;
|
||||
}
|
||||
|
||||
doRemove(collection, getIdQueryFor(object), object.getClass());
|
||||
return doRemove(collection, getIdQueryFor(object), object.getClass());
|
||||
}
|
||||
|
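With the change above, the `remove(...)` overloads return the driver's `WriteResult` instead of `void`, so callers can inspect how many documents were affected. A small sketch; the collection name and field are made up.

```java
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.WriteResult;

class RemoveResultSketch {

	// "template" is assumed to be a configured MongoTemplate, "people" a hypothetical collection.
	static int removeInactive(MongoOperations template) {
		WriteResult result = template.remove(new Query(Criteria.where("active").is(false)), "people");
		// getN() on the 2.x driver reports the number of removed documents.
		return result.getN();
	}
}
```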
||||
/**
|
||||
* Returns {@link Entry} containing the field name of the id property as {@link Entry#getKey()} and the {@link Id}s
|
||||
* property value as its {@link Entry#getValue()}.
|
||||
*
|
||||
* @param object
|
||||
* @return
|
||||
*/
|
||||
private Entry<String, Object> extractIdPropertyAndValue(Object object) {
|
||||
|
||||
Assert.notNull(object, "Id cannot be extracted from 'null'.");
|
||||
|
||||
Class<?> objectType = object.getClass();
|
||||
|
||||
if (object instanceof DBObject) {
|
||||
return Collections.singletonMap(ID_FIELD, ((DBObject) object).get(ID_FIELD)).entrySet().iterator().next();
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(objectType);
|
||||
MongoPersistentProperty idProp = entity == null ? null : entity.getIdProperty();
|
||||
|
||||
if (idProp == null) {
|
||||
throw new MappingException("No id property found for object of type " + objectType);
|
||||
}
|
||||
|
||||
Object idValue = BeanWrapper.create(object, mongoConverter.getConversionService())
|
||||
.getProperty(idProp, Object.class);
|
||||
return Collections.singletonMap(idProp.getFieldName(), idValue).entrySet().iterator().next();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1053,21 +1116,31 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
*/
|
||||
private Query getIdQueryFor(Object object) {
|
||||
|
||||
Assert.notNull(object);
|
||||
Entry<String, Object> id = extractIdPropertyAndValue(object);
|
||||
return new Query(where(id.getKey()).is(id.getValue()));
|
||||
}
|
||||
|
||||
Class<?> objectType = object.getClass();
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(objectType);
|
||||
MongoPersistentProperty idProp = entity == null ? null : entity.getIdProperty();
|
||||
/**
|
||||
* Returns a {@link Query} for the given entities by their ids.
|
||||
*
|
||||
* @param objects must not be {@literal null} or {@literal empty}.
|
||||
* @return
|
||||
*/
|
||||
private Query getIdInQueryFor(Collection<?> objects) {
|
||||
|
||||
if (idProp == null) {
|
||||
throw new MappingException("No id property found for object of type " + objectType);
|
||||
Assert.notEmpty(objects, "Cannot create Query for empty collection.");
|
||||
|
||||
Iterator<?> it = objects.iterator();
|
||||
Entry<String, Object> firstEntry = extractIdPropertyAndValue(it.next());
|
||||
|
||||
ArrayList<Object> ids = new ArrayList<Object>(objects.size());
|
||||
ids.add(firstEntry.getValue());
|
||||
|
||||
while (it.hasNext()) {
|
||||
ids.add(extractIdPropertyAndValue(it.next()).getValue());
|
||||
}
|
||||
|
||||
ConversionService service = mongoConverter.getConversionService();
|
||||
Object idProperty = null;
|
||||
|
||||
idProperty = BeanWrapper.create(object, service).getProperty(idProp, Object.class, true);
|
||||
return new Query(where(idProp.getFieldName()).is(idProperty));
|
||||
return new Query(where(firstEntry.getKey()).in(ids));
|
||||
}
|
||||
|
||||
private void assertUpdateableIdIfNotSet(Object entity) {
|
||||
@@ -1080,7 +1153,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
ConversionService service = mongoConverter.getConversionService();
|
||||
Object idValue = BeanWrapper.create(entity, service).getProperty(idProperty, Object.class, true);
|
||||
Object idValue = BeanWrapper.create(entity, service).getProperty(idProperty, Object.class);
|
||||
|
||||
if (idValue == null && !MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(idProperty.getType())) {
|
||||
throw new InvalidDataAccessApiUsageException(String.format(
|
||||
@@ -1089,19 +1162,19 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
}
|
||||
|
||||
public void remove(Query query, String collectionName) {
|
||||
remove(query, null, collectionName);
|
||||
public WriteResult remove(Query query, String collectionName) {
|
||||
return remove(query, null, collectionName);
|
||||
}
|
||||
|
||||
public void remove(Query query, Class<?> entityClass) {
|
||||
remove(query, entityClass, determineCollectionName(entityClass));
|
||||
public WriteResult remove(Query query, Class<?> entityClass) {
|
||||
return remove(query, entityClass, determineCollectionName(entityClass));
|
||||
}
|
||||
|
||||
public void remove(Query query, Class<?> entityClass, String collectionName) {
|
||||
doRemove(collectionName, query, entityClass);
|
||||
public WriteResult remove(Query query, Class<?> entityClass, String collectionName) {
|
||||
return doRemove(collectionName, query, entityClass);
|
||||
}
|
||||
|
||||
protected <T> void doRemove(final String collectionName, final Query query, final Class<T> entityClass) {
|
||||
protected <T> WriteResult doRemove(final String collectionName, final Query query, final Class<T> entityClass) {
|
||||
|
||||
if (query == null) {
|
||||
throw new InvalidDataAccessApiUsageException("Query passed in to remove can't be null!");
|
||||
@@ -1112,8 +1185,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
final DBObject queryObject = query.getQueryObject();
|
||||
final MongoPersistentEntity<?> entity = getPersistentEntity(entityClass);
|
||||
|
||||
execute(collectionName, new CollectionCallback<Void>() {
|
||||
public Void doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
return execute(collectionName, new CollectionCallback<WriteResult>() {
|
||||
public WriteResult doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
|
||||
maybeEmitEvent(new BeforeDeleteEvent<T>(queryObject, entityClass));
|
||||
|
||||
@@ -1124,16 +1197,18 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Remove using query: {} in collection: {}.", new Object[] { dboq, collection.getName() });
|
||||
LOGGER.debug("Remove using query: {} in collection: {}.", new Object[] { serializeToJsonSafely(dboq),
|
||||
collection.getName() });
|
||||
}
|
||||
|
||||
WriteResult wr = writeConcernToUse == null ? collection.remove(dboq) : collection.remove(dboq,
|
||||
writeConcernToUse);
|
||||
|
||||
handleAnyWriteResultErrors(wr, dboq, MongoActionOperation.REMOVE);
|
||||
|
||||
maybeEmitEvent(new AfterDeleteEvent<T>(queryObject, entityClass));
|
||||
|
||||
return null;
|
||||
return wr;
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -1167,6 +1242,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
public <T> MapReduceResults<T> mapReduce(Query query, String inputCollectionName, String mapFunction,
|
||||
String reduceFunction, MapReduceOptions mapReduceOptions, Class<T> entityClass) {
|
||||
|
||||
String mapFunc = replaceWithResourceIfNecessary(mapFunction);
|
||||
String reduceFunc = replaceWithResourceIfNecessary(reduceFunction);
|
||||
DBCollection inputCollection = getCollection(inputCollectionName);
|
||||
@@ -1191,12 +1267,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
MapReduceOutput mapReduceOutput = new MapReduceOutput(inputCollection, commandObject, commandResult);
|
||||
List<T> mappedResults = new ArrayList<T>();
|
||||
DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass);
|
||||
|
||||
for (DBObject dbObject : mapReduceOutput.results()) {
|
||||
mappedResults.add(callback.doWith(dbObject));
|
||||
}
|
||||
|
||||
MapReduceResults<T> mapReduceResult = new MapReduceResults<T>(mappedResults, commandResult);
|
||||
return mapReduceResult;
|
||||
return new MapReduceResults<T>(mappedResults, commandResult);
|
||||
}
|
||||
|
||||
public <T> GroupByResults<T> group(String inputCollectionName, GroupBy groupBy, Class<T> entityClass) {
|
||||
@@ -1250,15 +1326,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
Iterable<DBObject> resultSet = (Iterable<DBObject>) commandResult.get("retval");
|
||||
|
||||
List<T> mappedResults = new ArrayList<T>();
|
||||
DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass);
|
||||
|
||||
for (DBObject dbObject : resultSet) {
|
||||
mappedResults.add(callback.doWith(dbObject));
|
||||
}
|
||||
GroupByResults<T> groupByResult = new GroupByResults<T>(mappedResults, commandResult);
|
||||
return groupByResult;
|
||||
|
||||
return new GroupByResults<T>(mappedResults, commandResult);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -1289,6 +1364,54 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return aggregate(aggregation, collectionName, outputType, null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public <T> List<T> findAllAndRemove(Query query, String collectionName) {
|
||||
return findAndRemove(query, null, collectionName);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T> List<T> findAllAndRemove(Query query, Class<T> entityClass) {
|
||||
return findAllAndRemove(query, entityClass, determineCollectionName(entityClass));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public <T> List<T> findAllAndRemove(Query query, Class<T> entityClass, String collectionName) {
|
||||
return doFindAndDelete(collectionName, query, entityClass);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve and remove all documents matching the given {@code query} by calling {@link #find(Query, Class, String)}
|
||||
* and {@link #remove(Query, Class, String)}, whereas the {@link Query} for {@link #remove(Query, Class, String)} is
|
||||
* constructed out of the find result.
|
||||
*
|
||||
* @param collectionName
|
||||
* @param query
|
||||
* @param entityClass
|
||||
* @return
|
||||
*/
|
||||
protected <T> List<T> doFindAndDelete(String collectionName, Query query, Class<T> entityClass) {
|
||||
|
||||
List<T> result = find(query, entityClass, collectionName);
|
||||
|
||||
if (!CollectionUtils.isEmpty(result)) {
|
||||
remove(getIdInQueryFor(result), entityClass, collectionName);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
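The new `findAllAndRemove` methods above load the matching documents and then delete them with an id `$in` query built by `doFindAndDelete`. A usage sketch under assumed names (`LogEntry`, `template`):

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import java.util.List;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Query;

class FindAllAndRemoveSketch {

	// Hypothetical document type.
	static class LogEntry {
		String id;
		String level;
	}

	static List<LogEntry> purgeDebugEntries(MongoOperations template) {
		// Returns the removed documents: they are fetched first, then deleted via an
		// "_id $in (...)" query assembled from the loaded ids.
		return template.findAllAndRemove(new Query(where("level").is("DEBUG")), LogEntry.class);
	}
}
```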
|
||||
protected <O> AggregationResults<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType,
|
||||
AggregationOperationContext context) {
|
||||
|
||||
@@ -1303,20 +1426,35 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command));
|
||||
}
|
||||
|
||||
CommandResult commandResult = executeCommand(command);
|
||||
CommandResult commandResult = executeCommand(command, getDb().getOptions());
|
||||
handleCommandError(commandResult, command);
|
||||
|
||||
// map results
|
||||
return new AggregationResults<O>(returnPotentiallyMappedResults(outputType, commandResult), commandResult);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the potentially mapped results of the given {@commandResult} contained some.
|
||||
*
|
||||
* @param outputType
|
||||
* @param commandResult
|
||||
* @return
|
||||
*/
|
||||
private <O> List<O> returnPotentiallyMappedResults(Class<O> outputType, CommandResult commandResult) {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
Iterable<DBObject> resultSet = (Iterable<DBObject>) commandResult.get("result");
|
||||
List<O> mappedResults = new ArrayList<O>();
|
||||
if (resultSet == null) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
DbObjectCallback<O> callback = new UnwrapAndReadDbObjectCallback<O>(mongoConverter, outputType);
|
||||
|
||||
List<O> mappedResults = new ArrayList<O>();
|
||||
for (DBObject dbObject : resultSet) {
|
||||
mappedResults.add(callback.doWith(dbObject));
|
||||
}
|
||||
|
||||
return new AggregationResults<O>(mappedResults, commandResult);
|
||||
return mappedResults;
|
||||
}
|
||||
|
||||
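For context on the `aggregate(...)` path changed above (command executed with the DB's options, `result` array mapped defensively), here is a rough pipeline sketch. The collection name `"orders"` and the field names are invented; the output is left as raw `DBObject`s to keep the example self-contained.

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.group;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.match;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.query.Criteria;

import com.mongodb.DBObject;

class AggregateSketch {

	static AggregationResults<DBObject> totalPerCustomer(MongoOperations template) {
		// Pipeline: $match on status, then $group summing "amount" per "customerId".
		Aggregation aggregation = newAggregation(
				match(Criteria.where("status").is("COMPLETED")),
				group("customerId").sum("amount").as("total"));
		return template.aggregate(aggregation, "orders", DBObject.class);
	}
}
```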
protected String replaceWithResourceIfNecessary(String function) {
|
||||
@@ -1348,13 +1486,13 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
"Can not use skip or field specification with map reduce operations");
|
||||
}
|
||||
if (query.getQueryObject() != null) {
|
||||
copyMapReduceOptions.put("query", query.getQueryObject());
|
||||
copyMapReduceOptions.put("query", queryMapper.getMappedObject(query.getQueryObject(), null));
|
||||
}
|
||||
if (query.getLimit() > 0) {
|
||||
copyMapReduceOptions.put("limit", query.getLimit());
|
||||
}
|
||||
if (query.getSortObject() != null) {
|
||||
copyMapReduceOptions.put("sort", query.getSortObject());
|
||||
copyMapReduceOptions.put("sort", queryMapper.getMappedObject(query.getSortObject(), null));
|
||||
}
|
||||
}
|
||||
return copyMapReduceOptions;
|
||||
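The hunk above runs the map-reduce `query` and `sort` documents through the `QueryMapper` before they are copied into the command. A hedged sketch of a map-reduce call; the collection name, the `WordCount` value type, and the classpath-resolved `map.js`/`reduce.js` scripts are all assumptions.

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.mapreduce.MapReduceResults;
import org.springframework.data.mongodb.core.query.Query;

class MapReduceSketch {

	// Hypothetical value type for the map-reduce output.
	static class WordCount {
		String id;
		long value;
	}

	static MapReduceResults<WordCount> countWords(MongoOperations template) {
		// The filter below is mapped before it becomes the "query" option of the mapreduce command;
		// "classpath:" functions are resolved by replaceWithResourceIfNecessary(...).
		return template.mapReduce(new Query(where("lang").is("en")), "articles",
				"classpath:map.js", "classpath:reduce.js", WordCount.class);
	}
}
```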
@@ -1489,12 +1627,13 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
CursorPreparer preparer, DbObjectCallback<T> objectCallback) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
DBObject mappedFields = fields == null ? null : queryMapper.getMappedObject(fields, entity);
|
||||
|
||||
DBObject mappedFields = queryMapper.getMappedFields(fields, entity);
|
||||
DBObject mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), mappedFields, entityClass, collectionName));
|
||||
serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName));
|
||||
}
|
||||
|
||||
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback,
|
||||
@@ -1532,8 +1671,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
Class<T> entityClass) {
|
||||
EntityReader<? super T, DBObject> readerToUse = this.mongoConverter;
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findAndRemove using query: " + query + " fields: " + fields + " sort: " + sort + " for class: "
|
||||
+ entityClass + " in collection: " + collectionName);
|
||||
LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), fields, sort, entityClass, collectionName));
|
||||
}
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
return executeFindOneInternal(new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort),
|
||||
@@ -1551,12 +1690,15 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
|
||||
DBObject mappedUpdate = queryMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
increaseVersionForUpdateIfNecessary(entity, update);
|
||||
|
||||
DBObject mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
DBObject mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findAndModify using query: " + mappedQuery + " fields: " + fields + " sort: " + sort
|
||||
+ " for class: " + entityClass + " and update: " + mappedUpdate + " in collection: " + collectionName);
|
||||
LOGGER.debug(String.format("findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s "
|
||||
+ "in collection: %s", serializeToJsonSafely(mappedQuery), fields, sort, entityClass,
|
||||
serializeToJsonSafely(mappedUpdate), collectionName));
|
||||
}
|
||||
|
||||
return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options),
|
||||
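`doFindAndModify` above now maps the query with the `QueryMapper`, maps the update with the `UpdateMapper`, and bumps a version property if one is present. A sketch with an assumed `Counter` type and id:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

class FindAndModifySketch {

	// Hypothetical document type.
	static class Counter {
		String id;
		long value;
	}

	static Counter increment(MongoOperations template, String counterId) {
		// Query and $inc update are both mapped against Counter's metadata; if Counter carried a
		// @Version property, the update would additionally receive a version increment.
		return template.findAndModify(new Query(where("id").is(counterId)),
				new Update().inc("value", 1), Counter.class);
	}
}
```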
@@ -1588,9 +1730,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
ConversionService conversionService = mongoConverter.getConversionService();
|
||||
BeanWrapper<PersistentEntity<Object, ?>, Object> wrapper = BeanWrapper.create(savedObject, conversionService);
|
||||
BeanWrapper<Object> wrapper = BeanWrapper.create(savedObject, conversionService);
|
||||
|
||||
Object idValue = wrapper.getProperty(idProp, idProp.getType(), true);
|
||||
Object idValue = wrapper.getProperty(idProp, idProp.getType());
|
||||
|
||||
if (idValue != null) {
|
||||
return;
|
||||
@@ -1834,6 +1976,15 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
return converter;
}

private DBObject getMappedSortObject(Query query, Class<?> type) {

if (query == null || query.getSortObject() == null) {
return null;
}

return queryMapper.getMappedSort(query.getSortObject(), mappingContext.getPersistentEntity(type));
}

// Callback implementations
|
||||
|
||||
/**
|
||||
@@ -1856,13 +2007,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
public DBObject doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
if (fields == null) {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findOne using query: " + query + " in db.collection: " + collection.getFullName());
|
||||
LOGGER.debug(String.format("findOne using query: %s in db.collection: %s", serializeToJsonSafely(query),
|
||||
collection.getFullName()));
|
||||
}
|
||||
return collection.findOne(query);
|
||||
} else {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findOne using query: " + query + " fields: " + fields + " in db.collection: "
|
||||
+ collection.getFullName());
|
||||
LOGGER.debug(String.format("findOne using query: %s fields: %s in db.collection: %s",
|
||||
serializeToJsonSafely(query), fields, collection.getFullName()));
|
||||
}
|
||||
return collection.findOne(query, fields);
|
||||
}
|
||||
@@ -1891,7 +2043,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
public DBCursor doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
if (fields == null) {
|
||||
|
||||
if (fields == null || fields.toMap().isEmpty()) {
|
||||
return collection.find(query);
|
||||
} else {
|
||||
return collection.find(query, fields);
|
||||
@@ -2027,9 +2180,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
class QueryCursorPreparer implements CursorPreparer {
|
||||
|
||||
private final Query query;
|
||||
private final Class<?> type;
|
||||
|
||||
public QueryCursorPreparer(Query query, Class<?> type) {
|
||||
|
||||
public QueryCursorPreparer(Query query) {
|
||||
this.query = query;
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -2043,11 +2199,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
if (query.getSkip() <= 0 && query.getLimit() <= 0 && query.getSortObject() == null
|
||||
&& !StringUtils.hasText(query.getHint())) {
|
||||
&& !StringUtils.hasText(query.getHint()) && !query.getMeta().hasValues()) {
|
||||
return cursor;
|
||||
}
|
||||
|
||||
DBCursor cursorToUse = cursor;
|
||||
DBCursor cursorToUse = cursor.copy();
|
||||
|
||||
try {
|
||||
if (query.getSkip() > 0) {
|
||||
@@ -2057,11 +2213,18 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
cursorToUse = cursorToUse.limit(query.getLimit());
|
||||
}
|
||||
if (query.getSortObject() != null) {
|
||||
cursorToUse = cursorToUse.sort(query.getSortObject());
|
||||
DBObject sortDbo = type != null ? getMappedSortObject(query, type) : query.getSortObject();
|
||||
cursorToUse = cursorToUse.sort(sortDbo);
|
||||
}
|
||||
if (StringUtils.hasText(query.getHint())) {
|
||||
cursorToUse = cursorToUse.hint(query.getHint());
|
||||
}
|
||||
if (query.getMeta().hasValues()) {
|
||||
for (Entry<String, Object> entry : query.getMeta().values()) {
|
||||
cursorToUse = cursorToUse.addSpecial(entry.getKey(), entry.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e);
|
||||
}
|
||||
|
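`QueryCursorPreparer` above now works on a copy of the cursor, maps the sort against the entity type, and applies query meta values via `addSpecial`. A hedged sketch of a query exercising skip, limit, sort and hint; `City`, `template`, and the `"population_idx"` index name are assumptions.

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import java.util.List;

import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Query;

class CursorPreparerSketch {

	// Hypothetical document type.
	static class City {
		String id;
		String name;
		long population;
	}

	static List<City> secondPage(MongoOperations template) {
		Query query = new Query(where("population").gt(100000))
				.with(new Sort(Sort.Direction.DESC, "population")) // mapped via getMappedSortObject(...)
				.skip(10)
				.limit(10)
				.withHint("population_idx"); // hypothetical index name, applied through DBCursor.hint(...)
		return template.find(query, City.class);
	}
}
```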
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -27,6 +27,7 @@ import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedFi
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -44,9 +45,23 @@ import com.mongodb.DBObject;
|
||||
*/
|
||||
public class Aggregation {
|
||||
|
||||
public static final AggregationOperationContext DEFAULT_CONTEXT = new NoOpAggregationOperationContext();
|
||||
/**
|
||||
* References the root document, i.e. the top-level document, currently being processed in the aggregation pipeline
|
||||
* stage.
|
||||
*/
|
||||
public static final String ROOT = SystemVariable.ROOT.toString();
|
||||
|
||||
private final List<AggregationOperation> operations;
|
||||
/**
|
||||
* References the start of the field path being processed in the aggregation pipeline stage. Unless documented
|
||||
* otherwise, all stages start with CURRENT the same as ROOT.
|
||||
*/
|
||||
public static final String CURRENT = SystemVariable.CURRENT.toString();
|
||||
|
||||
public static final AggregationOperationContext DEFAULT_CONTEXT = new NoOpAggregationOperationContext();
|
||||
public static final AggregationOptions DEFAULT_OPTIONS = newAggregationOptions().build();
|
||||
|
||||
protected final List<AggregationOperation> operations;
|
||||
private final AggregationOptions options;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
@@ -66,6 +81,20 @@ public class Aggregation {
|
||||
return new Aggregation(operations);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a copy of this {@link Aggregation} with the given {@link AggregationOptions} set. Note that options are
|
||||
* supported in MongoDB version 2.6+.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return
|
||||
* @since 1.6
|
||||
*/
|
||||
public Aggregation withOptions(AggregationOptions options) {
|
||||
|
||||
Assert.notNull(options, "AggregationOptions must not be null.");
|
||||
return new Aggregation(this.operations, options);
|
||||
}
|
||||
|
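A short sketch of the new `withOptions(...)` hook combined with the `AggregationOptions` builder introduced later in this diff. Field and collection names are invented, and the options only take effect on MongoDB 2.6+ as the Javadoc above notes.

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.group;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.match;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregationOptions;

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.query.Criteria;

class AggregationWithOptionsSketch {

	static Aggregation largeSortPipeline() {
		// allowDiskUse(true) ends up as "allowDiskUse: true" on the aggregate command via
		// AggregationOptions.applyAndReturnPotentiallyChangedCommand(...).
		return newAggregation(
				match(Criteria.where("year").is(2014)),
				group("region").count().as("orders"))
				.withOptions(newAggregationOptions().allowDiskUse(true).build());
	}
}
```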
||||
/**
|
||||
* Creates a new {@link TypedAggregation} for the given type and {@link AggregationOperation}s.
|
||||
*
|
||||
@@ -92,11 +121,43 @@ public class Aggregation {
|
||||
* @param aggregationOperations must not be {@literal null} or empty.
|
||||
*/
|
||||
protected Aggregation(AggregationOperation... aggregationOperations) {
|
||||
this(asAggregationList(aggregationOperations));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param aggregationOperations must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
protected static List<AggregationOperation> asAggregationList(AggregationOperation... aggregationOperations) {
|
||||
|
||||
Assert.notEmpty(aggregationOperations, "AggregationOperations must not be null or empty!");
|
||||
|
||||
return Arrays.asList(aggregationOperations);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
* @param aggregationOperations must not be {@literal null} or empty.
|
||||
*/
|
||||
protected Aggregation(List<AggregationOperation> aggregationOperations) {
|
||||
this(aggregationOperations, DEFAULT_OPTIONS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
* @param aggregationOperations must not be {@literal null} or empty.
|
||||
* @param options must not be {@literal null} or empty.
|
||||
*/
|
||||
protected Aggregation(List<AggregationOperation> aggregationOperations, AggregationOptions options) {
|
||||
|
||||
Assert.notNull(aggregationOperations, "AggregationOperations must not be null!");
|
||||
Assert.isTrue(aggregationOperations.length > 0, "At least one AggregationOperation has to be provided");
|
||||
Assert.isTrue(aggregationOperations.size() > 0, "At least one AggregationOperation has to be provided");
|
||||
Assert.notNull(options, "AggregationOptions must not be null!");
|
||||
|
||||
this.operations = Arrays.asList(aggregationOperations);
|
||||
this.operations = aggregationOperations;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -231,6 +292,29 @@ public class Aggregation {
|
||||
return Fields.from(field(name, target));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the{@code distanceField}. The
|
||||
* {@code distanceField} defines output field that contains the calculated distance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param distanceField must not be {@literal null} or empty.
|
||||
* @return
|
||||
* @since 1.7
|
||||
*/
|
||||
public static GeoNearOperation geoNear(NearQuery query, String distanceField) {
|
||||
return new GeoNearOperation(query, distanceField);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new {@link AggregationOptions.Builder}.
|
||||
*
|
||||
* @return
|
||||
* @since 1.6
|
||||
*/
|
||||
public static AggregationOptions.Builder newAggregationOptions() {
|
||||
return new AggregationOptions.Builder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts this {@link Aggregation} specification to a {@link DBObject}.
|
||||
*
|
||||
@@ -248,13 +332,15 @@ public class Aggregation {
|
||||
|
||||
if (operation instanceof FieldsExposingAggregationOperation) {
|
||||
FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation;
|
||||
context = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields());
|
||||
context = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), rootContext);
|
||||
}
|
||||
}
|
||||
|
||||
DBObject command = new BasicDBObject("aggregate", inputCollectionName);
|
||||
command.put("pipeline", operationDocuments);
|
||||
|
||||
command = options.applyAndReturnPotentiallyChangedCommand(command);
|
||||
|
||||
return command;
|
||||
}
|
||||
|
||||
@@ -302,4 +388,51 @@ public class Aggregation {
|
||||
return new FieldReference(new ExposedField(new AggregationField(name), true));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Describes the system variables available in MongoDB aggregation framework pipeline expressions.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation-variables
|
||||
*/
|
||||
enum SystemVariable {
|
||||
|
||||
ROOT, CURRENT;
|
||||
|
||||
private static final String PREFIX = "$$";
|
||||
|
||||
/**
|
||||
* Return {@literal true} if the given {@code fieldRef} denotes a well-known system variable, {@literal false}
|
||||
* otherwise.
|
||||
*
|
||||
* @param fieldRef may be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static boolean isReferingToSystemVariable(String fieldRef) {
|
||||
|
||||
if (fieldRef == null || !fieldRef.startsWith(PREFIX) || fieldRef.length() <= 2) {
|
||||
return false;
|
||||
}
|
||||
|
||||
int indexOfFirstDot = fieldRef.indexOf('.');
|
||||
String candidate = fieldRef.substring(2, indexOfFirstDot == -1 ? fieldRef.length() : indexOfFirstDot);
|
||||
|
||||
for (SystemVariable value : values()) {
|
||||
if (value.name().equals(candidate)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Enum#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return PREFIX.concat(name());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
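The new `ROOT` and `CURRENT` constants simply render MongoDB's `$$ROOT` / `$$CURRENT` pipeline variables, and `isReferingToSystemVariable` keeps such references from being stripped by `AggregationField`'s clean-up. A tiny illustration:

```java
import org.springframework.data.mongodb.core.aggregation.Aggregation;

class SystemVariableSketch {

	static void show() {
		// The constants expand to the raw system variables understood by the server.
		String root = Aggregation.ROOT;       // "$$ROOT"
		String current = Aggregation.CURRENT; // "$$CURRENT"

		// A reference such as "$$ROOT.name" is therefore left untouched by cleanUp(...)
		// instead of being reduced to "name".
		System.out.println(root + " / " + current);
	}
}
```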
||||
@@ -0,0 +1,189 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Holds a set of configurable aggregation options that can be used within an aggregation pipeline. A list of support
|
||||
* aggregation options can be found in the MongoDB reference documentation
|
||||
* http://docs.mongodb.org/manual/reference/command/aggregate/#aggregate
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @see Aggregation#withOptions(AggregationOptions)
|
||||
* @see TypedAggregation#withOptions(AggregationOptions)
|
||||
* @since 1.6
|
||||
*/
|
||||
public class AggregationOptions {
|
||||
|
||||
private static final String CURSOR = "cursor";
|
||||
private static final String EXPLAIN = "explain";
|
||||
private static final String ALLOW_DISK_USE = "allowDiskUse";
|
||||
|
||||
private final boolean allowDiskUse;
|
||||
private final boolean explain;
|
||||
private final DBObject cursor;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AggregationOptions}.
|
||||
*
|
||||
* @param allowDiskUse whether to off-load intensive sort-operations to disk.
|
||||
* @param explain whether to get the execution plan for the aggregation instead of the actual results.
|
||||
* @param cursor can be {@literal null}, used to pass additional options to the aggregation.
|
||||
*/
|
||||
public AggregationOptions(boolean allowDiskUse, boolean explain, DBObject cursor) {
|
||||
|
||||
this.allowDiskUse = allowDiskUse;
|
||||
this.explain = explain;
|
||||
this.cursor = cursor;
|
||||
}
|
||||
|
||||
/**
|
||||
* Enables writing to temporary files. When set to true, aggregation stages can write data to the _tmp subdirectory in
|
||||
* the dbPath directory.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean isAllowDiskUse() {
|
||||
return allowDiskUse;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies to return the information on the processing of the pipeline.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean isExplain() {
|
||||
return explain;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify a document that contains options that control the creation of the cursor object.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public DBObject getCursor() {
|
||||
return cursor;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new potentially adjusted copy for the given {@code aggregationCommandObject} with the configuration
|
||||
* applied.
|
||||
*
|
||||
* @param command the aggregation command.
|
||||
* @return
|
||||
*/
|
||||
DBObject applyAndReturnPotentiallyChangedCommand(DBObject command) {
|
||||
|
||||
DBObject result = new BasicDBObject(command.toMap());
|
||||
|
||||
if (allowDiskUse && !result.containsField(ALLOW_DISK_USE)) {
|
||||
result.put(ALLOW_DISK_USE, allowDiskUse);
|
||||
}
|
||||
|
||||
if (explain && !result.containsField(EXPLAIN)) {
|
||||
result.put(EXPLAIN, explain);
|
||||
}
|
||||
|
||||
if (cursor != null && !result.containsField(CURSOR)) {
|
||||
result.put("cursor", cursor);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link DBObject} representation of this {@link AggregationOptions}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public DBObject toDbObject() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
dbo.put(ALLOW_DISK_USE, allowDiskUse);
|
||||
dbo.put(EXPLAIN, explain);
|
||||
dbo.put(CURSOR, cursor);
|
||||
|
||||
return dbo;
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return toDbObject().toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* A Builder for {@link AggregationOptions}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public static class Builder {
|
||||
|
||||
private boolean allowDiskUse;
|
||||
private boolean explain;
|
||||
private DBObject cursor;
|
||||
|
||||
/**
|
||||
* Defines whether to off-load intensive sort-operations to disk.
|
||||
*
|
||||
* @param allowDiskUse
|
||||
* @return
|
||||
*/
|
||||
public Builder allowDiskUse(boolean allowDiskUse) {
|
||||
|
||||
this.allowDiskUse = allowDiskUse;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines whether to get the execution plan for the aggregation instead of the actual results.
|
||||
*
|
||||
* @param explain
|
||||
* @return
|
||||
*/
|
||||
public Builder explain(boolean explain) {
|
||||
|
||||
this.explain = explain;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Additional options to the aggregation.
|
||||
*
|
||||
* @param cursor
|
||||
* @return
|
||||
*/
|
||||
public Builder cursor(DBObject cursor) {
|
||||
|
||||
this.cursor = cursor;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new {@link AggregationOptions} instance with the given configuration.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public AggregationOptions build() {
|
||||
return new AggregationOptions(allowDiskUse, explain, cursor);
|
||||
}
|
||||
}
|
||||
}
|
||||
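Besides the builder, the new `AggregationOptions` class above can be constructed directly; `toDbObject()` shows what gets merged into the aggregate command. The `batchSize` cursor document is just an illustrative value.

```java
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

class AggregationOptionsSketch {

	static DBObject asDbObject() {
		// Any cursor DBObject is passed through as-is; batchSize is only an example.
		AggregationOptions options = new AggregationOptions(true, false, new BasicDBObject("batchSize", 20));
		// Yields { "allowDiskUse" : true , "explain" : false , "cursor" : { "batchSize" : 20 } }
		return options.toDbObject();
	}
}
```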
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -28,6 +28,7 @@ import com.mongodb.DBObject;
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @param <T> The class in which the results are mapped onto.
|
||||
* @since 1.3
|
||||
*/
|
||||
@@ -90,6 +91,16 @@ public class AggregationResults<T> implements Iterable<T> {
|
||||
return serverUsed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the raw result that was returned by the server.
|
||||
*
|
||||
* @return
|
||||
* @since 1.6
|
||||
*/
|
||||
public DBObject getRawResults() {
|
||||
return rawResults;
|
||||
}
|
||||
|
||||
private String parseServerUsed() {
|
||||
|
||||
Object object = rawResults.get("serverUsed");
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -32,7 +32,7 @@ import org.springframework.util.CompositeIterator;
|
||||
* @author Thomas Darimont
|
||||
* @since 1.3
|
||||
*/
|
||||
public class ExposedFields implements Iterable<ExposedField> {
|
||||
public final class ExposedFields implements Iterable<ExposedField> {
|
||||
|
||||
private static final List<ExposedField> NO_FIELDS = Collections.emptyList();
|
||||
private static final ExposedFields EMPTY = new ExposedFields(NO_FIELDS, NO_FIELDS);
|
||||
@@ -88,7 +88,7 @@ public class ExposedFields implements Iterable<ExposedField> {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} instance for the given fields in either sythetic or non-synthetic way.
|
||||
* Creates a new {@link ExposedFields} instance for the given fields in either synthetic or non-synthetic way.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @param synthetic
|
||||
@@ -107,7 +107,7 @@ public class ExposedFields implements Iterable<ExposedField> {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} with the given orignals and synthetics.
|
||||
* Creates a new {@link ExposedFields} with the given originals and synthetics.
|
||||
*
|
||||
* @param originals must not be {@literal null}.
|
||||
* @param synthetic must not be {@literal null}.
|
||||
@@ -268,14 +268,21 @@ public class ExposedFields implements Iterable<ExposedField> {
|
||||
return field.isAliased();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the synthetic
|
||||
*/
|
||||
public boolean isSynthetic() {
|
||||
return synthetic;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the field can be referred to using the given name.
|
||||
*
|
||||
* @param input
|
||||
* @param name
|
||||
* @return
|
||||
*/
|
||||
public boolean canBeReferredToBy(String input) {
|
||||
return getTarget().equals(input);
|
||||
public boolean canBeReferredToBy(String name) {
|
||||
return getName().equals(name) || getTarget().equals(name);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -340,14 +347,8 @@ public class ExposedFields implements Iterable<ExposedField> {
|
||||
public FieldReference(ExposedField field) {
|
||||
|
||||
Assert.notNull(field, "ExposedField must not be null!");
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return
|
||||
*/
|
||||
public boolean isSynthetic() {
|
||||
return field.synthetic;
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -361,6 +362,16 @@ public class ExposedFields implements Iterable<ExposedField> {
|
||||
return field.synthetic ? target : String.format("%s.%s", Fields.UNDERSCORE_ID, target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the reference value for the given field reference. Will return 1 for a synthetic, unaliased field or the
|
||||
* raw rendering of the reference otherwise.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public Object getReferenceValue() {
|
||||
return field.synthetic && !field.isAliased() ? 1 : toString();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -32,16 +32,22 @@ import com.mongodb.DBObject;
|
||||
class ExposedFieldsAggregationOperationContext implements AggregationOperationContext {
|
||||
|
||||
private final ExposedFields exposedFields;
|
||||
private final AggregationOperationContext rootContext;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFieldsAggregationOperationContext} from the given {@link ExposedFields}.
|
||||
* Creates a new {@link ExposedFieldsAggregationOperationContext} from the given {@link ExposedFields}. Uses the given
|
||||
* {@link AggregationOperationContext} to perform a mapping to mongo types if necessary.
|
||||
*
|
||||
* @param exposedFields must not be {@literal null}.
|
||||
* @param rootContext must not be {@literal null}.
|
||||
*/
|
||||
public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields) {
|
||||
public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields, AggregationOperationContext rootContext) {
|
||||
|
||||
Assert.notNull(exposedFields, "ExposedFields must not be null!");
|
||||
Assert.notNull(rootContext, "RootContext must not be null!");
|
||||
|
||||
this.exposedFields = exposedFields;
|
||||
this.rootContext = rootContext;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -50,7 +56,7 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
*/
|
||||
@Override
|
||||
public DBObject getMappedObject(DBObject dbObject) {
|
||||
return dbObject;
|
||||
return rootContext.getMappedObject(dbObject);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -59,7 +65,7 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
return getReference(field.getTarget());
|
||||
return getReference(field, field.getTarget());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -68,11 +74,42 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(String name) {
|
||||
return getReference(null, name);
|
||||
}
|
||||
|
||||
ExposedField field = exposedFields.getField(name);
|
||||
/**
|
||||
* Returns a {@link FieldReference} to the given {@link Field} with the given {@code name}.
|
||||
*
|
||||
* @param field may be {@literal null}
|
||||
* @param name must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
private FieldReference getReference(Field field, String name) {
|
||||
|
||||
if (field != null) {
|
||||
return new FieldReference(field);
|
||||
Assert.notNull(name, "Name must not be null!");
|
||||
|
||||
ExposedField exposedField = exposedFields.getField(name);
|
||||
|
||||
if (exposedField != null) {
|
||||
|
||||
if (field != null) {
|
||||
// we return a FieldReference to the given field directly to make sure that we reference the proper alias here.
|
||||
return new FieldReference(new ExposedField(field, exposedField.isSynthetic()));
|
||||
}
|
||||
|
||||
return new FieldReference(exposedField);
|
||||
}
|
||||
|
||||
if (name.contains(".")) {
|
||||
|
||||
// for nested field references we only check that the root field exists.
|
||||
ExposedField rootField = exposedFields.getField(name.split("\\.")[0]);
|
||||
|
||||
if (rootField != null) {
|
||||
|
||||
// We have to synthetic to true, in order to render the field-name as is.
|
||||
return new FieldReference(new ExposedField(name, true));
|
||||
}
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Invalid reference '%s'!", name));
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -30,15 +30,16 @@ import org.springframework.util.StringUtils;
|
||||
* Value object to capture a list of {@link Field} instances.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @since 1.3
|
||||
*/
|
||||
public class Fields implements Iterable<Field> {
|
||||
public final class Fields implements Iterable<Field> {
|
||||
|
||||
private static final String AMBIGUOUS_EXCEPTION = "Found two fields both using '%s' as name: %s and %s! Please "
|
||||
+ "customize your field definitions to get to unique field names!";
|
||||
|
||||
public static String UNDERSCORE_ID = "_id";
|
||||
public static String UNDERSCORE_ID_REF = "$_id";
|
||||
public static final String UNDERSCORE_ID = "_id";
|
||||
public static final String UNDERSCORE_ID_REF = "$_id";
|
||||
|
||||
private final List<Field> fields;
|
||||
|
||||
@@ -83,6 +84,15 @@ public class Fields implements Iterable<Field> {
|
||||
return new AggregationField(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link Field} with the given {@code name} and {@code target}.
|
||||
* <p>
|
||||
* The {@code target} is the name of the backing document field that will be aliased with {@code name}.
|
||||
*
|
||||
* @param name
|
||||
* @param target must not be {@literal null} or empty
|
||||
* @return
|
||||
*/
|
||||
public static Field field(String name, String target) {
|
||||
Assert.hasText(target, "Target must not be null or empty!");
|
||||
return new AggregationField(name, target);
|
||||
@@ -186,15 +196,24 @@ public class Fields implements Iterable<Field> {
|
||||
private final String target;
|
||||
|
||||
/**
|
||||
* Creates an aggregation fieldwith the given name. As no target is set explicitly, the name will be used as target
|
||||
* as well.
|
||||
* Creates an aggregation field with the given {@code name}.
|
||||
*
|
||||
* @param key
|
||||
* @see AggregationField#AggregationField(String, String).
|
||||
* @param name must not be {@literal null} or empty
|
||||
*/
|
||||
public AggregationField(String key) {
|
||||
this(key, null);
|
||||
public AggregationField(String name) {
|
||||
this(name, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an aggregation field with the given {@code name} and {@code target}.
|
||||
* <p>
|
||||
* The {@code name} serves as an alias for the actual backing document field denoted by {@code target}. If no target
|
||||
* is set explicitly, the name will be used as target.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty
|
||||
* @param target
|
||||
*/
|
||||
public AggregationField(String name, String target) {
|
||||
|
||||
String nameToSet = cleanUp(name);
|
||||
@@ -203,7 +222,7 @@ public class Fields implements Iterable<Field> {
|
||||
Assert.hasText(nameToSet, "AggregationField name must not be null or empty!");
|
||||
|
||||
if (target == null && name.contains(".")) {
|
||||
this.name = nameToSet.substring(nameToSet.indexOf(".") + 1);
|
||||
this.name = nameToSet.substring(nameToSet.indexOf('.') + 1);
|
||||
this.target = nameToSet;
|
||||
} else {
|
||||
this.name = nameToSet;
|
||||
@@ -217,6 +236,10 @@ public class Fields implements Iterable<Field> {
|
||||
return source;
|
||||
}
|
||||
|
||||
if (Aggregation.SystemVariable.isReferingToSystemVariable(source)) {
|
||||
return source;
|
||||
}
|
||||
|
||||
int dollarIndex = source.lastIndexOf('$');
|
||||
return dollarIndex == -1 ? source : source.substring(dollarIndex + 1);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,17 +22,33 @@ import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Represents a {@code geoNear} aggregation operation.
|
||||
* <p>
|
||||
* We recommend to use the static factory method {@link Aggregation#geoNear(NearQuery, String)} instead of creating
|
||||
* instances of this class directly.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @since 1.3
|
||||
*/
|
||||
public class GeoNearOperation implements AggregationOperation {
|
||||
|
||||
private final NearQuery nearQuery;
|
||||
private final String distanceField;
|
||||
|
||||
public GeoNearOperation(NearQuery nearQuery) {
|
||||
/**
|
||||
* Creates a new {@link GeoNearOperation} from the given {@link NearQuery} and the given distance field. The
|
||||
* {@code distanceField} defines output field that contains the calculated distance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param distanceField must not be {@literal null}.
|
||||
*/
|
||||
public GeoNearOperation(NearQuery nearQuery, String distanceField) {
|
||||
|
||||
Assert.notNull(nearQuery, "NearQuery must not be null.");
|
||||
Assert.hasLength(distanceField, "Distance field must not be null or empty.");
|
||||
|
||||
Assert.notNull(nearQuery);
|
||||
this.nearQuery = nearQuery;
|
||||
this.distanceField = distanceField;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -41,6 +57,10 @@ public class GeoNearOperation implements AggregationOperation {
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject("$geoNear", context.getMappedObject(nearQuery.toDBObject()));
|
||||
|
||||
BasicDBObject command = (BasicDBObject) context.getMappedObject(nearQuery.toDBObject());
|
||||
command.put("distanceField", distanceField);
|
||||
|
||||
return new BasicDBObject("$geoNear", command);
|
||||
}
|
||||
}
|
||||
|
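`GeoNearOperation` above now requires a distance field and is meant to be created through `Aggregation.geoNear(...)`. A hedged sketch; the coordinates, limits, and the `NearQuery.near(double, double)` factory used here are assumptions about the surrounding query API, not part of this diff.

```java
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.GeoNearOperation;
import org.springframework.data.mongodb.core.query.NearQuery;

class GeoNearSketch {

	static GeoNearOperation nearbyStores() {
		// Arbitrary coordinates and limits, purely illustrative.
		NearQuery nearBerlin = NearQuery.near(13.4, 52.5).maxDistance(0.05).num(10);
		// The calculated distance is written to the "distance" field of each result document.
		return Aggregation.geoNear(nearBerlin, "distance");
	}
}
```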
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -31,6 +31,9 @@ import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $group}-operation.
|
||||
* <p>
|
||||
* We recommend to use the static factory method {@link Aggregation#group(Fields)} instead of creating instances of this
|
||||
* class directly.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/group/#stage._S_group
|
||||
* @author Sebastian Herold
|
||||
@@ -99,7 +102,7 @@ public class GroupOperation implements FieldsExposingAggregationOperation {
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class GroupOperationBuilder {
|
||||
public static final class GroupOperationBuilder {
|
||||
|
||||
private final GroupOperation groupOperation;
|
||||
private final Operation operation;
|
||||
@@ -364,7 +367,16 @@ public class GroupOperation implements FieldsExposingAggregationOperation {
|
||||
}
|
||||
|
||||
public Object getValue(AggregationOperationContext context) {
|
||||
return reference == null ? value : context.getReference(reference).toString();
|
||||
|
||||
if (reference == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (Aggregation.SystemVariable.isReferingToSystemVariable(reference)) {
|
||||
return reference;
|
||||
}
|
||||
|
||||
return context.getReference(reference).toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -21,14 +21,17 @@ import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
* Encapsulates the {@code $limit}-operation
* Encapsulates the {@code $limit}-operation.
* <p>
* We recommend to use the static factory method {@link Aggregation#limit(long)} instead of creating instances of this
* class directly.
*
* @see http://docs.mongodb.org/manual/reference/aggregation/limit/
* @author Thomas Darimont
* @author Oliver Gierke
* @since 1.3
*/
class LimitOperation implements AggregationOperation {
public class LimitOperation implements AggregationOperation {

private final long maxElements;


@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,14 +15,18 @@
*/
package org.springframework.data.mongodb.core.aggregation;

import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.util.Assert;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
* Encapsulates the {@code $match}-operation
* Encapsulates the {@code $match}-operation.
* <p>
* We recommend to use the static factory method
* {@link Aggregation#match(org.springframework.data.mongodb.core.query.Criteria)} instead of creating instances of this
* class directly.
*
* @see http://docs.mongodb.org/manual/reference/aggregation/match/
* @author Sebastian Herold
@@ -32,17 +36,17 @@ import com.mongodb.DBObject;
*/
public class MatchOperation implements AggregationOperation {

private final Criteria criteria;
private final CriteriaDefinition criteriaDefinition;

/**
* Creates a new {@link MatchOperation} for the given {@link Criteria}.
* Creates a new {@link MatchOperation} for the given {@link CriteriaDefinition}.
*
* @param criteria must not be {@literal null}.
* @param criteriaDefinition must not be {@literal null}.
*/
public MatchOperation(Criteria criteria) {
public MatchOperation(CriteriaDefinition criteriaDefinition) {

Assert.notNull(criteria, "Criteria must not be null!");
this.criteria = criteria;
Assert.notNull(criteriaDefinition, "Criteria must not be null!");
this.criteriaDefinition = criteriaDefinition;
}

/*
@@ -51,6 +55,6 @@ public class MatchOperation implements AggregationOperation {
*/
@Override
public DBObject toDBObject(AggregationOperationContext context) {
return new BasicDBObject("$match", context.getMappedObject(criteria.getCriteriaObject()));
return new BasicDBObject("$match", context.getMappedObject(criteriaDefinition.getCriteriaObject()));
}
}

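Widening the constructor to `CriteriaDefinition` lets any criteria implementation back a `$match` stage; the common case still reads as before. A sketch with assumed field names:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.aggregation.MatchOperation;
import org.springframework.data.mongodb.core.query.Criteria;

// Criteria implements CriteriaDefinition, so the existing factory keeps working.
MatchOperation activeOrders = match(Criteria.where("status").is("ACTIVE").and("total").gte(100));
```
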
@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -21,19 +21,20 @@ import java.util.Collections;
import java.util.List;

import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.FieldProjection;
import org.springframework.util.Assert;

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
* Encapsulates the aggregation framework {@code $project}-operation. Projection of field to be used in an
* {@link Aggregation}. A projection is similar to a {@link Field} inclusion/exclusion but more powerful. It can
* generate new fields, change values of given field etc.
* Encapsulates the aggregation framework {@code $project}-operation.
* <p>
* Projection of field to be used in an {@link Aggregation}. A projection is similar to a {@link Field}
* inclusion/exclusion but more powerful. It can generate new fields, change values of given field etc.
* <p>
* We recommend to use the static factory method {@link Aggregation#project(Fields)} instead of creating instances of
* this class directly.
*
* @see http://docs.mongodb.org/manual/reference/aggregation/project/
* @author Tobias Trelle
@@ -236,24 +237,51 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
}

/**
* An {@link ProjectionOperationBuilder} that is used for SpEL expression based projections.
*
* @author Thomas Darimont
*/
public static class ExpressionProjectionOperationBuilder extends AbstractProjectionOperationBuilder {
public static class ExpressionProjectionOperationBuilder extends ProjectionOperationBuilder {

private final Object[] params;
private final String expression;

/**
* Creates a new {@link ExpressionProjectionOperationBuilder} for the given value, {@link ProjectionOperation} and
* parameters.
*
* @param value must not be {@literal null}.
* @param expression must not be {@literal null}.
* @param operation must not be {@literal null}.
* @param parameters
*/
public ExpressionProjectionOperationBuilder(Object value, ProjectionOperation operation, Object[] parameters) {
public ExpressionProjectionOperationBuilder(String expression, ProjectionOperation operation, Object[] parameters) {

super(value, operation);
this.params = parameters;
super(expression, operation, null);
this.expression = expression;
this.params = parameters.clone();
}

/* (non-Javadoc)
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder#project(java.lang.String, java.lang.Object[])
*/
@Override
public ProjectionOperationBuilder project(String operation, final Object... values) {

OperationProjection operationProjection = new OperationProjection(Fields.field(value.toString()), operation,
values) {
@Override
protected List<Object> getOperationArguments(AggregationOperationContext context) {

List<Object> result = new ArrayList<Object>(values.length + 1);
result.add(ExpressionProjection.toMongoExpression(context,
ExpressionProjectionOperationBuilder.this.expression, ExpressionProjectionOperationBuilder.this.params));
result.addAll(Arrays.asList(values));

return result;
}
};

return new ProjectionOperationBuilder(value, this.operation.and(operationProjection), operationProjection);
}

/*
@@ -263,7 +291,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
@Override
public ProjectionOperation as(String alias) {

Field expressionField = Fields.field(alias, "expr");
Field expressionField = Fields.field(alias, alias);
return this.operation.and(new ExpressionProjection(expressionField, this.value.toString(), params));
}

@@ -295,7 +323,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
Assert.notNull(parameters, "Parameters must not be null!");

this.expression = expression;
this.params = parameters;
this.params = parameters.clone();
}

/*
@@ -304,7 +332,11 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
*/
@Override
public DBObject toDBObject(AggregationOperationContext context) {
return new BasicDBObject(getExposedField().getName(), TRANSFORMER.transform(expression, context, params));
return new BasicDBObject(getExposedField().getName(), toMongoExpression(context, expression, params));
}

protected static Object toMongoExpression(AggregationOperationContext context, String expression, Object[] params) {
return TRANSFORMER.transform(expression, context, params);
}
}
}
@@ -317,8 +349,10 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
*/
public static class ProjectionOperationBuilder extends AbstractProjectionOperationBuilder {

private static final String NUMBER_NOT_NULL = "Number must not be null!";
private static final String FIELD_REFERENCE_NOT_NULL = "Field reference must not be null!";

private final String name;
private final ProjectionOperation operation;
private final OperationProjection previousProjection;

/**
@@ -333,7 +367,23 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
super(name, operation);

this.name = name;
this.operation = operation;
this.previousProjection = previousProjection;
}

/**
* Creates a new {@link ProjectionOperationBuilder} for the field with the given value on top of the given
* {@link ProjectionOperation}.
*
* @param value
* @param operation
* @param previousProjection
*/
protected ProjectionOperationBuilder(Object value, ProjectionOperation operation,
OperationProjection previousProjection) {

super(value, operation);

this.name = null;
this.previousProjection = previousProjection;
}

@@ -383,7 +433,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
*/
public ProjectionOperationBuilder plus(Number number) {

Assert.notNull(number, "Number must not be null!");
Assert.notNull(number, NUMBER_NOT_NULL);
return project("add", number);
}

@@ -420,7 +470,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
*/
public ProjectionOperationBuilder minus(String fieldReference) {

Assert.notNull(fieldReference, "Field reference must not be null!");
Assert.notNull(fieldReference, FIELD_REFERENCE_NOT_NULL);
return project("subtract", Fields.field(fieldReference));
}

@@ -432,7 +482,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
*/
public ProjectionOperationBuilder multiply(Number number) {

Assert.notNull(number, "Number must not be null!");
Assert.notNull(number, NUMBER_NOT_NULL);
return project("multiply", number);
}

@@ -445,7 +495,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
*/
public ProjectionOperationBuilder multiply(String fieldReference) {

Assert.notNull(fieldReference, "Field reference must not be null!");
Assert.notNull(fieldReference, FIELD_REFERENCE_NOT_NULL);
return project("multiply", Fields.field(fieldReference));
}

@@ -457,7 +507,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
*/
public ProjectionOperationBuilder divide(Number number) {

Assert.notNull(number, "Number must not be null!");
Assert.notNull(number, FIELD_REFERENCE_NOT_NULL);
Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero!");
return project("divide", number);
}
@@ -471,7 +521,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
*/
public ProjectionOperationBuilder divide(String fieldReference) {

Assert.notNull(fieldReference, "Field reference must not be null!");
Assert.notNull(fieldReference, FIELD_REFERENCE_NOT_NULL);
return project("divide", Fields.field(fieldReference));
}

@@ -484,7 +534,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
*/
public ProjectionOperationBuilder mod(Number number) {

Assert.notNull(number, "Number must not be null!");
Assert.notNull(number, NUMBER_NOT_NULL);
Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero!");
return project("mod", number);
}
@@ -498,7 +548,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
*/
public ProjectionOperationBuilder mod(String fieldReference) {

Assert.notNull(fieldReference, "Field reference must not be null!");
Assert.notNull(fieldReference, FIELD_REFERENCE_NOT_NULL);
return project("mod", Fields.field(fieldReference));
}

@@ -519,8 +569,9 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
* @return
*/
public ProjectionOperationBuilder project(String operation, Object... values) {
OperationProjection projectionOperation = new OperationProjection(Fields.field(name), operation, values);
return new ProjectionOperationBuilder(name, this.operation.and(projectionOperation), projectionOperation);
OperationProjection operationProjection = new OperationProjection(Fields.field(value.toString()), operation,
values);
return new ProjectionOperationBuilder(value, this.operation.and(operationProjection), operationProjection);
}

/**
@@ -625,9 +676,12 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
// implicit reference or explicit include?
if (value == null || Boolean.TRUE.equals(value)) {

if (Aggregation.SystemVariable.isReferingToSystemVariable(field.getTarget())) {
return field.getTarget();
}

// check whether referenced field exists in the context
FieldReference reference = context.getReference(field.getTarget());
return reference.isSynthetic() && !field.isAliased() ? 1 : reference.toString();
return context.getReference(field).getReferenceValue();

} else if (Boolean.FALSE.equals(value)) {

@@ -648,7 +702,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
/**
* Creates a new {@link OperationProjection} for the given field.
*
* @param name the name of the field to add the operation projection for, must not be {@literal null} or empty.
* @param field the name of the field to add the operation projection for, must not be {@literal null} or empty.
* @param operation the actual operation key, must not be {@literal null} or empty.
* @param values the values to pass into the operation, must not be {@literal null}.
*/
@@ -671,18 +725,15 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
@Override
public DBObject toDBObject(AggregationOperationContext context) {

BasicDBList values = new BasicDBList();
values.addAll(buildReferences(context));
DBObject inner = new BasicDBObject("$" + operation, getOperationArguments(context));

DBObject inner = new BasicDBObject("$" + operation, values);

return new BasicDBObject(this.field.getName(), inner);
return new BasicDBObject(getField().getName(), inner);
}

private List<Object> buildReferences(AggregationOperationContext context) {
protected List<Object> getOperationArguments(AggregationOperationContext context) {

List<Object> result = new ArrayList<Object>(values.size());
result.add(context.getReference(field.getTarget()).toString());
result.add(context.getReference(getField().getName()).toString());

for (Object element : values) {
result.add(element instanceof Field ? context.getReference((Field) element).toString() : element);
@@ -691,6 +742,15 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
return result;
}

/**
* Returns the field that holds the {@link OperationProjection}.
*
* @return
*/
protected Field getField() {
return field;
}

/**
* Creates a new instance of this {@link OperationProjection} with the given alias.
*
@@ -698,7 +758,27 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
* @return
*/
public OperationProjection withAlias(String alias) {
return new OperationProjection(Fields.field(alias, this.field.getName()), operation, values.toArray());

final Field aliasedField = Fields.field(alias, this.field.getName());
return new OperationProjection(aliasedField, operation, values.toArray()) {

/* (non-Javadoc)
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.OperationProjection#getField()
*/
@Override
protected Field getField() {
return aliasedField;
}

@Override
protected List<Object> getOperationArguments(AggregationOperationContext context) {

// We have to make sure that we use the arguments from the "previous" OperationProjection that we replace
// with this new instance.

return OperationProjection.this.getOperationArguments(context);
}
};
}
}

@@ -730,6 +810,96 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
return new BasicDBObject(name, nestedObject);
}
}

/**
* Extracts the minute from a date expression.
*
* @return
*/
public ProjectionOperationBuilder extractMinute() {
return project("minute");
}

/**
* Extracts the hour from a date expression.
*
* @return
*/
public ProjectionOperationBuilder extractHour() {
return project("hour");
}

/**
* Extracts the second from a date expression.
*
* @return
*/
public ProjectionOperationBuilder extractSecond() {
return project("second");
}

/**
* Extracts the millisecond from a date expression.
*
* @return
*/
public ProjectionOperationBuilder extractMillisecond() {
return project("millisecond");
}

/**
* Extracts the year from a date expression.
*
* @return
*/
public ProjectionOperationBuilder extractYear() {
return project("year");
}

/**
* Extracts the month from a date expression.
*
* @return
*/
public ProjectionOperationBuilder extractMonth() {
return project("month");
}

/**
* Extracts the week from a date expression.
*
* @return
*/
public ProjectionOperationBuilder extractWeek() {
return project("week");
}

/**
* Extracts the dayOfYear from a date expression.
*
* @return
*/
public ProjectionOperationBuilder extractDayOfYear() {
return project("dayOfYear");
}

/**
* Extracts the dayOfMonth from a date expression.
*
* @return
*/
public ProjectionOperationBuilder extractDayOfMonth() {
return project("dayOfMonth");
}

/**
* Extracts the dayOfWeek from a date expression.
*
* @return
*/
public ProjectionOperationBuilder extractDayOfWeek() {
return project("dayOfWeek");
}
}

/**
@@ -770,5 +940,4 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
*/
public abstract DBObject toDBObject(AggregationOperationContext context);
}

}

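Taken together, the builder changes above cover arithmetic projections, SpEL-based expression projections and the new date-extraction helpers. A combined sketch, where the `netPrice`, `discount` and `issued` fields belong to a made-up `Invoice` document:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.aggregation.ProjectionOperation;

// Sketch only: field names are assumptions.
ProjectionOperation projection = project("netPrice")
    .and("netPrice").minus("discount").as("margin")    // { $subtract : [ "$netPrice", "$discount" ] }
    .andExpression("netPrice * 1.19").as("grossPrice") // SpEL expression projection
    .and("issued").extractYear().as("year");           // { $year : "$issued" } via the new extract helpers
```
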
@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -22,6 +22,9 @@ import com.mongodb.DBObject;

/**
* Encapsulates the aggregation framework {@code $skip}-operation.
* <p>
* We recommend to use the static factory method {@link Aggregation#skip(int)} instead of creating instances of this
* class directly.
*
* @see http://docs.mongodb.org/manual/reference/aggregation/skip/
* @author Thomas Darimont

@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -26,6 +26,9 @@ import com.mongodb.DBObject;

/**
* Encapsulates the aggregation framework {@code $sort}-operation.
* <p>
* We recommend to use the static factory method {@link Aggregation#sort(Direction, String...)} instead of creating
* instances of this class directly.
*
* @see http://docs.mongodb.org/manual/reference/aggregation/sort/#pipe._S_sort
* @author Thomas Darimont

@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -495,7 +495,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {

if (currentNode.hasfirstChildNotOfType(Indexer.class)) {
// we have a property path expression like: foo.bar -> render as reference
return context.getFieldReference().toString();
return context.addToPreviousOrReturn(context.getFieldReference().toString());
}

return context.addToPreviousOrReturn(currentNode.getValue());

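With `LimitOperation` now public and the `$skip`/`$sort` Javadoc pointing at the static factories, a typical page of results is assembled like this (the sort field and page size are assumed examples):

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.aggregation.Aggregation;

// Hypothetical paging over documents by creation date, newest first: page 3 with 10 items per page.
Aggregation thirdPage = newAggregation(
    sort(Direction.DESC, "createdAt"),
    skip(20),
    limit(10));
```
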
@@ -88,14 +88,16 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
*/
@Override
public FieldReference getReference(String name) {

PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(name, type);

return getReferenceFor(field(propertyPath.getLeafProperty().getName(),
propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)));
return getReferenceFor(field(name));
}

private FieldReference getReferenceFor(Field field) {
return new FieldReference(new ExposedField(field, true));

PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(
field.getTarget(), type);
Field mappedField = field(propertyPath.getLeafProperty().getName(),
propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE));

return new FieldReference(new ExposedField(mappedField, true));
}
}

@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,6 +15,8 @@
*/
package org.springframework.data.mongodb.core.aggregation;

import java.util.List;

import org.springframework.util.Assert;

/**
@@ -30,11 +32,34 @@ public class TypedAggregation<I> extends Aggregation {
/**
* Creates a new {@link TypedAggregation} from the given {@link AggregationOperation}s.
*
* @param inputType must not be {@literal null}.
* @param operations must not be {@literal null} or empty.
*/
public TypedAggregation(Class<I> inputType, AggregationOperation... operations) {
this(inputType, asAggregationList(operations));
}

super(operations);
/**
* Creates a new {@link TypedAggregation} from the given {@link AggregationOperation}s.
*
* @param inputType must not be {@literal null}.
* @param operations must not be {@literal null} or empty.
*/
public TypedAggregation(Class<I> inputType, List<AggregationOperation> operations) {
this(inputType, operations, DEFAULT_OPTIONS);
}

/**
* Creates a new {@link TypedAggregation} from the given {@link AggregationOperation}s and the given
* {@link AggregationOptions}.
*
* @param inputType must not be {@literal null}.
* @param operations must not be {@literal null} or empty.
* @param options must not be {@literal null}.
*/
public TypedAggregation(Class<I> inputType, List<AggregationOperation> operations, AggregationOptions options) {

super(operations, options);

Assert.notNull(inputType, "Input type must not be null!");
this.inputType = inputType;
@@ -48,4 +73,14 @@ public class TypedAggregation<I> extends Aggregation {
public Class<I> getInputType() {
return inputType;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.aggregation.Aggregation#withOptions(org.springframework.data.mongodb.core.aggregation.AggregationOptions)
*/
public TypedAggregation<I> withOptions(AggregationOptions options) {

Assert.notNull(options, "AggregationOptions must not be null.");
return new TypedAggregation<I>(inputType, operations, options);
}
}

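The new `withOptions(...)` hook keeps the input type while switching on aggregation options. A sketch, assuming a hypothetical `Order` domain class and the `AggregationOptions` builder exposed via `Aggregation.newAggregationOptions()`:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.aggregation.TypedAggregation;

// Sketch only: Order, customerId and amount are illustrative assumptions.
TypedAggregation<Order> aggregation = newAggregation(Order.class,
        group("customerId").sum("amount").as("total"))
    .withOptions(newAggregationOptions().allowDiskUse(true).build());
```
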
@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -23,6 +23,9 @@ import com.mongodb.DBObject;

/**
* Encapsulates the aggregation framework {@code $unwind}-operation.
* <p>
* We recommend to use the static factory method {@link Aggregation#unwind(String)} instead of creating instances of
* this class directly.
*
* @see http://docs.mongodb.org/manual/reference/aggregation/unwind/#pipe._S_unwind
* @author Thomas Darimont

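For completeness, the `$unwind` factory referenced in the Javadoc above slots into a pipeline the same way (the `items` array and `sku` sub-field are assumed examples):

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.aggregation.Aggregation;

// Flattens a hypothetical "items" array so each element becomes its own document,
// then counts how often each SKU appears.
Aggregation perItem = newAggregation(
    unwind("items"),
    group("items.sku").count().as("timesOrdered"));
```
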
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,7 +20,7 @@ import java.math.BigInteger;
import org.bson.types.ObjectId;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.support.ConversionServiceFactory;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.data.convert.EntityInstantiators;
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToObjectIdConverter;
@@ -46,10 +46,8 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali
*
* @param conversionService
*/
@SuppressWarnings("deprecation")
public AbstractMongoConverter(GenericConversionService conversionService) {
this.conversionService = conversionService == null ? ConversionServiceFactory.createDefaultConversionService()
: conversionService;
this.conversionService = conversionService == null ? new DefaultConversionService() : conversionService;
}

/**

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,13 +17,15 @@ package org.springframework.data.mongodb.core.convert;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -44,6 +46,7 @@ import org.springframework.data.mongodb.core.convert.MongoConverters.DBObjectToS
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigDecimalConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigIntegerConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToURLConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.TermToStringConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.URLToStringConverter;
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
import org.springframework.util.Assert;
@@ -56,6 +59,8 @@ import org.springframework.util.Assert;
* .
*
* @author Oliver Gierke
* @author Thomas Darimont
* @author Christoph Strobl
*/
public class CustomConversions {

@@ -67,10 +72,13 @@ public class CustomConversions {
private final Set<ConvertiblePair> writingPairs;
private final Set<Class<?>> customSimpleTypes;
private final SimpleTypeHolder simpleTypeHolder;
private final Map<Class<?>, HashMap<Class<?>, CacheValue>> cache;

private final List<Object> converters;

private final Map<ConvertiblePair, CacheValue> customReadTargetTypes;
private final Map<ConvertiblePair, CacheValue> customWriteTargetTypes;
private final Map<Class<?>, CacheValue> rawWriteTargetTypes;

/**
* Creates an empty {@link CustomConversions} object.
*/
@@ -90,24 +98,34 @@ public class CustomConversions {
this.readingPairs = new LinkedHashSet<ConvertiblePair>();
this.writingPairs = new LinkedHashSet<ConvertiblePair>();
this.customSimpleTypes = new HashSet<Class<?>>();
this.cache = new HashMap<Class<?>, HashMap<Class<?>, CacheValue>>();
this.customReadTargetTypes = new ConcurrentHashMap<ConvertiblePair, CacheValue>();
this.customWriteTargetTypes = new ConcurrentHashMap<ConvertiblePair, CacheValue>();
this.rawWriteTargetTypes = new ConcurrentHashMap<Class<?>, CacheValue>();

this.converters = new ArrayList<Object>();
this.converters.addAll(converters);
this.converters.add(CustomToStringConverter.INSTANCE);
this.converters.add(BigDecimalToStringConverter.INSTANCE);
this.converters.add(StringToBigDecimalConverter.INSTANCE);
this.converters.add(BigIntegerToStringConverter.INSTANCE);
this.converters.add(StringToBigIntegerConverter.INSTANCE);
this.converters.add(URLToStringConverter.INSTANCE);
this.converters.add(StringToURLConverter.INSTANCE);
this.converters.add(DBObjectToStringConverter.INSTANCE);
this.converters.addAll(JodaTimeConverters.getConvertersToRegister());
List<Object> toRegister = new ArrayList<Object>();

for (Object c : this.converters) {
// Add user provided converters to make sure they can override the defaults
toRegister.addAll(converters);
toRegister.add(CustomToStringConverter.INSTANCE);
toRegister.add(BigDecimalToStringConverter.INSTANCE);
toRegister.add(StringToBigDecimalConverter.INSTANCE);
toRegister.add(BigIntegerToStringConverter.INSTANCE);
toRegister.add(StringToBigIntegerConverter.INSTANCE);
toRegister.add(URLToStringConverter.INSTANCE);
toRegister.add(StringToURLConverter.INSTANCE);
toRegister.add(DBObjectToStringConverter.INSTANCE);
toRegister.add(TermToStringConverter.INSTANCE);

toRegister.addAll(JodaTimeConverters.getConvertersToRegister());
toRegister.addAll(GeoConverters.getConvertersToRegister());

for (Object c : toRegister) {
registerConversion(c);
}

Collections.reverse(toRegister);

this.converters = Collections.unmodifiableList(toRegister);
this.simpleTypeHolder = new SimpleTypeHolder(customSimpleTypes, MongoSimpleTypes.HOLDER);
}

@@ -195,25 +213,25 @@ public class CustomConversions {
*
* @param pair
*/
private void register(ConverterRegistration context) {
private void register(ConverterRegistration converterRegistration) {

ConvertiblePair pair = context.getConvertiblePair();
ConvertiblePair pair = converterRegistration.getConvertiblePair();

if (context.isReading()) {
if (converterRegistration.isReading()) {

readingPairs.add(pair);

if (LOG.isWarnEnabled() && !context.isSimpleSourceType()) {
if (LOG.isWarnEnabled() && !converterRegistration.isSimpleSourceType()) {
LOG.warn(String.format(READ_CONVERTER_NOT_SIMPLE, pair.getSourceType(), pair.getTargetType()));
}
}

if (context.isWriting()) {
if (converterRegistration.isWriting()) {

writingPairs.add(pair);
customSimpleTypes.add(pair.getSourceType());

if (LOG.isWarnEnabled() && !context.isSimpleTargetType()) {
if (LOG.isWarnEnabled() && !converterRegistration.isSimpleTargetType()) {
LOG.warn(String.format(WRITE_CONVERTER_NOT_SIMPLE, pair.getSourceType(), pair.getTargetType()));
}
}
@@ -223,84 +241,127 @@ public class CustomConversions {
* Returns the target type to convert to in case we have a custom conversion registered to convert the given source
* type into a Mongo native one.
*
* @param source must not be {@literal null}
* @param sourceType must not be {@literal null}
* @return
*/
public Class<?> getCustomWriteTarget(Class<?> source) {
return getCustomWriteTarget(source, null);
public Class<?> getCustomWriteTarget(final Class<?> sourceType) {

return getOrCreateAndCache(sourceType, rawWriteTargetTypes, new Producer() {

@Override
public Class<?> get() {
return getCustomTarget(sourceType, null, writingPairs);
}
});
}

/**
* Returns the target type we can write an onject of the given source type to. The returned type might be a subclass
* oth the given expected type though. If {@code expectedTargetType} is {@literal null} we will simply return the
* first target type matching or {@literal null} if no conversion can be found.
* Returns the target type we can readTargetWriteLocl an inject of the given source type to. The returned type might
* be a subclass of the given expected type though. If {@code expectedTargetType} is {@literal null} we will simply
* return the first target type matching or {@literal null} if no conversion can be found.
*
* @param source must not be {@literal null}
* @param expectedTargetType
* @param sourceType must not be {@literal null}
* @param requestedTargetType
* @return
*/
public Class<?> getCustomWriteTarget(Class<?> source, Class<?> expectedTargetType) {
public Class<?> getCustomWriteTarget(final Class<?> sourceType, final Class<?> requestedTargetType) {

Assert.notNull(source);
return getCustomTarget(source, expectedTargetType, writingPairs);
if (requestedTargetType == null) {
return getCustomWriteTarget(sourceType);
}

return getOrCreateAndCache(new ConvertiblePair(sourceType, requestedTargetType), customWriteTargetTypes,
new Producer() {

@Override
public Class<?> get() {
return getCustomTarget(sourceType, requestedTargetType, writingPairs);
}
});
}

/**
* Returns whether we have a custom conversion registered to write into a Mongo native type. The returned type might
* be a subclass oth the given expected type though.
* Returns whether we have a custom conversion registered to readTargetWriteLocl into a Mongo native type. The
* returned type might be a subclass of the given expected type though.
*
* @param source must not be {@literal null}
* @param sourceType must not be {@literal null}
* @return
*/
public boolean hasCustomWriteTarget(Class<?> source) {
return hasCustomWriteTarget(source, null);
public boolean hasCustomWriteTarget(Class<?> sourceType) {
return hasCustomWriteTarget(sourceType, null);
}

/**
* Returns whether we have a custom conversion registered to write an object of the given source type into an object
* of the given Mongo native target type.
* Returns whether we have a custom conversion registered to readTargetWriteLocl an object of the given source type
* into an object of the given Mongo native target type.
*
* @param source must not be {@literal null}.
* @param expectedTargetType
* @param sourceType must not be {@literal null}.
* @param requestedTargetType
* @return
*/
public boolean hasCustomWriteTarget(Class<?> source, Class<?> expectedTargetType) {
return getCustomWriteTarget(source, expectedTargetType) != null;
public boolean hasCustomWriteTarget(Class<?> sourceType, Class<?> requestedTargetType) {
return getCustomWriteTarget(sourceType, requestedTargetType) != null;
}

/**
* Returns whether we have a custom conversion registered to read the given source into the given target type.
* Returns whether we have a custom conversion registered to readTargetReadLock the given source into the given target
* type.
*
* @param source must not be {@literal null}
* @param expectedTargetType must not be {@literal null}
* @param sourceType must not be {@literal null}
* @param requestedTargetType must not be {@literal null}
* @return
*/
public boolean hasCustomReadTarget(Class<?> source, Class<?> expectedTargetType) {

Assert.notNull(source);
Assert.notNull(expectedTargetType);

return getCustomReadTarget(source, expectedTargetType) != null;
public boolean hasCustomReadTarget(Class<?> sourceType, Class<?> requestedTargetType) {
return getCustomReadTarget(sourceType, requestedTargetType) != null;
}

/**
* Inspects the given {@link ConvertiblePair} for ones that have a source compatible type as source. Additionally
* checks assignabilty of the target type if one is given.
* Returns the actual target type for the given {@code sourceType} and {@code requestedTargetType}. Note that the
* returned {@link Class} could be an assignable type to the given {@code requestedTargetType}.
*
* @param source must not be {@literal null}
* @param expectedTargetType
* @param pairs must not be {@literal null}
* @param sourceType must not be {@literal null}.
* @param requestedTargetType can be {@literal null}.
* @return
*/
private static Class<?> getCustomTarget(Class<?> source, Class<?> expectedTargetType, Iterable<ConvertiblePair> pairs) {
private Class<?> getCustomReadTarget(final Class<?> sourceType, final Class<?> requestedTargetType) {

Assert.notNull(source);
if (requestedTargetType == null) {
return null;
}

return getOrCreateAndCache(new ConvertiblePair(sourceType, requestedTargetType), customReadTargetTypes,
new Producer() {

@Override
public Class<?> get() {
return getCustomTarget(sourceType, requestedTargetType, readingPairs);
}
});
}

/**
* Inspects the given {@link ConvertiblePair}s for ones that have a source compatible type as source. Additionally
* checks assignability of the target type if one is given.
*
* @param sourceType must not be {@literal null}.
* @param requestedTargetType can be {@literal null}.
* @param pairs must not be {@literal null}.
* @return
*/
private static Class<?> getCustomTarget(Class<?> sourceType, Class<?> requestedTargetType,
Collection<ConvertiblePair> pairs) {

Assert.notNull(sourceType);
Assert.notNull(pairs);

if (requestedTargetType != null && pairs.contains(new ConvertiblePair(sourceType, requestedTargetType))) {
return requestedTargetType;
}

for (ConvertiblePair typePair : pairs) {
if (typePair.getSourceType().isAssignableFrom(source)) {
if (typePair.getSourceType().isAssignableFrom(sourceType)) {
Class<?> targetType = typePair.getTargetType();
if (expectedTargetType == null || targetType.isAssignableFrom(expectedTargetType)) {
if (requestedTargetType == null || targetType.isAssignableFrom(requestedTargetType)) {
return targetType;
}
}
@@ -309,27 +370,32 @@ public class CustomConversions {
return null;
}

private Class<?> getCustomReadTarget(Class<?> source, Class<?> expectedTargetType) {
/**
* Will try to find a value for the given key in the given cache or produce one using the given {@link Producer} and
* store it in the cache.
*
* @param key the key to lookup a potentially existing value, must not be {@literal null}.
* @param cache the cache to find the value in, must not be {@literal null}.
* @param producer the {@link Producer} to create values to cache, must not be {@literal null}.
* @return
*/
private static <T> Class<?> getOrCreateAndCache(T key, Map<T, CacheValue> cache, Producer producer) {

Class<?> type = expectedTargetType == null ? PlaceholderType.class : expectedTargetType;
CacheValue cacheValue = cache.get(key);

Map<Class<?>, CacheValue> map;
CacheValue toReturn;

if ((map = cache.get(source)) == null || (toReturn = map.get(type)) == null) {

Class<?> target = getCustomTarget(source, type, readingPairs);

if (cache.get(source) == null) {
cache.put(source, new HashMap<Class<?>, CacheValue>());
}

Map<Class<?>, CacheValue> value = cache.get(source);
toReturn = target == null ? CacheValue.NULL : new CacheValue(target);
value.put(type, toReturn);
if (cacheValue != null) {
return cacheValue.getType();
}

return toReturn.clazz;
Class<?> type = producer.get();
cache.put(key, CacheValue.of(type));

return type;
}

private interface Producer {

Class<?> get();
}

@WritingConverter
@@ -338,8 +404,10 @@ public class CustomConversions {
INSTANCE;

public Set<ConvertiblePair> getConvertibleTypes() {

ConvertiblePair localeToString = new ConvertiblePair(Locale.class, String.class);
ConvertiblePair booleanToString = new ConvertiblePair(Character.class, String.class);

return new HashSet<ConvertiblePair>(Arrays.asList(localeToString, booleanToString));
}

@@ -348,29 +416,29 @@ public class CustomConversions {
}
}

/**
* Placeholder type to allow registering not-found values in the converter cache.
*
* @author Patryk Wasik
* @author Oliver Gierke
*/
private static class PlaceholderType {

}

/**
* Wrapper to safely store {@literal null} values in the type cache.
*
* @author Patryk Wasik
* @author Oliver Gierke
* @author Thomas Darimont
*/
private static class CacheValue {

public static final CacheValue NULL = new CacheValue(null);
private final Class<?> clazz;
private static final CacheValue ABSENT = new CacheValue(null);

public CacheValue(Class<?> clazz) {
this.clazz = clazz;
private final Class<?> type;

public CacheValue(Class<?> type) {
this.type = type;
}

public Class<?> getType() {
return type;
}

static CacheValue of(Class<?> type) {
return type == null ? ABSENT : new CacheValue(type);
}
}
}

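User-supplied converters are now registered first so they can override the defaults, and resolved target types are cached per `ConvertiblePair`. A registration sketch, where the converter and the types it handles are made-up examples:

```java
import java.util.Arrays;

import org.springframework.core.convert.converter.Converter;
import org.springframework.data.mongodb.core.convert.CustomConversions;

// Hypothetical converter turning a java.util.Currency into its ISO code for storage.
Converter<java.util.Currency, String> currencyToString = new Converter<java.util.Currency, String>() {

    @Override
    public String convert(java.util.Currency source) {
        return source.getCurrencyCode();
    }
};

CustomConversions conversions = new CustomConversions(Arrays.asList(currencyToString));
// conversions.hasCustomWriteTarget(java.util.Currency.class) now reports true,
// and repeated lookups hit the new ConcurrentHashMap-backed caches.
```
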
@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -34,7 +34,7 @@ import com.mongodb.DBObject;
*/
class DBObjectAccessor {

private final DBObject dbObject;
private final BasicDBObject dbObject;

/**
* Creates a new {@link DBObjectAccessor} for the given {@link DBObject}.
@@ -46,7 +46,7 @@ class DBObjectAccessor {
Assert.notNull(dbObject, "DBObject must not be null!");
Assert.isInstanceOf(BasicDBObject.class, dbObject, "Given DBObject must be a BasicDBObject!");

this.dbObject = dbObject;
this.dbObject = (BasicDBObject) dbObject;
}

/**
@@ -62,6 +62,11 @@ class DBObjectAccessor {
Assert.notNull(prop, "MongoPersistentProperty must not be null!");
String fieldName = prop.getFieldName();

if (!fieldName.contains(".")) {
dbObject.put(fieldName, value);
return;
}

Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
DBObject dbObject = this.dbObject;

@@ -87,12 +92,16 @@ class DBObjectAccessor {
* @param property must not be {@literal null}.
* @return
*/
@SuppressWarnings("unchecked")
public Object get(MongoPersistentProperty property) {

String fieldName = property.getFieldName();

if (!fieldName.contains(".")) {
return this.dbObject.get(fieldName);
}

Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
Map<Object, Object> source = this.dbObject.toMap();
Map<String, Object> source = this.dbObject;
Object result = null;

while (source != null && parts.hasNext()) {
@@ -108,14 +117,14 @@ class DBObjectAccessor {
}

@SuppressWarnings("unchecked")
private Map<Object, Object> getAsMap(Object source) {
private Map<String, Object> getAsMap(Object source) {

if (source instanceof BasicDBObject) {
return ((DBObject) source).toMap();
return (BasicDBObject) source;
}

if (source instanceof Map) {
return (Map<Object, Object>) source;
return (Map<String, Object>) source;
}

return null;

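The accessor now writes and reads dot-separated field names as nested documents rather than flat keys. A sketch of the mapping effect, where the domain type and the `@Field` path are illustrative assumptions:

```java
import org.springframework.data.mongodb.core.mapping.Field;

// Hypothetical domain type: thanks to the nested put/get above, the mapped property
// is stored as { foo : { bar : ... } } instead of under a literal "foo.bar" key.
class Wrapper {

    @Field("foo.bar")
    String value;
}
```
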
@@ -1,5 +1,5 @@
/*
* Copyright 2012 the original author or authors.
* Copyright 2012-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -31,18 +31,30 @@ import com.mongodb.DBObject;
*/
class DBObjectPropertyAccessor extends MapAccessor {

static MapAccessor INSTANCE = new DBObjectPropertyAccessor();
static final MapAccessor INSTANCE = new DBObjectPropertyAccessor();

/*
* (non-Javadoc)
* @see org.springframework.context.expression.MapAccessor#getSpecificTargetClasses()
*/
@Override
public Class<?>[] getSpecificTargetClasses() {
return new Class[] { DBObject.class };
}

/*
* (non-Javadoc)
* @see org.springframework.context.expression.MapAccessor#canRead(org.springframework.expression.EvaluationContext, java.lang.Object, java.lang.String)
*/
@Override
public boolean canRead(EvaluationContext context, Object target, String name) {
return true;
}

/*
* (non-Javadoc)
* @see org.springframework.context.expression.MapAccessor#read(org.springframework.expression.EvaluationContext, java.lang.Object, java.lang.String)
*/
@Override
@SuppressWarnings("unchecked")
public TypedValue read(EvaluationContext context, Object target, String name) {
@@ -52,4 +64,4 @@ class DBObjectPropertyAccessor extends MapAccessor {
Object value = source.get(name);
return value == null ? TypedValue.NULL : new TypedValue(value);
}
}
}

@@ -0,0 +1,28 @@
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.convert;

import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;

import com.mongodb.DBRef;

/**
* @author Oliver Gierke
*/
public interface DbRefProxyHandler {

Object populateId(MongoPersistentProperty property, DBRef source, Object proxy);
}
@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -24,15 +24,23 @@ import com.mongodb.DBRef;
* Used to resolve associations annotated with {@link org.springframework.data.mongodb.core.mapping.DBRef}.
*
* @author Thomas Darimont
* @author Oliver Gierke
* @since 1.4
*/
public interface DbRefResolver {

/**
* Resolves the given {@link DBRef} into an object of the given {@link MongoPersistentProperty}'s type. The method
* might return a proxy object for the {@link DBRef} or resolve it immediately. In both cases the
* {@link DbRefResolverCallback} will be used to obtain the actual backing object.
*
* @param property will never be {@literal null}.
* @param dbref the {@link DBRef} to resolve.
* @param callback will never be {@literal null}.
* @return
*/
Object resolveDbRef(MongoPersistentProperty property, DbRefResolverCallback callback);
Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback,
DbRefProxyHandler proxyHandler);

/**
* Creates a {@link DBRef} instance for the given {@link org.springframework.data.mongodb.core.mapping.DBRef}

@@ -0,0 +1,79 @@
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.convert;

import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.model.BeanWrapper;
import org.springframework.data.mapping.model.DefaultSpELExpressionEvaluator;
import org.springframework.data.mapping.model.SpELContext;
import org.springframework.data.mapping.model.SpELExpressionEvaluator;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.DBRef;

/**
* @author Oliver Gierke
*/
class DefaultDbRefProxyHandler implements DbRefProxyHandler {

private final SpELContext spELContext;
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
private final ValueResolver resolver;

/**
* @param spELContext must not be {@literal null}.
* @param conversionService must not be {@literal null}.
* @param mappingContext must not be {@literal null}.
*/
public DefaultDbRefProxyHandler(SpELContext spELContext,
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext, ValueResolver resolver) {

this.spELContext = spELContext;
this.mappingContext = mappingContext;
this.resolver = resolver;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.convert.DbRefProxyHandler#populateId(com.mongodb.DBRef, java.lang.Object)
*/
@Override
public Object populateId(MongoPersistentProperty property, DBRef source, Object proxy) {

if (source == null) {
return proxy;
}

MongoPersistentEntity<?> persistentEntity = mappingContext.getPersistentEntity(property);
MongoPersistentProperty idProperty = persistentEntity.getIdProperty();

if(idProperty.usePropertyAccess()) {
return proxy;
}

SpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(proxy, spELContext);
BeanWrapper<Object> proxyWrapper = BeanWrapper.create(proxy, null);

DBObject object = new BasicDBObject(idProperty.getFieldName(), source.getId());
ObjectPath objectPath = ObjectPath.ROOT.push(proxy, persistentEntity, null);
proxyWrapper.setProperty(idProperty, resolver.getValueInternal(idProperty, object, evaluator, objectPath));

return proxy;
}
}
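The proxy handler above exists so that a lazily resolved association can still expose its identifier without triggering a database round trip. Declaring such an association stays the same; a sketch with made-up domain types:

```java
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.Document;

// Hypothetical aggregate: the referenced publisher is only fetched when accessed,
// but its id can be populated on the proxy by the handler shown above.
@Document
class Book {

    String id;

    @DBRef(lazy = true)
    Publisher publisher; // Publisher is an assumed example type
}
```
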
@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,6 +15,8 @@
*/
package org.springframework.data.mongodb.core.convert;

import static org.springframework.util.ReflectionUtils.*;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
@@ -23,22 +25,19 @@ import java.lang.reflect.Method;

import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.objenesis.Objenesis;
import org.objenesis.ObjenesisStd;
import org.springframework.aop.framework.ProxyFactory;
import org.springframework.cglib.proxy.Callback;
import org.springframework.cglib.proxy.Enhancer;
import org.springframework.cglib.proxy.Factory;
import org.springframework.cglib.proxy.MethodProxy;
import org.springframework.core.SpringVersion;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.LazyLoadingException;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.objenesis.ObjenesisStd;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;

@@ -51,14 +50,13 @@ import com.mongodb.DBRef;
*
* @author Thomas Darimont
* @author Oliver Gierke
* @since 1.4
*/
public class DefaultDbRefResolver implements DbRefResolver {

private static final boolean IS_SPRING_4_OR_BETTER = SpringVersion.getVersion().startsWith("4");
private static final boolean OBJENESIS_PRESENT = ClassUtils.isPresent("org.objenesis.Objenesis", null);

private final MongoDbFactory mongoDbFactory;
private final PersistenceExceptionTranslator exceptionTranslator;
private final ObjenesisStd objenesis;

/**
* Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDbFactory}.
@@ -71,6 +69,7 @@ public class DefaultDbRefResolver implements DbRefResolver {

this.mongoDbFactory = mongoDbFactory;
this.exceptionTranslator = mongoDbFactory.getExceptionTranslator();
this.objenesis = new ObjenesisStd(true);
}

/*
@@ -78,13 +77,14 @@ public class DefaultDbRefResolver implements DbRefResolver {
* @see org.springframework.data.mongodb.core.convert.DbRefResolver#resolveDbRef(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty, org.springframework.data.mongodb.core.convert.DbRefResolverCallback)
*/
@Override
public Object resolveDbRef(MongoPersistentProperty property, DbRefResolverCallback callback) {
public Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback,
DbRefProxyHandler handler) {

Assert.notNull(property, "Property must not be null!");
Assert.notNull(callback, "Callback must not be null!");

if (isLazyDbRef(property)) {
return createLazyLoadingProxy(property, callback);
return createLazyLoadingProxy(property, dbref, callback, handler);
}

return callback.resolve(property);
@@ -109,39 +109,57 @@ public class DefaultDbRefResolver implements DbRefResolver {
* eventually resolve the value of the property.
*
* @param property must not be {@literal null}.
* @param dbref can be {@literal null}.
* @param callback must not be {@literal null}.
* @return
*/
private Object createLazyLoadingProxy(MongoPersistentProperty property, DbRefResolverCallback callback) {
private Object createLazyLoadingProxy(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback,
DbRefProxyHandler handler) {

Class<?> propertyType = property.getType();
LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, dbref, exceptionTranslator, callback);

if (!propertyType.isInterface()) {

Factory factory = (Factory) objenesis.newInstance(getEnhancedTypeFor(propertyType));
factory.setCallbacks(new Callback[] { interceptor });

return handler.populateId(property, dbref, factory);
}

ProxyFactory proxyFactory = new ProxyFactory();
Class<?> propertyType = property.getType();

for (Class<?> type : propertyType.getInterfaces()) {
proxyFactory.addInterface(type);
}

LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, exceptionTranslator, callback);
proxyFactory.addInterface(LazyLoadingProxy.class);
proxyFactory.addInterface(propertyType);
proxyFactory.addAdvice(interceptor);

if (propertyType.isInterface()) {
proxyFactory.addInterface(propertyType);
proxyFactory.addAdvice(interceptor);
return proxyFactory.getProxy();
}

proxyFactory.setProxyTargetClass(true);
proxyFactory.setTargetClass(propertyType);

if (IS_SPRING_4_OR_BETTER || !OBJENESIS_PRESENT) {
proxyFactory.addAdvice(interceptor);
return proxyFactory.getProxy();
}

return ObjenesisProxyEnhancer.enhanceAndGet(proxyFactory, propertyType, interceptor);
return handler.populateId(property, dbref, proxyFactory.getProxy());
}

/**
* @param property
* Returns the CGLib enhanced type for the given source type.
*
* @param type
* @return
*/
private Class<?> getEnhancedTypeFor(Class<?> type) {

Enhancer enhancer = new Enhancer();
enhancer.setSuperclass(type);
enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class);
enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class });

return enhancer.createClass();
}

/**
* Returns whether the property shall be resolved lazily.
*
* @param property must not be {@literal null}.
* @return
*/
private boolean isLazyDbRef(MongoPersistentProperty property) {
@@ -154,31 +172,48 @@ public class DefaultDbRefResolver implements DbRefResolver {
* guaranteed to be performed only once.
*
* @author Thomas Darimont
* @author Oliver Gierke
* @author Christoph Strobl
*/
static class LazyLoadingInterceptor implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor,
Serializable {

private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD;

private final DbRefResolverCallback callback;
private final MongoPersistentProperty property;
private final PersistenceExceptionTranslator exceptionTranslator;

private volatile boolean resolved;
private Object result;
private DBRef dbref;

static {
try {
INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget");
TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef");
FINALIZE_METHOD = Object.class.getDeclaredMethod("finalize");
} catch (Exception e) {
throw new RuntimeException(e);
}
}

/**
* Creates a new {@link LazyLoadingInterceptor} for the given {@link MongoPersistentProperty},
* {@link PersistenceExceptionTranslator} and {@link DbRefResolverCallback}.
*
* @param property must not be {@literal null}.
* @param dbref can be {@literal null}.
* @param callback must not be {@literal null}.
*/
public LazyLoadingInterceptor(MongoPersistentProperty property, PersistenceExceptionTranslator exceptionTranslator,
DbRefResolverCallback callback) {
public LazyLoadingInterceptor(MongoPersistentProperty property, DBRef dbref,
PersistenceExceptionTranslator exceptionTranslator, DbRefResolverCallback callback) {

Assert.notNull(property, "Property must not be null!");
Assert.notNull(exceptionTranslator, "Exception translator must not be null!");
Assert.notNull(callback, "Callback must not be null!");

this.dbref = dbref;
this.callback = callback;
this.exceptionTranslator = exceptionTranslator;
this.property = property;
@@ -199,9 +234,100 @@ public class DefaultDbRefResolver implements DbRefResolver {
*/
@Override
public Object intercept(Object obj, Method method, Object[] args, MethodProxy proxy) throws Throwable {
return ReflectionUtils.isObjectMethod(method) ? method.invoke(obj, args) : method.invoke(ensureResolved(), args);

if (INITIALIZE_METHOD.equals(method)) {
return ensureResolved();
}

if (TO_DBREF_METHOD.equals(method)) {
return this.dbref;
}

if (isObjectMethod(method) && Object.class.equals(method.getDeclaringClass())) {

if (ReflectionUtils.isToStringMethod(method)) {
return proxyToString(proxy);
}

if (ReflectionUtils.isEqualsMethod(method)) {
return proxyEquals(proxy, args[0]);
}

if (ReflectionUtils.isHashCodeMethod(method)) {
return proxyHashCode(proxy);
}

// DATAMONGO-1076 - finalize methods should not trigger proxy initialization
if (FINALIZE_METHOD.equals(method)) {
return null;
}
}

Object target = ensureResolved();

if (target == null) {
return null;
}

return method.invoke(target, args);
}

/**
* Returns a to string representation for the given {@code proxy}.
*
* @param proxy
* @return
*/
private String proxyToString(Object proxy) {

StringBuilder description = new StringBuilder();
if (dbref != null) {
description.append(dbref.getRef());
description.append(":");
description.append(dbref.getId());
} else {
description.append(System.identityHashCode(proxy));
}
description.append("$").append(LazyLoadingProxy.class.getSimpleName());

return description.toString();
}

/**
* Returns the hashcode for the given {@code proxy}.
*
* @param proxy
* @return
*/
private int proxyHashCode(Object proxy) {
return proxyToString(proxy).hashCode();
}

/**
* Performs an equality check for the given {@code proxy}.
*
* @param proxy
* @param that
* @return
*/
private boolean proxyEquals(Object proxy, Object that) {

if (!(that instanceof LazyLoadingProxy)) {
return false;
}

if (that == proxy) {
return true;
}

return proxyToString(proxy).equals(that.toString());
}

/**
* Will trigger the resolution if the proxy is not resolved already or return a previously resolved result.
*
* @return
*/
private Object ensureResolved() {

if (!resolved) {
@@ -212,16 +338,28 @@ public class DefaultDbRefResolver implements DbRefResolver {
return this.result;
}

/**
* Callback method for serialization.
*
* @param out
* @throws IOException
*/
private void writeObject(ObjectOutputStream out) throws IOException {

ensureResolved();
out.writeObject(this.result);
}

/**
* Callback method for deserialization.
*
* @param in
* @throws IOException
*/
private void readObject(ObjectInputStream in) throws IOException {

try {
this.resolved = true; // Object is guaranteed to be resolved after serializations
this.resolved = true;
this.result = in.readObject();
} catch (ClassNotFoundException e) {
throw new LazyLoadingException("Could not deserialize result", e);
@@ -229,6 +367,8 @@ public class DefaultDbRefResolver implements DbRefResolver {
}

/**
* Resolves the proxy into its backing object.
*
* @return
*/
private synchronized Object resolve() {
@@ -248,35 +388,5 @@ public class DefaultDbRefResolver implements DbRefResolver {

return result;
}

public boolean isResolved() {
return resolved;
}

public Object getResult() {
return result;
}
}

/**
* Static class to accomodate optional dependency on Objenesis.
*
* @author Oliver Gierke
*/
private static class ObjenesisProxyEnhancer {

private static final Objenesis OBJENESIS = new ObjenesisStd(true);

public static Object enhanceAndGet(ProxyFactory proxyFactory, Class<?> type,
org.springframework.cglib.proxy.MethodInterceptor interceptor) {

Enhancer enhancer = new Enhancer();
enhancer.setSuperclass(type);
enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class);

Factory factory = (Factory) OBJENESIS.newInstance(enhancer.createClass());
factory.setCallbacks(new Callback[] { interceptor });
return factory;
}
}
}

@@ -0,0 +1,61 @@
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.convert;

import org.springframework.data.mapping.model.SpELExpressionEvaluator;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;

import com.mongodb.DBObject;

/**
* Default implementation of {@link DbRefResolverCallback}.
*
* @author Oliver Gierke
*/
class DefaultDbRefResolverCallback implements DbRefResolverCallback {

private final DBObject surroundingObject;
private final ObjectPath path;
private final ValueResolver resolver;
private final SpELExpressionEvaluator evaluator;

/**
* Creates a new {@link DefaultDbRefResolverCallback} using the given {@link DBObject}, {@link ObjectPath},
* {@link ValueResolver} and {@link SpELExpressionEvaluator}.
*
* @param surroundingObject must not be {@literal null}.
* @param path must not be {@literal null}.
* @param evaluator must not be {@literal null}.
* @param resolver must not be {@literal null}.
*/
public DefaultDbRefResolverCallback(DBObject surroundingObject, ObjectPath path, SpELExpressionEvaluator evaluator,
ValueResolver resolver) {

this.surroundingObject = surroundingObject;
this.path = path;
this.resolver = resolver;
this.evaluator = evaluator;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.convert.DbRefResolverCallback#resolve(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty)
*/
@Override
public Object resolve(MongoPersistentProperty property) {
return resolver.getValueInternal(property, surroundingObject, evaluator, path);
}
}
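As a rough illustration of the contract the LazyLoadingInterceptor and DbRefResolverCallback above follow (resolve on first access, cache the result afterwards), a stripped-down sketch in plain Java, which is not the framework code, might look like this:

// Illustration only: memoized deferred resolution, independent of Spring Data internals.
import java.util.function.Supplier;

class LazyValue<T> {

    private final Supplier<T> callback; // plays the role of DbRefResolverCallback.resolve(...)
    private volatile boolean resolved;
    private T result;

    LazyValue(Supplier<T> callback) {
        this.callback = callback;
    }

    // The first call triggers the (potentially expensive) lookup; later calls reuse the result.
    synchronized T get() {
        if (!resolved) {
            result = callback.get();
            resolved = true;
        }
        return result;
    }
}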
@@ -0,0 +1,448 @@
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.convert;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import org.springframework.core.convert.converter.Converter;
import org.springframework.data.convert.ReadingConverter;
import org.springframework.data.convert.WritingConverter;
import org.springframework.data.geo.Box;
import org.springframework.data.geo.Circle;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.Metrics;
import org.springframework.data.geo.Point;
import org.springframework.data.geo.Polygon;
import org.springframework.data.geo.Shape;
import org.springframework.data.mongodb.core.geo.Sphere;
import org.springframework.data.mongodb.core.query.GeoCommand;
import org.springframework.util.Assert;

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
* Wrapper class to contain useful geo structure converters for the usage with Mongo.
*
* @author Thomas Darimont
* @author Oliver Gierke
* @since 1.5
*/
abstract class GeoConverters {

/**
* Private constructor to prevent instantiation.
*/
private GeoConverters() {}

/**
* Returns the geo converters to be registered.
*
* @return
*/
public static Collection<? extends Object> getConvertersToRegister() {
return Arrays.asList( //
BoxToDbObjectConverter.INSTANCE //
, PolygonToDbObjectConverter.INSTANCE //
, CircleToDbObjectConverter.INSTANCE //
, SphereToDbObjectConverter.INSTANCE //
, DbObjectToBoxConverter.INSTANCE //
, DbObjectToPolygonConverter.INSTANCE //
, DbObjectToCircleConverter.INSTANCE //
, DbObjectToSphereConverter.INSTANCE //
, DbObjectToPointConverter.INSTANCE //
, PointToDbObjectConverter.INSTANCE //
, GeoCommandToDbObjectConverter.INSTANCE);
}

/**
* Converts a {@link List} of {@link Double}s into a {@link Point}.
*
* @author Thomas Darimont
* @since 1.5
*/
@ReadingConverter
public static enum DbObjectToPointConverter implements Converter<DBObject, Point> {

INSTANCE;

/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
@Override
public Point convert(DBObject source) {

Assert.isTrue(source.keySet().size() == 2, "Source must contain 2 elements");

return source == null ? null : new Point((Double) source.get("x"), (Double) source.get("y"));
}
}

/**
* Converts a {@link Point} into a {@link List} of {@link Double}s.
*
* @author Thomas Darimont
* @since 1.5
*/
public static enum PointToDbObjectConverter implements Converter<Point, DBObject> {

INSTANCE;

/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
@Override
public DBObject convert(Point source) {
return source == null ? null : new BasicDBObject("x", source.getX()).append("y", source.getY());
}
}

/**
* Converts a {@link Box} into a {@link BasicDBList}.
*
* @author Thomas Darimont
* @since 1.5
*/
@WritingConverter
public static enum BoxToDbObjectConverter implements Converter<Box, DBObject> {

INSTANCE;

/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
@Override
public DBObject convert(Box source) {

if (source == null) {
return null;
}

BasicDBObject result = new BasicDBObject();
result.put("first", PointToDbObjectConverter.INSTANCE.convert(source.getFirst()));
result.put("second", PointToDbObjectConverter.INSTANCE.convert(source.getSecond()));
return result;
}
}

/**
* Converts a {@link BasicDBList} into a {@link Box}.
*
* @author Thomas Darimont
* @since 1.5
*/
@ReadingConverter
public static enum DbObjectToBoxConverter implements Converter<DBObject, Box> {

INSTANCE;

/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
@Override
public Box convert(DBObject source) {

if (source == null) {
return null;
}

Point first = DbObjectToPointConverter.INSTANCE.convert((DBObject) source.get("first"));
Point second = DbObjectToPointConverter.INSTANCE.convert((DBObject) source.get("second"));

return new Box(first, second);
}
}

/**
* Converts a {@link Circle} into a {@link BasicDBList}.
*
* @author Thomas Darimont
* @since 1.5
*/
public static enum CircleToDbObjectConverter implements Converter<Circle, DBObject> {

INSTANCE;

/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
@Override
public DBObject convert(Circle source) {

if (source == null) {
return null;
}

DBObject result = new BasicDBObject();
result.put("center", PointToDbObjectConverter.INSTANCE.convert(source.getCenter()));
result.put("radius", source.getRadius().getNormalizedValue());
result.put("metric", source.getRadius().getMetric().toString());
return result;
}
}

/**
* Converts a {@link DBObject} into a {@link Circle}.
*
* @author Thomas Darimont
* @since 1.5
*/
@ReadingConverter
public static enum DbObjectToCircleConverter implements Converter<DBObject, Circle> {

INSTANCE;

/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
@Override
public Circle convert(DBObject source) {

if (source == null) {
return null;
}

DBObject center = (DBObject) source.get("center");
Double radius = (Double) source.get("radius");

Distance distance = new Distance(radius);

if (source.containsField("metric")) {

String metricString = (String) source.get("metric");
Assert.notNull(metricString, "Metric must not be null!");

distance = distance.in(Metrics.valueOf(metricString));
}

Assert.notNull(center, "Center must not be null!");
Assert.notNull(radius, "Radius must not be null!");

return new Circle(DbObjectToPointConverter.INSTANCE.convert(center), distance);
}
}

/**
* Converts a {@link Sphere} into a {@link BasicDBList}.
*
* @author Thomas Darimont
* @since 1.5
*/
public static enum SphereToDbObjectConverter implements Converter<Sphere, DBObject> {

INSTANCE;

/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
@Override
public DBObject convert(Sphere source) {

if (source == null) {
return null;
}

DBObject result = new BasicDBObject();
result.put("center", PointToDbObjectConverter.INSTANCE.convert(source.getCenter()));
result.put("radius", source.getRadius().getNormalizedValue());
result.put("metric", source.getRadius().getMetric().toString());
return result;
}
}

/**
* Converts a {@link BasicDBList} into a {@link Sphere}.
*
* @author Thomas Darimont
* @since 1.5
*/
@ReadingConverter
public static enum DbObjectToSphereConverter implements Converter<DBObject, Sphere> {

INSTANCE;

/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
@Override
public Sphere convert(DBObject source) {

if (source == null) {
return null;
}

DBObject center = (DBObject) source.get("center");
Double radius = (Double) source.get("radius");

Distance distance = new Distance(radius);

if (source.containsField("metric")) {

String metricString = (String) source.get("metric");
Assert.notNull(metricString, "Metric must not be null!");

distance = distance.in(Metrics.valueOf(metricString));
}

Assert.notNull(center, "Center must not be null!");
Assert.notNull(radius, "Radius must not be null!");

return new Sphere(DbObjectToPointConverter.INSTANCE.convert(center), distance);
}
}

/**
* Converts a {@link Polygon} into a {@link BasicDBList}.
*
* @author Thomas Darimont
* @since 1.5
*/
public static enum PolygonToDbObjectConverter implements Converter<Polygon, DBObject> {

INSTANCE;

/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
@Override
public DBObject convert(Polygon source) {

if (source == null) {
return null;
}

List<Point> points = source.getPoints();
List<DBObject> pointTuples = new ArrayList<DBObject>(points.size());

for (Point point : points) {
pointTuples.add(PointToDbObjectConverter.INSTANCE.convert(point));
}

DBObject result = new BasicDBObject();
result.put("points", pointTuples);
return result;
}
}

/**
* Converts a {@link BasicDBList} into a {@link Polygon}.
*
* @author Thomas Darimont
* @since 1.5
*/
@ReadingConverter
public static enum DbObjectToPolygonConverter implements Converter<DBObject, Polygon> {

INSTANCE;

/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
@Override
@SuppressWarnings({ "unchecked" })
public Polygon convert(DBObject source) {

if (source == null) {
return null;
}

List<DBObject> points = (List<DBObject>) source.get("points");
List<Point> newPoints = new ArrayList<Point>(points.size());

for (DBObject element : points) {

Assert.notNull(element, "Point elements of polygon must not be null!");
newPoints.add(DbObjectToPointConverter.INSTANCE.convert(element));
}

return new Polygon(newPoints);
}
}

/**
* Converts a {@link Sphere} into a {@link BasicDBList}.
*
* @author Thomas Darimont
* @since 1.5
*/
public static enum GeoCommandToDbObjectConverter implements Converter<GeoCommand, DBObject> {

INSTANCE;

/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
@Override
public DBObject convert(GeoCommand source) {

if (source == null) {
return null;
}

BasicDBList argument = new BasicDBList();

Shape shape = source.getShape();

if (shape instanceof Box) {

argument.add(toList(((Box) shape).getFirst()));
argument.add(toList(((Box) shape).getSecond()));

} else if (shape instanceof Circle) {

argument.add(toList(((Circle) shape).getCenter()));
argument.add(((Circle) shape).getRadius().getNormalizedValue());

} else if (shape instanceof Circle) {

argument.add(toList(((Circle) shape).getCenter()));
argument.add(((Circle) shape).getRadius());

} else if (shape instanceof Polygon) {

for (Point point : ((Polygon) shape).getPoints()) {
argument.add(toList(point));
}

} else if (shape instanceof Sphere) {

argument.add(toList(((Sphere) shape).getCenter()));
argument.add(((Sphere) shape).getRadius().getNormalizedValue());
}

return new BasicDBObject(source.getCommand(), argument);
}
}

static List<Double> toList(Point point) {
return Arrays.asList(point.getX(), point.getY());
}
}
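A hedged usage sketch for the converters above: because the enclosing GeoConverters class is package-private, a direct call like this would only compile from inside org.springframework.data.mongodb.core.convert; in normal use the enums are registered via getConvertersToRegister() and invoked by the mapping layer.

// Illustration only; the class name GeoConvertersRoundTrip is hypothetical.
package org.springframework.data.mongodb.core.convert;

import org.springframework.data.geo.Circle;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.Metrics;
import org.springframework.data.geo.Point;

import com.mongodb.DBObject;

class GeoConvertersRoundTrip {

    public static void main(String[] args) {

        Circle circle = new Circle(new Point(12.3, 45.6), new Distance(2.5, Metrics.KILOMETERS));

        // Writes { "center" : { "x" : 12.3, "y" : 45.6 }, "radius" : <normalized value>, "metric" : "KILOMETERS" }
        DBObject dbo = GeoConverters.CircleToDbObjectConverter.INSTANCE.convert(circle);
        System.out.println(dbo);

        // Reading applies the stored metric again on the way back in.
        Circle read = GeoConverters.DbObjectToCircleConverter.INSTANCE.convert(dbo);
        System.out.println(read);
    }
}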
@@ -1,5 +1,5 @@
/*
* Copyright 2011 the original author or authors.
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,29 +13,34 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.geo;
package org.springframework.data.mongodb.core.convert;

import java.util.List;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor;

import com.mongodb.DBRef;

/**
* Common interface for all shapes. Allows building MongoDB representations of them.
* Allows direct interaction with the underlying {@link LazyLoadingInterceptor}.
*
* @author Oliver Gierke
* @author Thomas Darimont
* @author Christoph Strobl
* @since 1.5
*/
public interface Shape {
public interface LazyLoadingProxy {

/**
* Returns the {@link Shape} as a list of usually {@link Double} or {@link List}s of {@link Double}s. Wildcard bound
* to allow implementations to return a more concrete element type.
* Initializes the proxy and returns the wrapped value.
*
* @return
* @since 1.5
*/
List<? extends Object> asList();
Object getTarget();

/**
* Returns the command to be used to create the {@literal $within} criterion.
* Returns the {@link DBRef} represented by this {@link LazyLoadingProxy}, may be null.
*
* @return
* @since 1.5
*/
String getCommand();
DBRef toDBRef();
}
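A small sketch of how calling code might interact with the new contract (the inspect helper is hypothetical; only the LazyLoadingProxy interface itself comes from this change set), mirroring what MappingMongoConverter does in the hunks below:

// Illustration only.
import org.springframework.data.mongodb.core.convert.LazyLoadingProxy;

import com.mongodb.DBRef;

class LazyProxyInspection {

    static void inspect(Object association) {

        if (association instanceof LazyLoadingProxy) {

            // Cheap: returns the cached DBRef (may be null) without hitting the database.
            DBRef ref = ((LazyLoadingProxy) association).toDBRef();
            System.out.println("references " + (ref != null ? ref.getId() : "unknown"));

            // Expensive: forces resolution and hands back the real target object.
            Object target = ((LazyLoadingProxy) association).getTarget();
            System.out.println("resolved to " + target);
        }
    }
}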
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 by the original author(s).
* Copyright 2011-2014 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -29,10 +29,10 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.CollectionFactory;
import org.springframework.core.convert.ConversionException;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.support.ConversionServiceFactory;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.data.convert.CollectionFactory;
import org.springframework.data.convert.EntityInstantiator;
import org.springframework.data.convert.TypeMapper;
import org.springframework.data.mapping.Association;
@@ -56,6 +56,7 @@ import org.springframework.data.util.ClassTypeInformation;
import org.springframework.data.util.TypeInformation;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.CollectionUtils;

import com.mongodb.BasicDBList;
@@ -71,17 +72,20 @@ import com.mongodb.DBRef;
* @author Jon Brisbin
* @author Patrik Wasik
* @author Thomas Darimont
* @author Christoph Strobl
*/
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware, ValueResolver {

protected static final Logger log = LoggerFactory.getLogger(MappingMongoConverter.class);
private static final String INCOMPATIBLE_TYPES = "Cannot convert %1$s of type %2$s into an instance of %3$s! Implement a custom Converter<%2$s, %3$s> and register it with the CustomConversions. Parent object was: %4$s";

protected static final Logger LOGGER = LoggerFactory.getLogger(MappingMongoConverter.class);

protected final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
protected final SpelExpressionParser spelExpressionParser = new SpelExpressionParser();
protected final QueryMapper idMapper;
protected final DbRefResolver dbRefResolver;

protected ApplicationContext applicationContext;
protected boolean useFieldAccessOnly = true;
protected MongoTypeMapper typeMapper;
protected String mapKeyDotReplacement = null;

@@ -93,11 +97,10 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
* @param mongoDbFactory must not be {@literal null}.
* @param mappingContext must not be {@literal null}.
*/
@SuppressWarnings("deprecation")
public MappingMongoConverter(DbRefResolver dbRefResolver,
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

super(ConversionServiceFactory.createDefaultConversionService());
super(new DefaultConversionService());

Assert.notNull(dbRefResolver, "DbRefResolver must not be null!");
Assert.notNull(mappingContext, "MappingContext must not be null!");
@@ -132,8 +135,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
* @param typeMapper the typeMapper to set
*/
public void setTypeMapper(MongoTypeMapper typeMapper) {
this.typeMapper = typeMapper == null ? new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY,
mappingContext) : typeMapper;
this.typeMapper = typeMapper == null
? new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext) : typeMapper;
}

/*
@@ -165,17 +168,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return mappingContext;
}

/**
* Configures whether to use field access only for entity mapping. Setting this to true will force the
* {@link MongoConverter} to not go through getters or setters even if they are present for getting and setting
* property values.
*
* @param useFieldAccessOnly
*/
public void setUseFieldAccessOnly(boolean useFieldAccessOnly) {
this.useFieldAccessOnly = useFieldAccessOnly;
}

/*
* (non-Javadoc)
* @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext)
@@ -195,11 +187,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
}

protected <S extends Object> S read(TypeInformation<S> type, DBObject dbo) {
return read(type, dbo, null);
return read(type, dbo, ObjectPath.ROOT);
}

@SuppressWarnings("unchecked")
protected <S extends Object> S read(TypeInformation<S> type, DBObject dbo, Object parent) {
private <S extends Object> S read(TypeInformation<S> type, DBObject dbo, ObjectPath path) {

if (null == dbo) {
return null;
@@ -217,11 +209,15 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
}

if (typeToUse.isCollectionLike() && dbo instanceof BasicDBList) {
return (S) readCollectionOrArray(typeToUse, (BasicDBList) dbo, parent);
return (S) readCollectionOrArray(typeToUse, (BasicDBList) dbo, path);
}

if (typeToUse.isMap()) {
return (S) readMap(typeToUse, dbo, parent);
return (S) readMap(typeToUse, dbo, path);
}

if (dbo instanceof BasicDBList) {
throw new MappingException(String.format(INCOMPATIBLE_TYPES, dbo, BasicDBList.class, typeToUse.getType(), path));
}

// Retrieve persistent entity info
@@ -231,41 +227,56 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
throw new MappingException("No mapping metadata found for " + rawType.getName());
}

return read(persistentEntity, dbo, parent);
return read(persistentEntity, dbo, path);
}

private ParameterValueProvider<MongoPersistentProperty> getParameterProvider(MongoPersistentEntity<?> entity,
DBObject source, DefaultSpELExpressionEvaluator evaluator, Object parent) {
DBObject source, DefaultSpELExpressionEvaluator evaluator, ObjectPath path) {

MongoDbPropertyValueProvider provider = new MongoDbPropertyValueProvider(source, evaluator, parent);
MongoDbPropertyValueProvider provider = new MongoDbPropertyValueProvider(source, evaluator, path);
PersistentEntityParameterValueProvider<MongoPersistentProperty> parameterProvider = new PersistentEntityParameterValueProvider<MongoPersistentProperty>(
entity, provider, parent);
entity, provider, path.getCurrentObject());

return new ConverterAwareSpELExpressionParameterValueProvider(evaluator, conversionService, parameterProvider,
parent);
path);
}

private <S extends Object> S read(final MongoPersistentEntity<S> entity, final DBObject dbo, final Object parent) {
private <S extends Object> S read(final MongoPersistentEntity<S> entity, final DBObject dbo, final ObjectPath path) {

final DefaultSpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(dbo, spELContext);

ParameterValueProvider<MongoPersistentProperty> provider = getParameterProvider(entity, dbo, evaluator, parent);
ParameterValueProvider<MongoPersistentProperty> provider = getParameterProvider(entity, dbo, evaluator, path);
EntityInstantiator instantiator = instantiators.getInstantiatorFor(entity);
S instance = instantiator.createInstance(entity, provider);

final BeanWrapper<MongoPersistentEntity<S>, S> wrapper = BeanWrapper.create(instance, conversionService);
final BeanWrapper<S> wrapper = BeanWrapper.create(instance, conversionService);
final MongoPersistentProperty idProperty = entity.getIdProperty();
final S result = wrapper.getBean();

// make sure id property is set before all other properties
Object idValue = null;

if (idProperty != null) {
idValue = getValueInternal(idProperty, dbo, evaluator, path);
wrapper.setProperty(idProperty, idValue);
}

final ObjectPath currentPath = path.push(result, entity, idValue);

// Set properties not already set in the constructor
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
public void doWithPersistentProperty(MongoPersistentProperty prop) {

// we skip the id property since it was already set
if (idProperty != null && idProperty.equals(prop)) {
return;
}

if (!dbo.containsField(prop.getFieldName()) || entity.isConstructorArgument(prop)) {
return;
}

Object obj = getValueInternal(prop, dbo, evaluator, result);
wrapper.setProperty(prop, obj, useFieldAccessOnly);
wrapper.setProperty(prop, getValueInternal(prop, dbo, evaluator, currentPath));
}
});

@@ -273,17 +284,21 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
entity.doWithAssociations(new AssociationHandler<MongoPersistentProperty>() {
public void doWithAssociation(Association<MongoPersistentProperty> association) {

MongoPersistentProperty inverseProp = association.getInverse();
final MongoPersistentProperty property = association.getInverse();
Object value = dbo.get(property.getFieldName());

Object obj = dbRefResolver.resolveDbRef(inverseProp, new DbRefResolverCallback() {
if (value == null) {
return;
}

@Override
public Object resolve(MongoPersistentProperty property) {
return getValueInternal(property, dbo, evaluator, parent);
}
});
DBRef dbref = value instanceof DBRef ? (DBRef) value : null;

wrapper.setProperty(inverseProp, obj);
DbRefProxyHandler handler = new DefaultDbRefProxyHandler(spELContext, mappingContext,
MappingMongoConverter.this);
DbRefResolverCallback callback = new DefaultDbRefResolverCallback(dbo, currentPath, evaluator,
MappingMongoConverter.this);

wrapper.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, callback, handler));
}
});

@@ -303,7 +318,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
Assert.isTrue(annotation != null, "The referenced property has to be mapped with @DBRef!");
}

return createDBRef(object, annotation);
// @see DATAMONGO-913
if (object instanceof LazyLoadingProxy) {
return ((LazyLoadingProxy) object).toDBRef();
}

return createDBRef(object, referingProperty);
}

/**
@@ -318,14 +338,17 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return;
}

boolean handledByCustomConverter = conversions.getCustomWriteTarget(obj.getClass(), DBObject.class) != null;
TypeInformation<? extends Object> type = ClassTypeInformation.from(obj.getClass());
Class<?> entityType = obj.getClass();
boolean handledByCustomConverter = conversions.getCustomWriteTarget(entityType, DBObject.class) != null;
TypeInformation<? extends Object> type = ClassTypeInformation.from(entityType);

if (!handledByCustomConverter && !(dbo instanceof BasicDBList)) {
typeMapper.writeType(type, dbo);
}

writeInternal(obj, dbo, type);
Object target = obj instanceof LazyLoadingProxy ? ((LazyLoadingProxy) obj).getTarget() : obj;

writeInternal(target, dbo, type);
}

/**
@@ -341,7 +364,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return;
}

Class<?> customTarget = conversions.getCustomWriteTarget(obj.getClass(), DBObject.class);
Class<?> entityType = obj.getClass();
Class<?> customTarget = conversions.getCustomWriteTarget(entityType, DBObject.class);

if (customTarget != null) {
DBObject result = conversionService.convert(obj, DBObject.class);
@@ -349,17 +373,17 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return;
}

if (Map.class.isAssignableFrom(obj.getClass())) {
if (Map.class.isAssignableFrom(entityType)) {
writeMapInternal((Map<Object, Object>) obj, dbo, ClassTypeInformation.MAP);
return;
}

if (Collection.class.isAssignableFrom(obj.getClass())) {
if (Collection.class.isAssignableFrom(entityType)) {
writeCollectionInternal((Collection<?>) obj, ClassTypeInformation.LIST, (BasicDBList) dbo);
return;
}

MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(obj.getClass());
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityType);
writeInternal(obj, dbo, entity);
addCustomTypeKeyIfNecessary(typeHint, obj, dbo);
}
@@ -374,15 +398,13 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
throw new MappingException("No mapping metadata found for entity of type " + obj.getClass().getName());
}

final BeanWrapper<MongoPersistentEntity<Object>, Object> wrapper = BeanWrapper.create(obj, conversionService);
final BeanWrapper<Object> wrapper = BeanWrapper.create(obj, conversionService);
final MongoPersistentProperty idProperty = entity.getIdProperty();

if (!dbo.containsField("_id") && null != idProperty) {

boolean fieldAccessOnly = idProperty.usePropertyAccess() ? false : useFieldAccessOnly;

try {
Object id = wrapper.getProperty(idProperty, Object.class, fieldAccessOnly);
Object id = wrapper.getProperty(idProperty, Object.class);
dbo.put("_id", idMapper.convertId(id));
} catch (ConversionException ignored) {}
}
@@ -391,15 +413,14 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
public void doWithPersistentProperty(MongoPersistentProperty prop) {

if (prop.equals(idProperty)) {
if (prop.equals(idProperty) || !prop.isWritable()) {
return;
}

boolean fieldAccessOnly = prop.usePropertyAccess() ? false : useFieldAccessOnly;

Object propertyObj = wrapper.getProperty(prop, prop.getType(), fieldAccessOnly);
Object propertyObj = wrapper.getProperty(prop);

if (null != propertyObj) {

if (!conversions.isSimpleType(propertyObj.getClass())) {
writePropertyInternal(propertyObj, dbo, prop);
} else {
@@ -413,7 +434,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
public void doWithAssociation(Association<MongoPersistentProperty> association) {
MongoPersistentProperty inverseProp = association.getInverse();
Class<?> type = inverseProp.getType();
Object propertyObj = wrapper.getProperty(inverseProp, type, useFieldAccessOnly);
Object propertyObj = wrapper.getProperty(inverseProp, type);
if (null != propertyObj) {
writePropertyInternal(propertyObj, dbo, inverseProp);
}
@@ -446,13 +467,32 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
}

if (prop.isDbReference()) {
DBRef dbRefObj = createDBRef(obj, prop.getDBRef());

DBRef dbRefObj = null;

/*
* If we already have a LazyLoadingProxy, we use it's cached DBRef value instead of
* unnecessarily initializing it only to convert it to a DBRef a few instructions later.
*/
if (obj instanceof LazyLoadingProxy) {
dbRefObj = ((LazyLoadingProxy) obj).toDBRef();
}

dbRefObj = dbRefObj != null ? dbRefObj : createDBRef(obj, prop);

if (null != dbRefObj) {
accessor.put(prop, dbRefObj);
return;
}
}

/*
* If we have a LazyLoadingProxy we make sure it is initialized first.
*/
if (obj instanceof LazyLoadingProxy) {
obj = ((LazyLoadingProxy) obj).getTarget();
}

// Lookup potential custom target type
Class<?> basicTargetType = conversions.getCustomWriteTarget(obj.getClass(), null);

@@ -466,8 +506,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
: new BasicDBObject();
addCustomTypeKeyIfNecessary(type, obj, propDbObj);

MongoPersistentEntity<?> entity = isSubtype(prop.getType(), obj.getClass()) ? mappingContext
.getPersistentEntity(obj.getClass()) : mappingContext.getPersistentEntity(type);
MongoPersistentEntity<?> entity = isSubtype(prop.getType(), obj.getClass())
? mappingContext.getPersistentEntity(obj.getClass()) : mappingContext.getPersistentEntity(type);

writeInternal(obj, propDbObj, entity);
accessor.put(prop, propDbObj);
@@ -515,7 +555,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
continue;
}

DBRef dbRef = createDBRef(element, property.getDBRef());
DBRef dbRef = createDBRef(element, property);
dbList.add(dbRef);
}

@@ -547,8 +587,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App

if (conversions.isSimpleType(key.getClass())) {

String simpleKey = potentiallyEscapeMapKey(key.toString());
dbObject.put(simpleKey, value != null ? createDBRef(value, property.getDBRef()) : null);
String simpleKey = prepareMapKey(key.toString());
dbObject.put(simpleKey, value != null ? createDBRef(value, property) : null);

} else {
throw new MappingException("Cannot use a complex object as a key value.");
@@ -599,12 +639,13 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
protected DBObject writeMapInternal(Map<Object, Object> obj, DBObject dbo, TypeInformation<?> propertyType) {

for (Map.Entry<Object, Object> entry : obj.entrySet()) {

Object key = entry.getKey();
Object val = entry.getValue();

if (conversions.isSimpleType(key.getClass())) {
// Don't use conversion service here as removal of ObjectToString converter results in some primitive types not
// being convertable
String simpleKey = potentiallyEscapeMapKey(key.toString());

String simpleKey = prepareMapKey(key);
if (val == null || conversions.isSimpleType(val.getClass())) {
writeSimpleInternal(val, dbo, simpleKey);
} else if (val instanceof Collection || val.getClass().isArray()) {
@@ -625,6 +666,21 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return dbo;
}

/**
* Prepares the given {@link Map} key to be converted into a {@link String}. Will invoke potentially registered custom
* conversions and escape dots from the result as they're not supported as {@link Map} key in MongoDB.
*
* @param key must not be {@literal null}.
* @return
*/
private String prepareMapKey(Object key) {

Assert.notNull(key, "Map key must not be null!");

String convertedKey = potentiallyConvertMapKey(key);
return potentiallyEscapeMapKey(convertedKey);
}

/**
* Potentially replaces dots in the given map key with the configured map key replacement if configured or aborts
* conversion if none is configured.
@@ -640,13 +696,31 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
}

if (mapKeyDotReplacement == null) {
throw new MappingException(String.format("Map key %s contains dots but no replacement was configured! Make "
+ "sure map keys don't contain dots in the first place or configure an appropriate replacement!", source));
throw new MappingException(String.format(
"Map key %s contains dots but no replacement was configured! Make "
+ "sure map keys don't contain dots in the first place or configure an appropriate replacement!",
source));
}

return source.replaceAll("\\.", mapKeyDotReplacement);
}

/**
* Returns a {@link String} representation of the given {@link Map} key
*
* @param key
* @return
*/
private String potentiallyConvertMapKey(Object key) {

if (key instanceof String) {
return (String) key;
}

return conversions.hasCustomWriteTarget(key.getClass(), String.class)
? (String) getPotentiallyConvertedSimpleWrite(key) : key.toString();
}

/**
* Translates the map key replacements in the given key just read with a dot in case a map key replacement has been
* configured.
@@ -668,12 +742,13 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
*/
protected void addCustomTypeKeyIfNecessary(TypeInformation<?> type, Object value, DBObject dbObject) {

TypeInformation<?> actualType = type != null ? type.getActualType() : type;
TypeInformation<?> actualType = type != null ? type.getActualType() : null;
Class<?> reference = actualType == null ? Object.class : actualType.getType();
Class<?> valueType = ClassUtils.getUserClass(value.getClass());

boolean notTheSameClass = !value.getClass().equals(reference);
boolean notTheSameClass = !valueType.equals(reference);
if (notTheSameClass) {
typeMapper.writeType(value.getClass(), dbObject);
typeMapper.writeType(valueType, dbObject);
}
}

@@ -726,7 +801,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
@SuppressWarnings({ "rawtypes", "unchecked" })
private Object getPotentiallyConvertedSimpleRead(Object value, Class<?> target) {

if (value == null || target == null) {
if (value == null || target == null || target.isAssignableFrom(value.getClass())) {
return value;
}

@@ -738,10 +813,10 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return Enum.valueOf((Class<Enum>) target, value.toString());
}

return target.isAssignableFrom(value.getClass()) ? value : conversionService.convert(value, target);
return conversionService.convert(value, target);
}

protected DBRef createDBRef(Object target, org.springframework.data.mongodb.core.mapping.DBRef dbref) {
protected DBRef createDBRef(Object target, MongoPersistentProperty property) {

Assert.notNull(target);

@@ -750,6 +825,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
}

MongoPersistentEntity<?> targetEntity = mappingContext.getPersistentEntity(target.getClass());
targetEntity = targetEntity == null ? targetEntity = mappingContext.getPersistentEntity(property) : targetEntity;

if (null == targetEntity) {
throw new MappingException("No mapping metadata found for " + target.getClass());
@@ -761,21 +837,31 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
throw new MappingException("No id property found on class " + targetEntity.getType());
}

BeanWrapper<MongoPersistentEntity<Object>, Object> wrapper = BeanWrapper.create(target, conversionService);
Object id = wrapper.getProperty(idProperty, Object.class, useFieldAccessOnly);
Object id = null;

if (target.getClass().equals(idProperty.getType())) {
id = target;
} else {
BeanWrapper<Object> wrapper = BeanWrapper.create(target, conversionService);
id = wrapper.getProperty(idProperty, Object.class);
}

if (null == id) {
throw new MappingException("Cannot create a reference to an object with a NULL id.");
}

return dbRefResolver.createDbRef(dbref, targetEntity, idMapper.convertId(id));
return dbRefResolver.createDbRef(property == null ? null : property.getDBRef(), targetEntity,
idMapper.convertId(id));
}

protected Object getValueInternal(MongoPersistentProperty prop, DBObject dbo, SpELExpressionEvaluator eval,
Object parent) {

MongoDbPropertyValueProvider provider = new MongoDbPropertyValueProvider(dbo, spELContext, parent);
return provider.getPropertyValue(prop);
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.convert.ValueResolver#getValueInternal(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty, com.mongodb.DBObject, org.springframework.data.mapping.model.SpELExpressionEvaluator, java.lang.Object)
*/
@Override
public Object getValueInternal(MongoPersistentProperty prop, DBObject dbo, SpELExpressionEvaluator evaluator,
ObjectPath path) {
return new MongoDbPropertyValueProvider(dbo, evaluator, path).getPropertyValue(prop);
}

/**
@@ -783,12 +869,13 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
*
* @param targetType must not be {@literal null}.
* @param sourceValue must not be {@literal null}.
* @param path must not be {@literal null}.
* @return the converted {@link Collection} or array, will never be {@literal null}.
*/
@SuppressWarnings("unchecked")
private Object readCollectionOrArray(TypeInformation<?> targetType, BasicDBList sourceValue, Object parent) {
private Object readCollectionOrArray(TypeInformation<?> targetType, BasicDBList sourceValue, ObjectPath path) {

Assert.notNull(targetType);
Assert.notNull(targetType, "Target type must not be null!");
Assert.notNull(path, "Object path must not be null!");

Class<?> collectionType = targetType.getType();

@@ -796,22 +883,22 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return getPotentiallyConvertedSimpleRead(new HashSet<Object>(), collectionType);
}

collectionType = Collection.class.isAssignableFrom(collectionType) ? collectionType : List.class;

Collection<Object> items = targetType.getType().isArray() ? new ArrayList<Object>() : CollectionFactory
.createCollection(collectionType, sourceValue.size());
TypeInformation<?> componentType = targetType.getComponentType();
Class<?> rawComponentType = componentType == null ? null : componentType.getType();

collectionType = Collection.class.isAssignableFrom(collectionType) ? collectionType : List.class;
Collection<Object> items = targetType.getType().isArray() ? new ArrayList<Object>()
: CollectionFactory.createCollection(collectionType, rawComponentType, sourceValue.size());

for (int i = 0; i < sourceValue.size(); i++) {

Object dbObjItem = sourceValue.get(i);

if (dbObjItem instanceof DBRef) {
|
||||
items.add(DBRef.class.equals(rawComponentType) ? dbObjItem : read(componentType, readRef((DBRef) dbObjItem),
|
||||
parent));
|
||||
items.add(
|
||||
DBRef.class.equals(rawComponentType) ? dbObjItem : read(componentType, readRef((DBRef) dbObjItem), path));
|
||||
} else if (dbObjItem instanceof DBObject) {
|
||||
items.add(read(componentType, (DBObject) dbObjItem, parent));
|
||||
items.add(read(componentType, (DBObject) dbObjItem, path));
|
||||
} else {
|
||||
items.add(getPotentiallyConvertedSimpleRead(dbObjItem, rawComponentType));
|
||||
}
|
||||
@@ -824,16 +911,25 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
* Reads the given {@link DBObject} into a {@link Map}. will recursively resolve nested {@link Map}s as well.
|
||||
*
|
||||
* @param type the {@link Map} {@link TypeInformation} to be used to unmarshall this {@link DBObject}.
|
||||
* @param dbObject
|
||||
* @param dbObject must not be {@literal null}
|
||||
* @param path must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Map<Object, Object> readMap(TypeInformation<?> type, DBObject dbObject, Object parent) {
|
||||
protected Map<Object, Object> readMap(TypeInformation<?> type, DBObject dbObject, ObjectPath path) {
|
||||
|
||||
Assert.notNull(dbObject);
|
||||
Assert.notNull(dbObject, "DBObject must not be null!");
|
||||
Assert.notNull(path, "Object path must not be null!");
|
||||
|
||||
Class<?> mapType = typeMapper.readType(dbObject, type).getType();
|
||||
Map<Object, Object> map = CollectionFactory.createMap(mapType, dbObject.keySet().size());
|
||||
|
||||
TypeInformation<?> keyType = type.getComponentType();
|
||||
Class<?> rawKeyType = keyType == null ? null : keyType.getType();
|
||||
|
||||
TypeInformation<?> valueType = type.getMapValueType();
|
||||
Class<?> rawValueType = valueType == null ? null : valueType.getType();
|
||||
|
||||
Map<Object, Object> map = CollectionFactory.createMap(mapType, rawKeyType, dbObject.keySet().size());
|
||||
Map<String, Object> sourceMap = dbObject.toMap();
|
||||
|
||||
for (Entry<String, Object> entry : sourceMap.entrySet()) {
|
||||
@@ -843,18 +939,14 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
Object key = potentiallyUnescapeMapKey(entry.getKey());
|
||||
|
||||
TypeInformation<?> keyTypeInformation = type.getComponentType();
|
||||
if (keyTypeInformation != null) {
|
||||
Class<?> keyType = keyTypeInformation.getType();
|
||||
key = conversionService.convert(key, keyType);
|
||||
if (rawKeyType != null) {
|
||||
key = conversionService.convert(key, rawKeyType);
|
||||
}
|
||||
|
||||
Object value = entry.getValue();
|
||||
TypeInformation<?> valueType = type.getMapValueType();
|
||||
Class<?> rawValueType = valueType == null ? null : valueType.getType();
|
||||
|
||||
if (value instanceof DBObject) {
|
||||
map.put(key, read(valueType, (DBObject) value, parent));
|
||||
map.put(key, read(valueType, (DBObject) value, path));
|
||||
} else if (value instanceof DBRef) {
|
||||
map.put(key, DBRef.class.equals(rawValueType) ? value : read(valueType, readRef((DBRef) value)));
|
||||
} else {
|
||||
@@ -866,21 +958,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return map;
|
||||
}
|
||||
|
||||
protected <T> List<?> unwrapList(BasicDBList dbList, TypeInformation<T> targetType) {
|
||||
List<Object> rootList = new ArrayList<Object>();
|
||||
for (int i = 0; i < dbList.size(); i++) {
|
||||
Object obj = dbList.get(i);
|
||||
if (obj instanceof BasicDBList) {
|
||||
rootList.add(unwrapList((BasicDBList) obj, targetType.getComponentType()));
|
||||
} else if (obj instanceof DBObject) {
|
||||
rootList.add(read(targetType, (DBObject) obj));
|
||||
} else {
|
||||
rootList.add(obj);
|
||||
}
|
||||
}
|
||||
return rootList;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.MongoWriter#convertToMongoType(java.lang.Object, org.springframework.data.util.TypeInformation)
|
||||
@@ -902,15 +979,17 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return getPotentiallyConvertedSimpleWrite(obj);
|
||||
}
|
||||
|
||||
TypeInformation<?> typeHint = typeInformation;
|
||||
|
||||
if (obj instanceof BasicDBList) {
|
||||
return maybeConvertList((BasicDBList) obj);
|
||||
return maybeConvertList((BasicDBList) obj, typeHint);
|
||||
}
|
||||
|
||||
if (obj instanceof DBObject) {
|
||||
DBObject newValueDbo = new BasicDBObject();
|
||||
for (String vk : ((DBObject) obj).keySet()) {
|
||||
Object o = ((DBObject) obj).get(vk);
|
||||
newValueDbo.put(vk, convertToMongoType(o));
|
||||
newValueDbo.put(vk, convertToMongoType(o, typeHint));
|
||||
}
|
||||
return newValueDbo;
|
||||
}
|
||||
@@ -918,44 +997,51 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
if (obj instanceof Map) {
|
||||
DBObject result = new BasicDBObject();
|
||||
for (Map.Entry<Object, Object> entry : ((Map<Object, Object>) obj).entrySet()) {
|
||||
result.put(entry.getKey().toString(), convertToMongoType(entry.getValue()));
|
||||
result.put(entry.getKey().toString(), convertToMongoType(entry.getValue(), typeHint));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
if (obj.getClass().isArray()) {
|
||||
return maybeConvertList(Arrays.asList((Object[]) obj));
|
||||
return maybeConvertList(Arrays.asList((Object[]) obj), typeHint);
|
||||
}
|
||||
|
||||
if (obj instanceof Collection) {
|
||||
return maybeConvertList((Collection<?>) obj);
|
||||
return maybeConvertList((Collection<?>) obj, typeHint);
|
||||
}
|
||||
|
||||
DBObject newDbo = new BasicDBObject();
|
||||
this.write(obj, newDbo);
|
||||
|
||||
if (typeInformation == null) {
|
||||
return removeTypeInfoRecursively(newDbo);
|
||||
return removeTypeInfo(newDbo, true);
|
||||
}
|
||||
|
||||
return !obj.getClass().equals(typeInformation.getType()) ? newDbo : removeTypeInfoRecursively(newDbo);
|
||||
if (typeInformation.getType().equals(NestedDocument.class)) {
|
||||
return removeTypeInfo(newDbo, false);
|
||||
}
|
||||
|
||||
return !obj.getClass().equals(typeInformation.getType()) ? newDbo : removeTypeInfo(newDbo, true);
|
||||
}
|
||||
|
||||
public BasicDBList maybeConvertList(Iterable<?> source) {
|
||||
public BasicDBList maybeConvertList(Iterable<?> source, TypeInformation<?> typeInformation) {
|
||||
|
||||
BasicDBList newDbl = new BasicDBList();
|
||||
for (Object element : source) {
|
||||
newDbl.add(convertToMongoType(element));
|
||||
newDbl.add(convertToMongoType(element, typeInformation));
|
||||
}
|
||||
|
||||
return newDbl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the type information from the conversion result.
|
||||
* Removes the type information from the entire conversion result.
|
||||
*
|
||||
* @param object
|
||||
* @param recursively whether to apply the removal recursively
|
||||
* @return
|
||||
*/
|
||||
private Object removeTypeInfoRecursively(Object object) {
|
||||
private Object removeTypeInfo(Object object, boolean recursively) {
|
||||
|
||||
if (!(object instanceof DBObject)) {
|
||||
return object;
|
||||
@@ -963,19 +1049,29 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
DBObject dbObject = (DBObject) object;
|
||||
String keyToRemove = null;
|
||||
|
||||
for (String key : dbObject.keySet()) {
|
||||
|
||||
if (typeMapper.isTypeKey(key)) {
|
||||
keyToRemove = key;
|
||||
if (recursively) {
|
||||
|
||||
Object value = dbObject.get(key);
|
||||
|
||||
if (value instanceof BasicDBList) {
|
||||
for (Object element : (BasicDBList) value) {
|
||||
removeTypeInfo(element, recursively);
|
||||
}
|
||||
} else {
|
||||
removeTypeInfo(value, recursively);
|
||||
}
|
||||
}
|
||||
|
||||
Object value = dbObject.get(key);
|
||||
if (value instanceof BasicDBList) {
|
||||
for (Object element : (BasicDBList) value) {
|
||||
removeTypeInfoRecursively(element);
|
||||
if (typeMapper.isTypeKey(key)) {
|
||||
|
||||
keyToRemove = key;
|
||||
|
||||
if (!recursively) {
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
removeTypeInfoRecursively(value);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -986,24 +1082,34 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return dbObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link PropertyValueProvider} to evaluate a SpEL expression if present on the property or simply accesses the field
|
||||
* of the configured source {@link DBObject}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private class MongoDbPropertyValueProvider implements PropertyValueProvider<MongoPersistentProperty> {
|
||||
|
||||
private final DBObjectAccessor source;
|
||||
private final SpELExpressionEvaluator evaluator;
|
||||
private final Object parent;
|
||||
private final ObjectPath path;
|
||||
|
||||
public MongoDbPropertyValueProvider(DBObject source, SpELContext factory, Object parent) {
|
||||
this(source, new DefaultSpELExpressionEvaluator(source, factory), parent);
|
||||
}
|
||||
|
||||
public MongoDbPropertyValueProvider(DBObject source, DefaultSpELExpressionEvaluator evaluator, Object parent) {
|
||||
/**
|
||||
* Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and
|
||||
* {@link ObjectPath}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param evaluator must not be {@literal null}.
|
||||
* @param path can be {@literal null}.
|
||||
*/
|
||||
public MongoDbPropertyValueProvider(DBObject source, SpELExpressionEvaluator evaluator, ObjectPath path) {
|
||||
|
||||
Assert.notNull(source);
|
||||
Assert.notNull(evaluator);
|
||||
|
||||
this.source = new DBObjectAccessor(source);
|
||||
this.evaluator = evaluator;
|
||||
this.parent = parent;
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -1019,7 +1125,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return null;
|
||||
}
|
||||
|
||||
return readValue(value, property.getTypeInformation(), parent);
|
||||
return readValue(value, property.getTypeInformation(), path);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1029,10 +1135,10 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private class ConverterAwareSpELExpressionParameterValueProvider extends
|
||||
SpELExpressionParameterValueProvider<MongoPersistentProperty> {
|
||||
private class ConverterAwareSpELExpressionParameterValueProvider
|
||||
extends SpELExpressionParameterValueProvider<MongoPersistentProperty> {
|
||||
|
||||
private final Object parent;
|
||||
private final ObjectPath path;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ConverterAwareSpELExpressionParameterValueProvider}.
|
||||
@@ -1042,10 +1148,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
* @param delegate must not be {@literal null}.
|
||||
*/
|
||||
public ConverterAwareSpELExpressionParameterValueProvider(SpELExpressionEvaluator evaluator,
|
||||
ConversionService conversionService, ParameterValueProvider<MongoPersistentProperty> delegate, Object parent) {
|
||||
ConversionService conversionService, ParameterValueProvider<MongoPersistentProperty> delegate,
|
||||
ObjectPath path) {
|
||||
|
||||
super(evaluator, conversionService, delegate);
|
||||
this.parent = parent;
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -1054,28 +1161,44 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
*/
|
||||
@Override
|
||||
protected <T> T potentiallyConvertSpelValue(Object object, Parameter<T, MongoPersistentProperty> parameter) {
|
||||
return readValue(object, parameter.getType(), parent);
|
||||
return readValue(object, parameter.getType(), path);
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private <T> T readValue(Object value, TypeInformation<?> type, Object parent) {
|
||||
private <T> T readValue(Object value, TypeInformation<?> type, ObjectPath path) {
|
||||
|
||||
Class<?> rawType = type.getType();
|
||||
|
||||
if (conversions.hasCustomReadTarget(value.getClass(), rawType)) {
|
||||
return (T) conversionService.convert(value, rawType);
|
||||
} else if (value instanceof DBRef) {
|
||||
return (T) (rawType.equals(DBRef.class) ? value : read(type, readRef((DBRef) value), parent));
|
||||
return potentiallyReadOrResolveDbRef((DBRef) value, type, path, rawType);
|
||||
} else if (value instanceof BasicDBList) {
|
||||
return (T) readCollectionOrArray(type, (BasicDBList) value, parent);
|
||||
return (T) readCollectionOrArray(type, (BasicDBList) value, path);
|
||||
} else if (value instanceof DBObject) {
|
||||
return (T) read(type, (DBObject) value, parent);
|
||||
return (T) read(type, (DBObject) value, path);
|
||||
} else {
|
||||
return (T) getPotentiallyConvertedSimpleRead(value, rawType);
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private <T> T potentiallyReadOrResolveDbRef(DBRef dbref, TypeInformation<?> type, ObjectPath path, Class<?> rawType) {
|
||||
|
||||
if (rawType.equals(DBRef.class)) {
|
||||
return (T) dbref;
|
||||
}
|
||||
|
||||
Object object = dbref == null ? null : path.getPathItem(dbref.getId(), dbref.getRef());
|
||||
|
||||
if (object != null) {
|
||||
return (T) object;
|
||||
}
|
||||
|
||||
return (T) (object != null ? object : read(type, readRef(dbref), path));
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs the fetch operation for the given {@link DBRef}.
|
||||
*
|
||||
@@ -1085,4 +1208,15 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
DBObject readRef(DBRef ref) {
|
||||
return ref.fetch();
|
||||
}
|
||||
|
||||
/**
|
||||
* Marker class used to indicate we have a non root document object here that might be used within an update - so we
|
||||
* need to preserve type hints for potential nested elements but need to remove it on top level.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.8
|
||||
*/
|
||||
static class NestedDocument {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -25,6 +25,8 @@ import org.springframework.core.convert.ConversionFailedException;
|
||||
import org.springframework.core.convert.TypeDescriptor;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.convert.ReadingConverter;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mongodb.core.query.Term;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
@@ -33,15 +35,15 @@ import com.mongodb.DBObject;
|
||||
* Wrapper class to contain useful converters for the usage with Mongo.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
abstract class MongoConverters {
|
||||
|
||||
/**
|
||||
* Private constructor to prevent instantiation.
|
||||
*/
|
||||
private MongoConverters() {
|
||||
|
||||
}
|
||||
private MongoConverters() {}
|
||||
|
||||
/**
|
||||
* Simple singleton to convert {@link ObjectId}s to their {@link String} representation.
|
||||
@@ -161,4 +163,19 @@ abstract class MongoConverters {
|
||||
return source == null ? null : source.toString();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 1.6
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum TermToStringConverter implements Converter<Term, String> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public String convert(Term source) {
|
||||
return source == null ? null : source.getFormatted();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,182 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* A path of objects nested into each other. The type allows access to all parent objects currently in creation even
|
||||
* when resolving more nested objects. This allows to avoid re-resolving object instances that are logically equivalent
|
||||
* to already resolved ones.
|
||||
* <p>
|
||||
* An immutable ordered set of target objects for {@link DBObject} to {@link Object} conversions. Object paths can be
|
||||
* constructed by the {@link #toObjectPath(Object)} method and extended via {@link #push(Object)}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.6
|
||||
*/
|
||||
class ObjectPath {
|
||||
|
||||
public static final ObjectPath ROOT = new ObjectPath();
|
||||
|
||||
private final List<ObjectPathItem> items;
|
||||
|
||||
private ObjectPath() {
|
||||
this.items = Collections.emptyList();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ObjectPath} from the given parent {@link ObjectPath} by adding the provided
|
||||
* {@link ObjectPathItem} to it.
|
||||
*
|
||||
* @param parent can be {@literal null}.
|
||||
* @param item
|
||||
*/
|
||||
private ObjectPath(ObjectPath parent, ObjectPath.ObjectPathItem item) {
|
||||
|
||||
List<ObjectPath.ObjectPathItem> items = new ArrayList<ObjectPath.ObjectPathItem>(parent.items);
|
||||
items.add(item);
|
||||
|
||||
this.items = Collections.unmodifiableList(items);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a copy of the {@link ObjectPath} with the given {@link Object} as current object.
|
||||
*
|
||||
* @param object must not be {@literal null}.
|
||||
* @param entity must not be {@literal null}.
|
||||
* @param id must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ObjectPath push(Object object, MongoPersistentEntity<?> entity, Object id) {
|
||||
|
||||
Assert.notNull(object, "Object must not be null!");
|
||||
Assert.notNull(entity, "MongoPersistentEntity must not be null!");
|
||||
|
||||
ObjectPathItem item = new ObjectPathItem(object, id, entity.getCollection());
|
||||
return new ObjectPath(this, item);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the object with the given id and stored in the given collection if it's contained in the {@link ObjectPath}
|
||||
* .
|
||||
*
|
||||
* @param id must not be {@literal null}.
|
||||
* @param collection must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public Object getPathItem(Object id, String collection) {
|
||||
|
||||
Assert.notNull(id, "Id must not be null!");
|
||||
Assert.hasText(collection, "Collection name must not be null!");
|
||||
|
||||
for (ObjectPathItem item : items) {
|
||||
|
||||
Object object = item.getObject();
|
||||
|
||||
if (object == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (item.getIdValue() == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (collection.equals(item.getCollection()) && id.equals(item.getIdValue())) {
|
||||
return object;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current object of the {@link ObjectPath} or {@literal null} if the path is empty.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public Object getCurrentObject() {
|
||||
return items.isEmpty() ? null : items.get(items.size() - 1).getObject();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
|
||||
if (items.isEmpty()) {
|
||||
return "[empty]";
|
||||
}
|
||||
|
||||
List<String> strings = new ArrayList<String>(items.size());
|
||||
|
||||
for (ObjectPathItem item : items) {
|
||||
strings.add(item.object.toString());
|
||||
}
|
||||
|
||||
return StringUtils.collectionToDelimitedString(strings, " -> ");
|
||||
}
|
||||
|
||||
/**
|
||||
* An item in an {@link ObjectPath}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class ObjectPathItem {
|
||||
|
||||
private final Object object;
|
||||
private final Object idValue;
|
||||
private final String collection;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ObjectPathItem}.
|
||||
*
|
||||
* @param object
|
||||
* @param idValue
|
||||
* @param collection
|
||||
*/
|
||||
ObjectPathItem(Object object, Object idValue, String collection) {
|
||||
|
||||
this.object = object;
|
||||
this.idValue = idValue;
|
||||
this.collection = collection;
|
||||
}
|
||||
|
||||
public Object getObject() {
|
||||
return object;
|
||||
}
|
||||
|
||||
public Object getIdValue() {
|
||||
return idValue;
|
||||
}
|
||||
|
||||
public String getCollection() {
|
||||
return collection;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,20 +17,30 @@ package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionException;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.PersistentPropertyPath;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter.NestedDocument;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
@@ -44,10 +54,18 @@ import com.mongodb.DBRef;
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class QueryMapper {
|
||||
|
||||
private static final List<String> DEFAULT_ID_NAMES = Arrays.asList("id", "_id");
|
||||
private static final DBObject META_TEXT_SCORE = new BasicDBObject("$meta", "textScore");
|
||||
static final ClassTypeInformation<?> NESTED_DOCUMENT = ClassTypeInformation.from(NestedDocument.class);
|
||||
|
||||
private enum MetaMapping {
|
||||
FORCE, WHEN_PRESENT, IGNORE;
|
||||
}
|
||||
|
||||
private final ConversionService conversionService;
|
||||
private final MongoConverter converter;
|
||||
@@ -101,22 +119,103 @@ public class QueryMapper {
|
||||
continue;
|
||||
}
|
||||
|
||||
Field field = entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext);
|
||||
Field field = createPropertyField(entity, key, mappingContext);
|
||||
Entry<String, Object> entry = getMappedObjectForField(field, query.get(key));
|
||||
|
||||
Object rawValue = query.get(key);
|
||||
String newKey = field.getMappedKey();
|
||||
|
||||
if (isNestedKeyword(rawValue) && !field.isIdField()) {
|
||||
Keyword keyword = new Keyword((DBObject) rawValue);
|
||||
result.put(newKey, getMappedKeyword(field, keyword));
|
||||
} else {
|
||||
result.put(newKey, getMappedValue(field, rawValue));
|
||||
}
|
||||
result.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps fields used for sorting to the {@link MongoPersistentEntity}s properties. <br />
|
||||
* Also converts properties to their {@code $meta} representation if present.
|
||||
*
|
||||
* @param sortObject
|
||||
* @param entity
|
||||
* @return
|
||||
* @since 1.6
|
||||
*/
|
||||
public DBObject getMappedSort(DBObject sortObject, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (sortObject == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
DBObject mappedSort = getMappedObject(sortObject, entity);
|
||||
mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT);
|
||||
return mappedSort;
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps fields to retrieve to the {@link MongoPersistentEntity}s properties. <br />
|
||||
* Also onverts and potentially adds missing property {@code $meta} representation.
|
||||
*
|
||||
* @param fieldsObject
|
||||
* @param entity
|
||||
* @return
|
||||
* @since 1.6
|
||||
*/
|
||||
public DBObject getMappedFields(DBObject fieldsObject, MongoPersistentEntity<?> entity) {
|
||||
|
||||
DBObject mappedFields = fieldsObject != null ? getMappedObject(fieldsObject, entity) : new BasicDBObject();
|
||||
mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE);
|
||||
return mappedFields.keySet().isEmpty() ? null : mappedFields;
|
||||
}
|
||||
|
||||
private void mapMetaAttributes(DBObject source, MongoPersistentEntity<?> entity, MetaMapping metaMapping) {
|
||||
|
||||
if (entity == null || source == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (entity.hasTextScoreProperty() && !MetaMapping.IGNORE.equals(metaMapping)) {
|
||||
MongoPersistentProperty textScoreProperty = entity.getTextScoreProperty();
|
||||
if (MetaMapping.FORCE.equals(metaMapping)
|
||||
|| (MetaMapping.WHEN_PRESENT.equals(metaMapping) && source.containsField(textScoreProperty.getFieldName()))) {
|
||||
source.putAll(getMappedTextScoreField(textScoreProperty));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private DBObject getMappedTextScoreField(MongoPersistentProperty property) {
|
||||
return new BasicDBObject(property.getFieldName(), META_TEXT_SCORE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the mapped object value for given field out of rawValue taking nested {@link Keyword}s into account
|
||||
*
|
||||
* @param field
|
||||
* @param rawValue
|
||||
* @return
|
||||
*/
|
||||
protected Entry<String, Object> getMappedObjectForField(Field field, Object rawValue) {
|
||||
|
||||
String key = field.getMappedKey();
|
||||
Object value;
|
||||
|
||||
if (isNestedKeyword(rawValue) && !field.isIdField()) {
|
||||
Keyword keyword = new Keyword((DBObject) rawValue);
|
||||
value = getMappedKeyword(field, keyword);
|
||||
} else {
|
||||
value = getMappedValue(field, rawValue);
|
||||
}
|
||||
|
||||
return createMapEntry(key, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param entity
|
||||
* @param key
|
||||
* @param mappingContext
|
||||
* @return
|
||||
*/
|
||||
protected Field createPropertyField(MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
return entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the given {@link DBObject} representing a keyword by mapping the keyword's value.
|
||||
*
|
||||
@@ -124,7 +223,7 @@ public class QueryMapper {
|
||||
* @param entity
|
||||
* @return
|
||||
*/
|
||||
private DBObject getMappedKeyword(Keyword keyword, MongoPersistentEntity<?> entity) {
|
||||
protected DBObject getMappedKeyword(Keyword keyword, MongoPersistentEntity<?> entity) {
|
||||
|
||||
// $or/$nor
|
||||
if (keyword.isOrOrNor() || keyword.hasIterableValue()) {
|
||||
@@ -133,7 +232,7 @@ public class QueryMapper {
|
||||
BasicDBList newConditions = new BasicDBList();
|
||||
|
||||
for (Object condition : conditions) {
|
||||
newConditions.add(condition instanceof DBObject ? getMappedObject((DBObject) condition, entity)
|
||||
newConditions.add(isDBObject(condition) ? getMappedObject((DBObject) condition, entity)
|
||||
: convertSimpleOrDBObject(condition, entity));
|
||||
}
|
||||
|
||||
@@ -150,12 +249,12 @@ public class QueryMapper {
|
||||
* @param keyword
|
||||
* @return
|
||||
*/
|
||||
private DBObject getMappedKeyword(Field property, Keyword keyword) {
|
||||
protected DBObject getMappedKeyword(Field property, Keyword keyword) {
|
||||
|
||||
boolean needsAssociationConversion = property.isAssociation() && !keyword.isExists();
|
||||
Object value = keyword.getValue();
|
||||
|
||||
Object convertedValue = needsAssociationConversion ? convertAssociation(value, property.getProperty())
|
||||
Object convertedValue = needsAssociationConversion ? convertAssociation(value, property)
|
||||
: getMappedValue(property.with(keyword.getKey()), value);
|
||||
|
||||
return new BasicDBObject(keyword.key, convertedValue);
|
||||
@@ -170,26 +269,28 @@ public class QueryMapper {
|
||||
* @param newKey the key the value will be bound to eventually
|
||||
* @return
|
||||
*/
|
||||
private Object getMappedValue(Field documentField, Object value) {
|
||||
protected Object getMappedValue(Field documentField, Object value) {
|
||||
|
||||
if (documentField.isIdField()) {
|
||||
|
||||
if (value instanceof DBObject) {
|
||||
if (isDBObject(value)) {
|
||||
DBObject valueDbo = (DBObject) value;
|
||||
DBObject resultDbo = new BasicDBObject(valueDbo.toMap());
|
||||
|
||||
if (valueDbo.containsField("$in") || valueDbo.containsField("$nin")) {
|
||||
String inKey = valueDbo.containsField("$in") ? "$in" : "$nin";
|
||||
List<Object> ids = new ArrayList<Object>();
|
||||
for (Object id : (Iterable<?>) valueDbo.get(inKey)) {
|
||||
ids.add(convertId(id));
|
||||
}
|
||||
valueDbo.put(inKey, ids.toArray(new Object[ids.size()]));
|
||||
resultDbo.put(inKey, ids.toArray(new Object[ids.size()]));
|
||||
} else if (valueDbo.containsField("$ne")) {
|
||||
valueDbo.put("$ne", convertId(valueDbo.get("$ne")));
|
||||
resultDbo.put("$ne", convertId(valueDbo.get("$ne")));
|
||||
} else {
|
||||
return getMappedObject((DBObject) value, null);
|
||||
return getMappedObject(resultDbo, null);
|
||||
}
|
||||
|
||||
return valueDbo;
|
||||
return resultDbo;
|
||||
|
||||
} else {
|
||||
return convertId(value);
|
||||
@@ -200,13 +301,47 @@ public class QueryMapper {
|
||||
return getMappedKeyword(new Keyword((DBObject) value), null);
|
||||
}
|
||||
|
||||
if (documentField.isAssociation()) {
|
||||
return convertAssociation(value, documentField.getProperty());
|
||||
if (isAssociationConversionNecessary(documentField, value)) {
|
||||
return convertAssociation(value, documentField);
|
||||
}
|
||||
|
||||
return convertSimpleOrDBObject(value, documentField.getPropertyEntity());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link Field} represents an association reference that together with the given value
|
||||
* requires conversion to a {@link org.springframework.data.mongodb.core.mapping.DBRef} object. We check whether the
|
||||
* type of the given value is compatible with the type of the given document field in order to deal with potential
|
||||
* query field exclusions, since MongoDB uses the {@code int} {@literal 0} as an indicator for an excluded field.
|
||||
*
|
||||
* @param documentField must not be {@literal null}.
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
protected boolean isAssociationConversionNecessary(Field documentField, Object value) {
|
||||
|
||||
Assert.notNull(documentField, "Document field must not be null!");
|
||||
|
||||
if (value == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!documentField.isAssociation()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Class<? extends Object> type = value.getClass();
|
||||
MongoPersistentProperty property = documentField.getProperty();
|
||||
|
||||
if (property.getActualType().isAssignableFrom(type)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = documentField.getPropertyEntity();
|
||||
return entity.hasIdProperty()
|
||||
&& (type.equals(DBRef.class) || entity.getIdProperty().getActualType().isAssignableFrom(type));
|
||||
}
|
||||
|
||||
/**
|
||||
* Retriggers mapping if the given source is a {@link DBObject} or simply invokes the
|
||||
*
|
||||
@@ -214,13 +349,13 @@ public class QueryMapper {
|
||||
* @param entity
|
||||
* @return
|
||||
*/
|
||||
private Object convertSimpleOrDBObject(Object source, MongoPersistentEntity<?> entity) {
|
||||
protected Object convertSimpleOrDBObject(Object source, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (source instanceof BasicDBList) {
|
||||
return delegateConvertToMongoType(source, entity);
|
||||
}
|
||||
|
||||
if (source instanceof DBObject) {
|
||||
if (isDBObject(source)) {
|
||||
return getMappedObject((DBObject) source, entity);
|
||||
}
|
||||
|
||||
@@ -236,7 +371,11 @@ public class QueryMapper {
|
||||
* @return the converted mongo type or null if source is null
|
||||
*/
|
||||
protected Object delegateConvertToMongoType(Object source, MongoPersistentEntity<?> entity) {
|
||||
return converter.convertToMongoType(source);
|
||||
return converter.convertToMongoType(source, entity == null ? null : entity.getTypeInformation());
|
||||
}
|
||||
|
||||
protected Object convertAssociation(Object source, Field field) {
|
||||
return convertAssociation(source, field.getProperty());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -246,16 +385,22 @@ public class QueryMapper {
|
||||
* @param property
|
||||
* @return
|
||||
*/
|
||||
private Object convertAssociation(Object source, MongoPersistentProperty property) {
|
||||
protected Object convertAssociation(Object source, MongoPersistentProperty property) {
|
||||
|
||||
if (property == null || !property.isAssociation()) {
|
||||
if (property == null || source == null || source instanceof DBObject) {
|
||||
return source;
|
||||
}
|
||||
|
||||
if (source instanceof DBRef) {
|
||||
|
||||
DBRef ref = (DBRef) source;
|
||||
return new DBRef(ref.getDB(), ref.getRef(), convertId(ref.getId()));
|
||||
}
|
||||
|
||||
if (source instanceof Iterable) {
|
||||
BasicDBList result = new BasicDBList();
|
||||
for (Object element : (Iterable<?>) source) {
|
||||
result.add(element instanceof DBRef ? element : converter.toDBRef(element, property));
|
||||
result.add(createDbRefFor(element, property));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
@@ -264,13 +409,55 @@ public class QueryMapper {
|
||||
BasicDBObject result = new BasicDBObject();
|
||||
DBObject dbObject = (DBObject) source;
|
||||
for (String key : dbObject.keySet()) {
|
||||
Object o = dbObject.get(key);
|
||||
result.put(key, o instanceof DBRef ? o : converter.toDBRef(o, property));
|
||||
result.put(key, createDbRefFor(dbObject.get(key), property));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
return source == null || source instanceof DBRef ? source : converter.toDBRef(source, property);
|
||||
return createDbRefFor(source, property);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether the given value is a {@link DBObject}.
|
||||
*
|
||||
* @param value can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
protected final boolean isDBObject(Object value) {
|
||||
return value instanceof DBObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Entry} for the given {@link Field} with the given value.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @param value can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
protected final Entry<String, Object> createMapEntry(Field field, Object value) {
|
||||
return createMapEntry(field.getMappedKey(), value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Entry} with the given key and value.
|
||||
*
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @param value can be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
private Entry<String, Object> createMapEntry(String key, Object value) {
|
||||
|
||||
Assert.hasText(key, "Key must not be null or empty!");
|
||||
return Collections.singletonMap(key, value).entrySet().iterator().next();
|
||||
}
|
||||
|
||||
private DBRef createDbRefFor(Object source, MongoPersistentProperty property) {
|
||||
|
||||
if (source instanceof DBRef) {
|
||||
return (DBRef) source;
|
||||
}
|
||||
|
||||
return converter.toDBRef(source, property);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -281,13 +468,20 @@ public class QueryMapper {
|
||||
*/
|
||||
public Object convertId(Object id) {
|
||||
|
||||
try {
|
||||
return conversionService.convert(id, ObjectId.class);
|
||||
} catch (ConversionException e) {
|
||||
// Ignore
|
||||
if (id == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return delegateConvertToMongoType(id, null);
|
||||
if (id instanceof String) {
|
||||
return ObjectId.isValid(id.toString()) ? conversionService.convert(id, ObjectId.class) : id;
|
||||
}
|
||||
|
||||
try {
|
||||
return conversionService.canConvert(id.getClass(), ObjectId.class) ? conversionService.convert(id, ObjectId.class)
|
||||
: delegateConvertToMongoType(id, null);
|
||||
} catch (ConversionException o_O) {
|
||||
return delegateConvertToMongoType(id, null);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -328,7 +522,7 @@ public class QueryMapper {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class Keyword {
|
||||
static class Keyword {
|
||||
|
||||
private static final String N_OR_PATTERN = "\\$.*or";
|
||||
|
||||
@@ -381,7 +575,7 @@ public class QueryMapper {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class Field {
|
||||
protected static class Field {
|
||||
|
||||
private static final String ID_KEY = "_id";
|
||||
|
||||
@@ -418,7 +612,9 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the underlying {@link MongoPersistentProperty} backing the field.
|
||||
* Returns the underlying {@link MongoPersistentProperty} backing the field. For path traversals this will be the
|
||||
* property that represents the value to handle. This means it'll be the leaf property for plain paths or the
|
||||
* association property in case we refer to an association somewhere in the path.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@@ -452,18 +648,40 @@ public class QueryMapper {
|
||||
public String getMappedKey() {
|
||||
return isIdField() ? ID_KEY : name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the field references an association in case it refers to a nested field.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean containsAssociation() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public Association<MongoPersistentProperty> getAssociation() {
|
||||
return null;
|
||||
}
|
||||
|
||||
public TypeInformation<?> getTypeHint() {
|
||||
return ClassTypeInformation.OBJECT;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extension of {@link DocumentField} to be backed with mapping metadata.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class MetadataBackedField extends Field {
|
||||
protected static class MetadataBackedField extends Field {
|
||||
|
||||
private static final String INVALID_ASSOCIATION_REFERENCE = "Invalid path reference %s! Associations can only be pointed to directly or via their id property!";
|
||||
|
||||
private final MongoPersistentEntity<?> entity;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final MongoPersistentProperty property;
|
||||
private final PersistentPropertyPath<MongoPersistentProperty> path;
|
||||
private final Association<MongoPersistentProperty> association;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MetadataBackedField} with the given name, {@link MongoPersistentEntity} and
|
||||
@@ -475,6 +693,21 @@ public class QueryMapper {
|
||||
*/
|
||||
public MetadataBackedField(String name, MongoPersistentEntity<?> entity,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context) {
|
||||
this(name, entity, context, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MetadataBackedField} with the given name, {@link MongoPersistentEntity} and
|
||||
* {@link MappingContext} with the given {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @param entity must not be {@literal null}.
|
||||
* @param context must not be {@literal null}.
|
||||
* @param property may be {@literal null}.
|
||||
*/
|
||||
public MetadataBackedField(String name, MongoPersistentEntity<?> entity,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context,
|
||||
MongoPersistentProperty property) {
|
||||
|
||||
super(name);
|
||||
|
||||
@@ -483,8 +716,9 @@ public class QueryMapper {
|
||||
this.entity = entity;
|
||||
this.mappingContext = context;
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> path = getPath(name);
|
||||
this.property = path == null ? null : path.getLeafProperty();
|
||||
this.path = getPath(name);
|
||||
this.property = path == null ? property : path.getLeafProperty();
|
||||
this.association = findAssociation();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -493,7 +727,7 @@ public class QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
public MetadataBackedField with(String name) {
|
||||
return new MetadataBackedField(name, entity, mappingContext);
|
||||
return new MetadataBackedField(name, entity, mappingContext, property);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -518,7 +752,7 @@ public class QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
public MongoPersistentProperty getProperty() {
|
||||
return property;
|
||||
return association == null ? property : association.getInverse();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -537,9 +771,34 @@ public class QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
public boolean isAssociation() {
|
||||
return association != null;
|
||||
}
|
||||
|
||||
MongoPersistentProperty property = getProperty();
|
||||
return property == null ? false : property.isAssociation();
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getAssociation()
|
||||
*/
|
||||
@Override
|
||||
public Association<MongoPersistentProperty> getAssociation() {
|
||||
return association;
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the association property in the {@link PersistentPropertyPath}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private final Association<MongoPersistentProperty> findAssociation() {
|
||||
|
||||
if (this.path != null) {
|
||||
for (MongoPersistentProperty p : this.path) {
|
||||
if (p.isAssociation()) {
|
||||
return p.getAssociation();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -548,19 +807,129 @@ public class QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
public String getMappedKey() {
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> path = getPath(name);
|
||||
return path == null ? name : path.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE);
|
||||
return path == null ? name : path.toDotPath(isAssociation() ? getAssociationConverter() : getPropertyConverter());
|
||||
}
|
||||
|
||||
private PersistentPropertyPath<MongoPersistentProperty> getPath(String name) {
|
||||
protected PersistentPropertyPath<MongoPersistentProperty> getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link PersistentPropertyPath} for the given <code>pathExpression</code>.
|
||||
*
|
||||
* @param pathExpression
|
||||
* @return
|
||||
*/
|
||||
private PersistentPropertyPath<MongoPersistentProperty> getPath(String pathExpression) {
|
||||
|
||||
try {
|
||||
PropertyPath path = PropertyPath.from(name, entity.getTypeInformation());
|
||||
return mappingContext.getPersistentPropertyPath(path);
|
||||
|
||||
PropertyPath path = PropertyPath.from(pathExpression.replaceAll("\\.\\d", ""), entity.getTypeInformation());
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(path);
|
||||
|
||||
Iterator<MongoPersistentProperty> iterator = propertyPath.iterator();
|
||||
boolean associationDetected = false;
|
||||
|
||||
while (iterator.hasNext()) {
|
||||
|
||||
MongoPersistentProperty property = iterator.next();
|
||||
|
||||
if (property.isAssociation()) {
|
||||
associationDetected = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (associationDetected && !property.isIdProperty()) {
|
||||
throw new MappingException(String.format(INVALID_ASSOCIATION_REFERENCE, pathExpression));
|
||||
}
|
||||
}
|
||||
|
||||
return propertyPath;
|
||||
} catch (PropertyReferenceException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the {@link Converter} to be used to created the mapped key. Default implementation will use
|
||||
* {@link PropertyToFieldNameConverter}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return PropertyToFieldNameConverter.INSTANCE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the {@link Converter} to use for creating the mapped key of an association. Default implementation is
|
||||
* {@link AssociationConverter}.
|
||||
*
|
||||
* @return
|
||||
* @since 1.7
|
||||
*/
|
||||
protected Converter<MongoPersistentProperty, String> getAssociationConverter() {
|
||||
return new AssociationConverter(getAssociation());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getTypeHint()
|
||||
*/
|
||||
@Override
|
||||
public TypeInformation<?> getTypeHint() {
|
||||
|
||||
MongoPersistentProperty property = getProperty();
|
||||
|
||||
if (property == null) {
|
||||
return super.getTypeHint();
|
||||
}
|
||||
|
||||
if (property.getActualType().isInterface()
|
||||
|| java.lang.reflect.Modifier.isAbstract(property.getActualType().getModifiers())) {
|
||||
return ClassTypeInformation.OBJECT;
|
||||
}
|
||||
|
||||
return NESTED_DOCUMENT;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Converter to skip all properties after an association property was rendered.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
protected static class AssociationConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final MongoPersistentProperty property;
|
||||
private boolean associationFound;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AssociationConverter} for the given {@link Association}.
|
||||
*
|
||||
* @param association must not be {@literal null}.
|
||||
*/
|
||||
public AssociationConverter(Association<MongoPersistentProperty> association) {
|
||||
|
||||
Assert.notNull(association, "Association must not be null!");
|
||||
this.property = association.getInverse();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty source) {
|
||||
|
||||
if (associationFound) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (property.equals(source)) {
|
||||
associationFound = true;
|
||||
}
|
||||
|
||||
return source.getFieldName();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,16 +15,36 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update.Modifier;
|
||||
import org.springframework.data.mongodb.core.query.Update.Modifiers;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* A subclass of {@link QueryMapper} that retains type information on the mongo types.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class UpdateMapper extends QueryMapper {
|
||||
|
||||
private final MongoWriter<?> converter;
|
||||
private final MongoConverter converter;
|
||||
|
||||
/**
|
||||
* Creates a new {@link UpdateMapper} using the given {@link MongoConverter}.
|
||||
@@ -46,7 +66,287 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
protected Object delegateConvertToMongoType(Object source, MongoPersistentEntity<?> entity) {
|
||||
return entity == null ? super.delegateConvertToMongoType(source, null) : converter.convertToMongoType(source,
|
||||
entity.getTypeInformation());
|
||||
return converter.convertToMongoType(source,
|
||||
entity == null ? ClassTypeInformation.OBJECT : getTypeHintForEntity(source, entity));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper#getMappedObjectForField(org.springframework.data.mongodb.core.convert.QueryMapper.Field, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Entry<String, Object> getMappedObjectForField(Field field, Object rawValue) {
|
||||
|
||||
if (isDBObject(rawValue)) {
|
||||
return createMapEntry(field, convertSimpleOrDBObject(rawValue, field.getPropertyEntity()));
|
||||
}
|
||||
|
||||
if (isQuery(rawValue)) {
|
||||
return createMapEntry(field,
|
||||
super.getMappedObject(((Query) rawValue).getQueryObject(), field.getPropertyEntity()));
|
||||
}
|
||||
|
||||
if (isUpdateModifier(rawValue)) {
|
||||
return getMappedUpdateModifier(field, rawValue);
|
||||
}
|
||||
|
||||
return super.getMappedObjectForField(field, rawValue);
|
||||
}
|
||||
|
||||
private Entry<String, Object> getMappedUpdateModifier(Field field, Object rawValue) {
|
||||
Object value = null;
|
||||
|
||||
if (rawValue instanceof Modifier) {
|
||||
|
||||
value = getMappedValue(field, (Modifier) rawValue);
|
||||
|
||||
} else if (rawValue instanceof Modifiers) {
|
||||
|
||||
DBObject modificationOperations = new BasicDBObject();
|
||||
|
||||
for (Modifier modifier : ((Modifiers) rawValue).getModifiers()) {
|
||||
modificationOperations.putAll(getMappedValue(field, modifier).toMap());
|
||||
}
|
||||
|
||||
value = modificationOperations;
|
||||
} else {
|
||||
throw new IllegalArgumentException(String.format("Unable to map value of type '%s'!", rawValue.getClass()));
|
||||
}
|
||||
|
||||
return createMapEntry(field, value);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper#isAssociationConversionNecessary(org.springframework.data.mongodb.core.convert.QueryMapper.Field, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected boolean isAssociationConversionNecessary(Field documentField, Object value) {
|
||||
return super.isAssociationConversionNecessary(documentField, value) || documentField.containsAssociation();
|
||||
}
|
||||
|
||||
private boolean isUpdateModifier(Object value) {
|
||||
return value instanceof Modifier || value instanceof Modifiers;
|
||||
}
|
||||
|
||||
private boolean isQuery(Object value) {
|
||||
return value instanceof Query;
|
||||
}
|
||||
|
||||
private DBObject getMappedValue(Field field, Modifier modifier) {
|
||||
|
||||
TypeInformation<?> typeHint = field == null ? ClassTypeInformation.OBJECT : field.getTypeHint();
|
||||
|
||||
Object value = converter.convertToMongoType(modifier.getValue(), typeHint);
|
||||
return new BasicDBObject(modifier.getKey(), value);
|
||||
}
|
||||
|
||||
private TypeInformation<?> getTypeHintForEntity(Object source, MongoPersistentEntity<?> entity) {
|
||||
return processTypeHintForNestedDocuments(source, entity.getTypeInformation());
|
||||
}
|
||||
|
||||
private TypeInformation<?> processTypeHintForNestedDocuments(Object source, TypeInformation<?> info) {
|
||||
|
||||
Class<?> type = info.getActualType().getType();
|
||||
if (type.isInterface() || java.lang.reflect.Modifier.isAbstract(type.getModifiers())) {
|
||||
return info;
|
||||
}
|
||||
|
||||
if (!type.equals(source.getClass())) {
|
||||
return info;
|
||||
}
|
||||
return NESTED_DOCUMENT;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper#createPropertyField(org.springframework.data.mongodb.core.mapping.MongoPersistentEntity, java.lang.String, org.springframework.data.mapping.context.MappingContext)
|
||||
*/
|
||||
@Override
|
||||
protected Field createPropertyField(MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
return entity == null ? super.createPropertyField(entity, key, mappingContext)
|
||||
: new MetadataBackedUpdateField(entity, key, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link MetadataBackedField} that handles {@literal $} paths inside a field key. We clean up an update key
* containing a {@literal $} before handing it to the super class to make sure property lookups and transformations
* continue to work as expected. We provide a custom property converter to re-apply the cleaned-up {@literal $}s
* when constructing the mapped key.
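* <br />
* For example (illustrative only; the property and field names below are hypothetical), an update key such as
* {@literal values.$.name} is stripped to {@literal values.name} for property resolution, and the positional
* operator is re-applied when the mapped key is built:
*
* <pre>
* <code>
* // incoming update key:   values.$.name
* // key used for lookup:   values.name
* // mapped key produced:   vals.$.name   (assuming "values" maps to the field "vals")
* </code>
* </pre>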
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class MetadataBackedUpdateField extends MetadataBackedField {
|
||||
|
||||
private final String key;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MetadataBackedField} with the given {@link MongoPersistentEntity}, key and
|
||||
* {@link MappingContext}. We clean up the key before handing it up to the super class to make sure it continues to
|
||||
* work as expected.
|
||||
*
|
||||
* @param entity must not be {@literal null}.
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
*/
|
||||
public MetadataBackedUpdateField(MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
super(key.replaceAll("\\.\\$", ""), entity, mappingContext);
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getMappedKey()
|
||||
*/
|
||||
@Override
|
||||
public String getMappedKey() {
|
||||
return this.getPath() == null ? key : super.getMappedKey();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getPropertyConverter()
|
||||
*/
|
||||
@Override
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return new UpdatePropertyConverter(key);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getAssociationConverter()
|
||||
*/
|
||||
@Override
|
||||
protected Converter<MongoPersistentProperty, String> getAssociationConverter() {
|
||||
return new UpdateAssociationConverter(getAssociation(), key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Special mapper handling positional parameter {@literal $} within property names.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
*/
|
||||
private static class UpdateKeyMapper {
|
||||
|
||||
private final Iterator<String> iterator;
|
||||
|
||||
protected UpdateKeyMapper(String rawKey) {
|
||||
|
||||
Assert.hasText(rawKey, "Key must not be null or empty!");
|
||||
|
||||
this.iterator = Arrays.asList(rawKey.split("\\.")).iterator();
|
||||
this.iterator.next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps the property name while retaining potential positional operator {@literal $}.
|
||||
*
|
||||
* @param property
|
||||
* @return
|
||||
*/
|
||||
protected String mapPropertyName(MongoPersistentProperty property) {
|
||||
|
||||
String mappedName = PropertyToFieldNameConverter.INSTANCE.convert(property);
|
||||
|
||||
boolean inspect = iterator.hasNext();
|
||||
while (inspect) {
|
||||
|
||||
String partial = iterator.next();
|
||||
|
||||
boolean isPositional = isPositionalParameter(partial);
|
||||
if (isPositional) {
|
||||
mappedName += "." + partial;
|
||||
}
|
||||
|
||||
inspect = isPositional && iterator.hasNext();
|
||||
}
|
||||
|
||||
return mappedName;
|
||||
}
|
||||
|
||||
boolean isPositionalParameter(String partial) {
|
||||
|
||||
if (partial.equals("$")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
Long.valueOf(partial);
|
||||
return true;
|
||||
} catch (NumberFormatException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Special {@link Converter} for {@link MongoPersistentProperty} instances that will concatenate the {@literal $}
|
||||
* contained in the source update key.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private static class UpdatePropertyConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final UpdateKeyMapper mapper;
|
||||
|
||||
/**
|
||||
* Creates a new {@link UpdatePropertyConverter} with the given update key.
|
||||
*
|
||||
* @param updateKey must not be {@literal null} or empty.
|
||||
*/
|
||||
public UpdatePropertyConverter(String updateKey) {
|
||||
|
||||
Assert.hasText(updateKey, "Update key must not be null or empty!");
|
||||
|
||||
this.mapper = new UpdateKeyMapper(updateKey);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty property) {
|
||||
return mapper.mapPropertyName(property);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Converter} retaining positional parameter {@literal $} for {@link Association}s.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
protected static class UpdateAssociationConverter extends AssociationConverter {
|
||||
|
||||
private final UpdateKeyMapper mapper;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AssociationConverter} for the given {@link Association}.
|
||||
*
|
||||
* @param association must not be {@literal null}.
|
||||
*/
|
||||
public UpdateAssociationConverter(Association<MongoPersistentProperty> association, String key) {
|
||||
|
||||
super(association);
|
||||
this.mapper = new UpdateKeyMapper(key);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty source) {
|
||||
return super.convert(source) == null ? null : mapper.mapPropertyName(source);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,42 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.data.mapping.model.SpELExpressionEvaluator;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Internal API to trigger the resolution of properties.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
interface ValueResolver {
|
||||
|
||||
/**
|
||||
* Resolves the value for the given {@link MongoPersistentProperty} within the given {@link DBObject} using the given
|
||||
* {@link SpELExpressionEvaluator} and {@link ObjectPath}.
|
||||
*
|
||||
* @param prop
|
||||
* @param dbo
|
||||
* @param evaluator
|
||||
* @param parent
|
||||
* @return
|
||||
*/
|
||||
Object getValueInternal(MongoPersistentProperty prop, DBObject dbo, SpELExpressionEvaluator evaluator,
|
||||
ObjectPath parent);
|
||||
}
|
||||
@@ -1,106 +0,0 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Represents a geospatial box value
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class Box implements Shape {
|
||||
|
||||
@Field(order = 10)
|
||||
private final Point first;
|
||||
@Field(order = 20)
|
||||
private final Point second;
|
||||
|
||||
public Box(Point lowerLeft, Point upperRight) {
|
||||
Assert.notNull(lowerLeft);
|
||||
Assert.notNull(upperRight);
|
||||
this.first = lowerLeft;
|
||||
this.second = upperRight;
|
||||
}
|
||||
|
||||
public Box(double[] lowerLeft, double[] upperRight) {
|
||||
Assert.isTrue(lowerLeft.length == 2, "Point array has to have 2 elements!");
|
||||
Assert.isTrue(upperRight.length == 2, "Point array has to have 2 elements!");
|
||||
this.first = new Point(lowerLeft[0], lowerLeft[1]);
|
||||
this.second = new Point(upperRight[0], upperRight[1]);
|
||||
}
|
||||
|
||||
public Point getLowerLeft() {
|
||||
return first;
|
||||
}
|
||||
|
||||
public Point getUpperRight() {
|
||||
return second;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.geo.Shape#asList()
|
||||
*/
|
||||
public List<? extends Object> asList() {
|
||||
List<List<Double>> list = new ArrayList<List<Double>>();
|
||||
list.add(getLowerLeft().asList());
|
||||
list.add(getUpperRight().asList());
|
||||
return list;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.geo.Shape#getCommand()
|
||||
*/
|
||||
public String getCommand() {
|
||||
return "$box";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("Box [%s, %s]", first, second);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
int result = 31;
|
||||
result += 17 * first.hashCode();
|
||||
result += 17 * second.hashCode();
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
Box that = (Box) obj;
|
||||
return this.first.equals(that.first) && this.second.equals(that.second);
|
||||
}
|
||||
}
|
||||
@@ -1,138 +0,0 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.annotation.PersistenceConstructor;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Represents a geospatial circle value
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class Circle implements Shape {
|
||||
|
||||
private final Point center;
|
||||
private final double radius;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Circle} from the given {@link Point} and radius.
|
||||
*
|
||||
* @param center must not be {@literal null}.
|
||||
* @param radius must be greater or equal to zero.
|
||||
*/
|
||||
@PersistenceConstructor
|
||||
public Circle(Point center, double radius) {
|
||||
|
||||
Assert.notNull(center);
|
||||
Assert.isTrue(radius >= 0, "Radius must not be negative!");
|
||||
|
||||
this.center = center;
|
||||
this.radius = radius;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Circle} from the given coordinates and radius.
|
||||
*
|
||||
* @param centerX
|
||||
* @param centerY
|
||||
* @param radius must be greater or equal to zero.
|
||||
*/
|
||||
public Circle(double centerX, double centerY, double radius) {
|
||||
this(new Point(centerX, centerY), radius);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the center of the {@link Circle}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
public Point getCenter() {
|
||||
return center;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the radius of the {@link Circle}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public double getRadius() {
|
||||
return radius;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.geo.Shape#asList()
|
||||
*/
|
||||
public List<Object> asList() {
|
||||
List<Object> result = new ArrayList<Object>();
|
||||
result.add(getCenter().asList());
|
||||
result.add(getRadius());
|
||||
return result;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.geo.Shape#getCommand()
|
||||
*/
|
||||
public String getCommand() {
|
||||
return "$center";
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("Circle [center=%s, radius=%f]", center, radius);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || !getClass().equals(obj.getClass())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Circle that = (Circle) obj;
|
||||
|
||||
return this.center.equals(that.center) && this.radius == that.radius;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = 17;
|
||||
result += 31 * center.hashCode();
|
||||
result += 31 * radius;
|
||||
return result;
|
||||
}
|
||||
}
|
||||
@@ -1,145 +0,0 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Value object to represent distances in a given metric.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class Distance {
|
||||
|
||||
private final double value;
|
||||
private final Metric metric;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Distance}.
|
||||
*
|
||||
* @param value
|
||||
*/
|
||||
public Distance(double value) {
|
||||
this(value, Metrics.NEUTRAL);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Distance} with the given {@link Metric}.
|
||||
*
|
||||
* @param value
|
||||
* @param metric
|
||||
*/
|
||||
public Distance(double value, Metric metric) {
|
||||
this.value = value;
|
||||
this.metric = metric == null ? Metrics.NEUTRAL : metric;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the value
|
||||
*/
|
||||
public double getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the normalized value regarding the underlying {@link Metric}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public double getNormalizedValue() {
|
||||
return value / metric.getMultiplier();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the metric
|
||||
*/
|
||||
public Metric getMetric() {
|
||||
return metric;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the given distance to the current one. The resulting {@link Distance} will be in the same metric as the
|
||||
* current one.
|
||||
*
|
||||
* @param other
|
||||
* @return
|
||||
*/
|
||||
public Distance add(Distance other) {
|
||||
double newNormalizedValue = getNormalizedValue() + other.getNormalizedValue();
|
||||
return new Distance(newNormalizedValue * metric.getMultiplier(), metric);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the given {@link Distance} to the current one and forces the result to be in a given {@link Metric}.
|
||||
*
|
||||
* @param other
|
||||
* @param metric
|
||||
* @return
|
||||
*/
|
||||
public Distance add(Distance other, Metric metric) {
|
||||
double newLeft = getNormalizedValue() * metric.getMultiplier();
|
||||
double newRight = other.getNormalizedValue() * metric.getMultiplier();
|
||||
return new Distance(newLeft + newRight, metric);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || !getClass().equals(obj.getClass())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Distance that = (Distance) obj;
|
||||
|
||||
return this.value == that.value && ObjectUtils.nullSafeEquals(this.metric, that.metric);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = 17;
|
||||
result += 31 * Double.doubleToLongBits(value);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(metric);
|
||||
return result;
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
|
||||
StringBuilder builder = new StringBuilder();
|
||||
builder.append(value);
|
||||
|
||||
if (metric != Metrics.NEUTRAL) {
|
||||
builder.append(" ").append(metric.toString());
|
||||
}
|
||||
|
||||
return builder.toString();
|
||||
}
|
||||
}
|
||||
@@ -1,63 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.PageImpl;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
|
||||
/**
|
||||
* Custom {@link Page} to carry the average distance retrieved from the {@link GeoResults} the {@link GeoPage} is set up
|
||||
* from.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class GeoPage<T> extends PageImpl<GeoResult<T>> {
|
||||
|
||||
private static final long serialVersionUID = 23421312312412L;
|
||||
private final Distance averageDistance;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoPage} from the given {@link GeoResults}.
|
||||
*
|
||||
* @param content must not be {@literal null}.
|
||||
*/
|
||||
public GeoPage(GeoResults<T> results) {
|
||||
super(results.getContent());
|
||||
this.averageDistance = results.getAverageDistance();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoPage} from the given {@link GeoResults}, {@link Pageable} and total.
|
||||
*
|
||||
* @param results must not be {@literal null}.
|
||||
* @param pageable must not be {@literal null}.
|
||||
* @param total
|
||||
*/
|
||||
public GeoPage(GeoResults<T> results, Pageable pageable, long total) {
|
||||
super(results.getContent(), pageable, total);
|
||||
this.averageDistance = results.getAverageDistance();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the average distance of the underlying results.
|
||||
*
|
||||
* @return the averageDistance
|
||||
*/
|
||||
public Distance getAverageDistance() {
|
||||
return averageDistance;
|
||||
}
|
||||
}
|
||||
@@ -1,102 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Calue object capturing some arbitrary object plus a distance.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class GeoResult<T> {
|
||||
|
||||
private final T content;
|
||||
private final Distance distance;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoResult} for the given content and distance.
|
||||
*
|
||||
* @param content must not be {@literal null}.
|
||||
* @param distance must not be {@literal null}.
|
||||
*/
|
||||
public GeoResult(T content, Distance distance) {
|
||||
Assert.notNull(content);
|
||||
Assert.notNull(distance);
|
||||
this.content = content;
|
||||
this.distance = distance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the actual content object.
|
||||
*
|
||||
* @return the content
|
||||
*/
|
||||
public T getContent() {
|
||||
return content;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the distance the actual content object has from the origin.
|
||||
*
|
||||
* @return the distance
|
||||
*/
|
||||
public Distance getDistance() {
|
||||
return distance;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || !getClass().equals(obj.getClass())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
GeoResult<?> that = (GeoResult<?>) obj;
|
||||
|
||||
return this.content.equals(that.content) && this.distance.equals(that.distance);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
int result = 17;
|
||||
result += 31 * distance.hashCode();
|
||||
result += 31 * content.hashCode();
|
||||
return result;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("GeoResult [content: %s, distance: %s, ]", content.toString(), distance.toString());
|
||||
}
|
||||
}
|
||||
@@ -1,145 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.annotation.PersistenceConstructor;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Value object to capture {@link GeoResult}s as well as the average distance they have.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class GeoResults<T> implements Iterable<GeoResult<T>> {
|
||||
|
||||
private final List<GeoResult<T>> results;
|
||||
private final Distance averageDistance;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoResults} instance manually calculating the average distance from the distance values of the
|
||||
* given {@link GeoResult}s.
|
||||
*
|
||||
* @param results must not be {@literal null}.
|
||||
*/
|
||||
public GeoResults(List<GeoResult<T>> results) {
|
||||
this(results, (Metric) null);
|
||||
}
|
||||
|
||||
public GeoResults(List<GeoResult<T>> results, Metric metric) {
|
||||
this(results, calculateAverageDistance(results, metric));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoResults} instance from the given {@link GeoResult}s and average distance.
|
||||
*
|
||||
* @param results must not be {@literal null}.
|
||||
* @param averageDistance
|
||||
*/
|
||||
@PersistenceConstructor
|
||||
public GeoResults(List<GeoResult<T>> results, Distance averageDistance) {
|
||||
Assert.notNull(results);
|
||||
this.results = results;
|
||||
this.averageDistance = averageDistance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the average distance of all {@link GeoResult}s in this list.
|
||||
*
|
||||
* @return the averageDistance
|
||||
*/
|
||||
public Distance getAverageDistance() {
|
||||
return averageDistance;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Iterable#iterator()
|
||||
*/
|
||||
public Iterator<GeoResult<T>> iterator() {
|
||||
return results.iterator();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the actual
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public List<GeoResult<T>> getContent() {
|
||||
return Collections.unmodifiableList(results);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || !getClass().equals(obj.getClass())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
GeoResults<?> that = (GeoResults<?>) obj;
|
||||
|
||||
return this.results.equals(that.results) && this.averageDistance == that.averageDistance;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = 17;
|
||||
result += 31 * results.hashCode();
|
||||
result += 31 * averageDistance.hashCode();
|
||||
return result;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("GeoResults: [averageDistance: %s, results: %s]", averageDistance.toString(),
|
||||
StringUtils.collectionToCommaDelimitedString(results));
|
||||
}
|
||||
|
||||
private static Distance calculateAverageDistance(List<? extends GeoResult<?>> results, Metric metric) {
|
||||
|
||||
if (results.isEmpty()) {
|
||||
return new Distance(0, metric);
|
||||
}
|
||||
|
||||
double averageDistance = 0;
|
||||
|
||||
for (GeoResult<?> result : results) {
|
||||
averageDistance += result.getDistance().getValue();
|
||||
}
|
||||
|
||||
return new Distance(averageDistance / results.size(), metric);
|
||||
}
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
/**
|
||||
* Interface for {@link Metric}s that can be applied to a base scale.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public interface Metric {
|
||||
|
||||
/**
|
||||
* Returns the multiplier to calculate metrics values from a base scale.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
double getMultiplier();
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
|
||||
/**
|
||||
* Commonly used {@link Metrics} for {@link NearQuery}s.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public enum Metrics implements Metric {
|
||||
|
||||
KILOMETERS(6378.137), MILES(3963.191), NEUTRAL(1);
|
||||
|
||||
private final double multiplier;
|
||||
|
||||
private Metrics(double multiplier) {
|
||||
this.multiplier = multiplier;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.geo.Metric#getMultiplier()
|
||||
*/
|
||||
public double getMultiplier() {
|
||||
return multiplier;
|
||||
}
|
||||
}
|
||||
@@ -1,103 +0,0 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.annotation.PersistenceConstructor;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Represents a geospatial point value.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class Point {
|
||||
|
||||
@Field(order = 10)
|
||||
private final double x;
|
||||
@Field(order = 20)
|
||||
private final double y;
|
||||
|
||||
@PersistenceConstructor
|
||||
public Point(double x, double y) {
|
||||
this.x = x;
|
||||
this.y = y;
|
||||
}
|
||||
|
||||
public Point(Point point) {
|
||||
Assert.notNull(point);
|
||||
this.x = point.x;
|
||||
this.y = point.y;
|
||||
}
|
||||
|
||||
public double getX() {
|
||||
return x;
|
||||
}
|
||||
|
||||
public double getY() {
|
||||
return y;
|
||||
}
|
||||
|
||||
public double[] asArray() {
|
||||
return new double[] { x, y };
|
||||
}
|
||||
|
||||
public List<Double> asList() {
|
||||
return Arrays.asList(x, y);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
long temp;
|
||||
temp = Double.doubleToLongBits(x);
|
||||
result = prime * result + (int) (temp ^ (temp >>> 32));
|
||||
temp = Double.doubleToLongBits(y);
|
||||
result = prime * result + (int) (temp ^ (temp >>> 32));
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
Point other = (Point) obj;
|
||||
if (Double.doubleToLongBits(x) != Double.doubleToLongBits(other.x)) {
|
||||
return false;
|
||||
}
|
||||
if (Double.doubleToLongBits(y) != Double.doubleToLongBits(other.y)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("Point [latitude=%f, longitude=%f]", x, y);
|
||||
}
|
||||
}
|
||||
@@ -1,113 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Simple value object to represent a {@link Polygon}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class Polygon implements Shape, Iterable<Point> {
|
||||
|
||||
private final List<Point> points;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Polygon} for the given Points.
|
||||
*
|
||||
* @param x
|
||||
* @param y
|
||||
* @param z
|
||||
* @param others
|
||||
*/
|
||||
public Polygon(Point x, Point y, Point z, Point... others) {
|
||||
|
||||
Assert.notNull(x);
|
||||
Assert.notNull(y);
|
||||
Assert.notNull(z);
|
||||
Assert.notNull(others);
|
||||
|
||||
this.points = new ArrayList<Point>(3 + others.length);
|
||||
this.points.addAll(Arrays.asList(x, y, z));
|
||||
this.points.addAll(Arrays.asList(others));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.geo.Shape#asList()
|
||||
*/
|
||||
public List<List<Double>> asList() {
|
||||
|
||||
List<List<Double>> result = new ArrayList<List<Double>>();
|
||||
|
||||
for (Point point : points) {
|
||||
result.add(point.asList());
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.geo.Shape#getCommand()
|
||||
*/
|
||||
public String getCommand() {
|
||||
return "$polygon";
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Iterable#iterator()
|
||||
*/
|
||||
public Iterator<Point> iterator() {
|
||||
return this.points.iterator();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || !getClass().equals(obj.getClass())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Polygon that = (Polygon) obj;
|
||||
|
||||
return this.points.equals(that.points);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return points.hashCode();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,152 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.annotation.PersistenceConstructor;
|
||||
import org.springframework.data.geo.Circle;
|
||||
import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.geo.Shape;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Represents a geospatial sphere value.
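* <br />
* A construction sketch (the coordinates and radius below are hypothetical):
*
* <pre>
* <code>
* Sphere sphere = new Sphere(new Point(-73.99171, 40.738868), new Distance(0.01));
* </code>
* </pre>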
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @since 1.5
|
||||
*/
|
||||
public class Sphere implements Shape {
|
||||
|
||||
public static final String COMMAND = "$centerSphere";
|
||||
private final Point center;
|
||||
private final Distance radius;
|
||||
|
||||
/**
|
||||
* Creates a Sphere around the given center {@link Point} with the given radius.
|
||||
*
|
||||
* @param center must not be {@literal null}.
|
||||
* @param radius must not be {@literal null}.
|
||||
*/
|
||||
@PersistenceConstructor
|
||||
public Sphere(Point center, Distance radius) {
|
||||
|
||||
Assert.notNull(center);
|
||||
Assert.notNull(radius);
|
||||
Assert.isTrue(radius.getValue() >= 0, "Radius must not be negative!");
|
||||
|
||||
this.center = center;
|
||||
this.radius = radius;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a Sphere around the given center {@link Point} with the given radius.
|
||||
*
|
||||
* @param center
|
||||
* @param radius
|
||||
*/
|
||||
public Sphere(Point center, double radius) {
|
||||
this(center, new Distance(radius));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a Sphere from the given {@link Circle}.
|
||||
*
|
||||
* @param circle
|
||||
*/
|
||||
public Sphere(Circle circle) {
|
||||
this(circle.getCenter(), circle.getRadius());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the center of the {@link Circle}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
public Point getCenter() {
|
||||
return new Point(this.center);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the radius of the {@link Circle}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public Distance getRadius() {
|
||||
return radius;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("Sphere [center=%s, radius=%s]", center, radius);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || !(obj instanceof Sphere)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Sphere that = (Sphere) obj;
|
||||
|
||||
return this.center.equals(that.center) && this.radius.equals(that.radius);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = 17;
|
||||
result += 31 * center.hashCode();
|
||||
result += 31 * radius.hashCode();
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link Shape} as a list of usually {@link Double} or {@link List}s of {@link Double}s. Wildcard bound
|
||||
* to allow implementations to return a more concrete element type.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public List<? extends Object> asList() {
|
||||
return Arrays.asList(Arrays.asList(center.getX(), center.getY()), this.radius.getValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the command to be used to create the {@literal $within} criterion.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getCommand() {
|
||||
return COMMAND;
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,18 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
/**
|
||||
* Support for MongoDB geo-spatial queries.
|
||||
*/
|
||||
|
||||
@@ -36,11 +36,12 @@ public @interface CompoundIndex {
|
||||
|
||||
/**
|
||||
* The actual index definition in JSON format. The keys of the JSON document are the fields to be indexed, the values
|
||||
* define the index direction (1 for ascending, -1 for descending).
|
||||
* define the index direction (1 for ascending, -1 for descending). <br />
|
||||
* If left empty on a nested document, the whole document will be indexed.
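* <br />
* A minimal illustration (the class and field names below are hypothetical):
*
* <pre>
* <code>
* @Document
* @CompoundIndex(name = "lastname_age_idx", def = "{'lastName': 1, 'age': -1}")
* class Person {
*   String lastName;
*   int age;
* }
* </code>
* </pre>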
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String def();
|
||||
String def() default "";
|
||||
|
||||
/**
|
||||
* It does not actually make sense to use that attribute as the direction has to be defined in the {@link #def()}
|
||||
@@ -51,25 +52,87 @@ public @interface CompoundIndex {
|
||||
@Deprecated
|
||||
IndexDirection direction() default IndexDirection.ASCENDING;
|
||||
|
||||
/**
|
||||
* @see http://docs.mongodb.org/manual/core/index-unique/
|
||||
* @return
|
||||
*/
|
||||
boolean unique() default false;
|
||||
|
||||
/**
|
||||
* If set to true index will skip over any document that is missing the indexed field.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/core/index-sparse/
|
||||
* @return
|
||||
*/
|
||||
boolean sparse() default false;
|
||||
|
||||
/**
|
||||
* @see http://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping
|
||||
* @return
|
||||
*/
|
||||
boolean dropDups() default false;
|
||||
|
||||
/**
|
||||
* The name of the index to be created.
|
||||
* The name of the index to be created. <br />
|
||||
* <br />
|
||||
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
|
||||
* provided name will be prefixed with the path leading to the entity. <br />
|
||||
* <br />
|
||||
* The structure below
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* @Document
|
||||
* class Root {
|
||||
* Hybrid hybrid;
|
||||
* Nested nested;
|
||||
* }
|
||||
*
|
||||
* @Document
|
||||
* @CompoundIndex(name = "compound_index", def = "{'h1': 1, 'h2': 1}")
|
||||
* class Hybrid {
|
||||
* String h1, h2;
|
||||
* }
|
||||
*
|
||||
* @CompoundIndex(name = "compound_index", def = "{'n1': 1, 'n2': 1}")
|
||||
* class Nested {
|
||||
* String n1, n2;
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* resolves to the following index structures
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* db.root.ensureIndex( { hybrid.h1: 1, hybrid.h2: 1 } , { name: "hybrid.compound_index" } )
|
||||
* db.root.ensureIndex( { nested.n1: 1, nested.n2: 1 } , { name: "nested.compound_index" } )
|
||||
* db.hybrid.ensureIndex( { h1: 1, h2: 1 } , { name: "compound_index" } )
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String name() default "";
|
||||
|
||||
/**
|
||||
* If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults
|
||||
* to {@literal false}.
|
||||
*
|
||||
* @return
|
||||
* @since 1.5
|
||||
*/
|
||||
boolean useGeneratedName() default false;
|
||||
|
||||
/**
|
||||
* The collection the index will be created in. Will default to the collection the annotated domain class will be
|
||||
* stored in.
|
||||
*
|
||||
* @return
|
||||
* @deprecated The collection name is derived from the domain type. Fixing the collection via this attribute might
|
||||
* result in broken definitions. Will be removed in 1.7.
|
||||
*/
|
||||
@Deprecated
|
||||
String collection() default "";
|
||||
|
||||
/**
|
||||
@@ -80,11 +143,4 @@ public @interface CompoundIndex {
|
||||
*/
|
||||
boolean background() default false;
|
||||
|
||||
/**
|
||||
* Configures the number of seconds after which the collection should expire. Defaults to -1 for no expiry.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/tutorial/expire-data/
|
||||
* @return
|
||||
*/
|
||||
int expireAfterSeconds() default -1;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,56 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Index definition to span multiple keys.
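* <br />
* A usage sketch (the template, collection and key names below are hypothetical):
*
* <pre>
* <code>
* DBObject keys = new BasicDBObject("lastName", 1).append("age", -1);
* mongoTemplate.indexOps("person").ensureIndex(new CompoundIndexDefinition(keys));
* </code>
* </pre>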
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.5
|
||||
*/
|
||||
public class CompoundIndexDefinition extends Index {
|
||||
|
||||
private DBObject keys;
|
||||
|
||||
/**
|
||||
* Creates a new {@link CompoundIndexDefinition} for the given keys.
|
||||
*
|
||||
* @param keys must not be {@literal null}.
|
||||
*/
|
||||
public CompoundIndexDefinition(DBObject keys) {
|
||||
|
||||
Assert.notNull(keys, "Keys must not be null!");
|
||||
this.keys = keys;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.Index#getIndexKeys()
|
||||
*/
|
||||
@Override
|
||||
public DBObject getIndexKeys() {
|
||||
|
||||
BasicDBObject dbo = new BasicDBObject();
|
||||
dbo.putAll(this.keys);
|
||||
dbo.putAll(super.getIndexKeys());
|
||||
return dbo;
|
||||
}
|
||||
}
|
||||
@@ -13,31 +13,29 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
/**
|
||||
* Value object to create custom {@link Metric}s on the fly.
|
||||
* Geospatial index type.
|
||||
*
|
||||
* @author Laurent Canet
|
||||
* @author Oliver Gierke
|
||||
* @since 1.4
|
||||
*/
|
||||
public class CustomMetric implements Metric {
|
||||
|
||||
private final double multiplier;
|
||||
public enum GeoSpatialIndexType {
|
||||
|
||||
/**
|
||||
* Creates a custom {@link Metric} using the given multiplier.
|
||||
*
|
||||
* @param multiplier
|
||||
* Simple 2-Dimensional index for legacy-format points.
|
||||
*/
|
||||
public CustomMetric(double multiplier) {
|
||||
this.multiplier = multiplier;
|
||||
}
|
||||
GEO_2D,
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.geo.Metric#getMultiplier()
|
||||
/**
|
||||
* 2D index for GeoJSON-formatted data over a sphere. Available as of MongoDB 2.4.
|
||||
*/
|
||||
public double getMultiplier() {
|
||||
return multiplier;
|
||||
}
|
||||
GEO_2DSPHERE,
|
||||
|
||||
/**
|
||||
* A haystack index for grouping results over small result sets.
|
||||
*/
|
||||
GEO_HAYSTACK
|
||||
}
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -13,7 +13,6 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
@@ -24,24 +23,72 @@ import java.lang.annotation.Target;
|
||||
/**
|
||||
* Mark a field to be indexed using MongoDB's geospatial indexing feature.
|
||||
*
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* @author Jon Brisbin
|
||||
* @author Laurent Canet
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@Target(ElementType.FIELD)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface GeoSpatialIndexed {
|
||||
|
||||
/**
|
||||
* Name of the property in the document that contains the [x, y] or radial coordinates to index.
|
||||
* Index name. <br />
|
||||
* <br />
|
||||
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
|
||||
* provided name will be prefixed with the path leading to the entity. <br />
|
||||
* <br />
|
||||
* The structure below
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* @Document
|
||||
* class Root {
|
||||
* Hybrid hybrid;
|
||||
* Nested nested;
|
||||
* }
|
||||
*
|
||||
* @Document
|
||||
* class Hybrid {
|
||||
* @GeoSpatialIndexed(name="index") Point h1;
|
||||
* }
|
||||
*
|
||||
* class Nested {
|
||||
* @GeoSpatialIndexed(name="index") Point n1;
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* resolves to the following index structures
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* db.root.ensureIndex( { hybrid.h1: "2d" } , { name: "hybrid.index" } )
|
||||
* db.root.ensureIndex( { nested.n1: "2d" } , { name: "nested.index" } )
|
||||
* db.hybrid.ensureIndex( { h1: "2d" } , { name: "index" } )
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String name() default "";
|
||||
|
||||
/**
|
||||
* If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults
|
||||
* to {@literal false}.
|
||||
*
|
||||
* @return
|
||||
* @since 1.5
|
||||
*/
|
||||
boolean useGeneratedName() default false;
|
||||
|
||||
/**
|
||||
* Name of the collection in which to create the index.
|
||||
*
|
||||
* @return
|
||||
* @deprecated The collection name is derived from the domain type. Fixing the collection via this attribute might
|
||||
* result in broken definitions. Will be removed in 1.7.
|
||||
*/
|
||||
@Deprecated
|
||||
String collection() default "";
|
||||
|
||||
/**
|
||||
@@ -65,4 +112,27 @@ public @interface GeoSpatialIndexed {
|
||||
*/
|
||||
int bits() default 26;
|
||||
|
||||
/**
|
||||
* The type of the geospatial index. Defaults to {@link GeoSpatialIndexType#GEO_2D}.
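* <br />
* For example (the field below is hypothetical):
*
* <pre>
* <code>
* @GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE)
* Point location;
* </code>
* </pre>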
|
||||
*
|
||||
* @since 1.4
|
||||
* @return
|
||||
*/
|
||||
GeoSpatialIndexType type() default GeoSpatialIndexType.GEO_2D;
|
||||
|
||||
/**
|
||||
* The bucket size for {@link GeoSpatialIndexType#GEO_HAYSTACK} indexes, in coordinate units.
|
||||
*
|
||||
* @since 1.4
|
||||
* @return
|
||||
*/
|
||||
double bucketSize() default 1.0;
|
||||
|
||||
/**
|
||||
* The name of the additional field to use for {@link GeoSpatialIndexType#GEO_HAYSTACK} indexes
|
||||
*
|
||||
* @since 1.4
|
||||
* @return
|
||||
*/
|
||||
String additionalField() default "";
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,6 +16,7 @@
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
@@ -25,14 +26,19 @@ import com.mongodb.DBObject;
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Laurent Canet
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class GeospatialIndex implements IndexDefinition {
|
||||
|
||||
private final String field;
|
||||
private String name;
|
||||
private Integer min = null;
|
||||
private Integer max = null;
|
||||
private Integer bits = null;
|
||||
private Integer min;
|
||||
private Integer max;
|
||||
private Integer bits;
|
||||
private GeoSpatialIndexType type = GeoSpatialIndexType.GEO_2D;
|
||||
private Double bucketSize = 1.0;
|
||||
private String additionalField;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeospatialIndex} for the given field.
|
||||
@@ -40,53 +46,146 @@ public class GeospatialIndex implements IndexDefinition {
|
||||
* @param field must not be empty or {@literal null}.
|
||||
*/
|
||||
public GeospatialIndex(String field) {
|
||||
Assert.hasText(field);
|
||||
|
||||
Assert.hasText(field, "Field must have text!");
|
||||
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex named(String name) {
|
||||
|
||||
this.name = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param min
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withMin(int min) {
|
||||
this.min = Integer.valueOf(min);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param max
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withMax(int max) {
|
||||
this.max = Integer.valueOf(max);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param bits
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withBits(int bits) {
|
||||
this.bits = Integer.valueOf(bits);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param type must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex typed(GeoSpatialIndexType type) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
|
||||
this.type = type;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param bucketSize
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withBucketSize(double bucketSize) {
|
||||
this.bucketSize = bucketSize;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param fieldName
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withAdditionalField(String fieldName) {
|
||||
this.additionalField = fieldName;
|
||||
return this;
|
||||
}
|
||||
|
||||
public DBObject getIndexKeys() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
dbo.put(field, "2d");
|
||||
|
||||
switch (type) {
|
||||
|
||||
case GEO_2D:
|
||||
dbo.put(field, "2d");
|
||||
break;
|
||||
|
||||
case GEO_2DSPHERE:
|
||||
dbo.put(field, "2dsphere");
|
||||
break;
|
||||
|
||||
case GEO_HAYSTACK:
|
||||
dbo.put(field, "geoHaystack");
|
||||
if (!StringUtils.hasText(additionalField)) {
|
||||
throw new IllegalArgumentException("When defining geoHaystack index, an additional field must be defined");
|
||||
}
|
||||
dbo.put(additionalField, 1);
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new IllegalArgumentException("Unsupported geospatial index " + type);
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
|
||||
|
||||
public DBObject getIndexOptions() {
|
||||
if (name == null && min == null && max == null) {
|
||||
|
||||
if (!StringUtils.hasText(name) && min == null && max == null && bucketSize == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
if (name != null) {
|
||||
if (StringUtils.hasText(name)) {
|
||||
dbo.put("name", name);
|
||||
}
|
||||
if (min != null) {
|
||||
dbo.put("min", min);
|
||||
}
|
||||
if (max != null) {
|
||||
dbo.put("max", max);
|
||||
}
|
||||
if (bits != null) {
|
||||
dbo.put("bits", bits);
|
||||
|
||||
switch (type) {
|
||||
|
||||
case GEO_2D:
|
||||
|
||||
if (min != null) {
|
||||
dbo.put("min", min);
|
||||
}
|
||||
if (max != null) {
|
||||
dbo.put("max", max);
|
||||
}
|
||||
if (bits != null) {
|
||||
dbo.put("bits", bits);
|
||||
}
|
||||
break;
|
||||
|
||||
case GEO_2DSPHERE:
|
||||
|
||||
break;
|
||||
|
||||
case GEO_HAYSTACK:
|
||||
|
||||
if (bucketSize != null) {
|
||||
dbo.put("bucketSize", bucketSize);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
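A short sketch of the fluent API above; the field name, bucket size and index name are placeholders.

import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeospatialIndex;

public class GeospatialIndexSample {

	public static void main(String[] args) {

		GeospatialIndex index = new GeospatialIndex("location") //
				.typed(GeoSpatialIndexType.GEO_HAYSTACK) //
				.withAdditionalField("category") //
				.withBucketSize(2.0) //
				.named("location_haystack");

		// roughly: { "location" : "geoHaystack" , "category" : 1 }
		System.out.println(index.getIndexKeys());
		// roughly: { "name" : "location_haystack" , "bucketSize" : 2.0 }
		System.out.println(index.getIndexOptions());
	}
}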
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,13 +17,20 @@ package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.query.Order;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public class Index implements IndexDefinition {
|
||||
|
||||
@@ -41,8 +48,11 @@ public class Index implements IndexDefinition {
|
||||
|
||||
private boolean sparse = false;
|
||||
|
||||
public Index() {
|
||||
}
|
||||
private boolean background = false;
|
||||
|
||||
private long expire = -1;
|
||||
|
||||
public Index() {}
|
||||
|
||||
public Index(String key, Direction direction) {
|
||||
fieldSpec.put(key, direction);
|
||||
@@ -83,16 +93,71 @@ public class Index implements IndexDefinition {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reject all documents that contain a duplicate value for the indexed field.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/core/index-unique/
|
||||
* @return
|
||||
*/
|
||||
public Index unique() {
|
||||
this.unique = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Skip over any document that is missing the indexed field.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/core/index-sparse/
|
||||
* @return
|
||||
*/
|
||||
public Index sparse() {
|
||||
this.sparse = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the index in background (non blocking).
|
||||
*
|
||||
* @return
|
||||
* @since 1.5
|
||||
*/
|
||||
public Index background() {
|
||||
|
||||
this.background = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies TTL in seconds.
|
||||
*
|
||||
* @param value
|
||||
* @return
|
||||
* @since 1.5
|
||||
*/
|
||||
public Index expire(long value) {
|
||||
return expire(value, TimeUnit.SECONDS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies TTL with given {@link TimeUnit}.
|
||||
*
|
||||
* @param value
|
||||
* @param unit
|
||||
* @return
|
||||
* @since 1.5
|
||||
*/
|
||||
public Index expire(long value, TimeUnit unit) {
|
||||
|
||||
Assert.notNull(unit, "TimeUnit for expiration must not be null.");
|
||||
this.expire = unit.toSeconds(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @see http://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping
|
||||
* @param duplicates
|
||||
* @return
|
||||
*/
|
||||
public Index unique(Duplicates duplicates) {
|
||||
if (duplicates == Duplicates.DROP) {
|
||||
this.dropDuplicates = true;
|
||||
@@ -109,11 +174,9 @@ public class Index implements IndexDefinition {
|
||||
}
|
||||
|
||||
public DBObject getIndexOptions() {
|
||||
if (name == null && !unique) {
|
||||
return null;
|
||||
}
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
if (name != null) {
|
||||
if (StringUtils.hasText(name)) {
|
||||
dbo.put("name", name);
|
||||
}
|
||||
if (unique) {
|
||||
@@ -125,6 +188,13 @@ public class Index implements IndexDefinition {
|
||||
if (sparse) {
|
||||
dbo.put("sparse", true);
|
||||
}
|
||||
if (background) {
|
||||
dbo.put("background", true);
|
||||
}
|
||||
if (expire >= 0) {
|
||||
dbo.put("expireAfterSeconds", expire);
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
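A sketch combining the new background/TTL options with the existing builder methods; the key name, index name and expiry are arbitrary.

import java.util.concurrent.TimeUnit;

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.index.Index;

public class IndexOptionsSample {

	public static void main(String[] args) {

		Index index = new Index("lastAccessed", Direction.ASC) //
				.named("lastAccessed_ttl") //
				.background() //
				.expire(30, TimeUnit.MINUTES);

		// roughly: { "name" : "lastAccessed_ttl" , "background" : true , "expireAfterSeconds" : 1800 }
		System.out.println(index.getIndexOptions());
	}
}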
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2014 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,11 +20,11 @@ import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public interface IndexDefinition {
|
||||
|
||||
DBObject getIndexKeys();
|
||||
|
||||
DBObject getIndexOptions();
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012-2013 the original author or authors.
|
||||
* Copyright 2012-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -24,22 +24,33 @@ import org.springframework.util.ObjectUtils;
|
||||
* Value object for an index field.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public final class IndexField {
|
||||
|
||||
enum Type {
|
||||
GEO, TEXT, DEFAULT;
|
||||
}
|
||||
|
||||
private final String key;
|
||||
private final Direction direction;
|
||||
private final boolean isGeo;
|
||||
private final Type type;
|
||||
private final Float weight;
|
||||
|
||||
private IndexField(String key, Direction direction, boolean isGeo) {
|
||||
private IndexField(String key, Direction direction, Type type) {
|
||||
this(key, direction, type, Float.NaN);
|
||||
}
|
||||
|
||||
private IndexField(String key, Direction direction, Type type, Float weight) {
|
||||
|
||||
Assert.hasText(key);
|
||||
Assert.isTrue(direction != null ^ isGeo);
|
||||
Assert.isTrue(direction != null ^ (Type.GEO.equals(type) || Type.TEXT.equals(type)));
|
||||
|
||||
this.key = key;
|
||||
this.direction = direction;
|
||||
this.isGeo = isGeo;
|
||||
this.type = type == null ? Type.DEFAULT : type;
|
||||
this.weight = weight == null ? Float.NaN : weight;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -53,12 +64,12 @@ public final class IndexField {
|
||||
@Deprecated
|
||||
public static IndexField create(String key, Order order) {
|
||||
Assert.notNull(order);
|
||||
return new IndexField(key, order.toDirection(), false);
|
||||
return new IndexField(key, order.toDirection(), Type.DEFAULT);
|
||||
}
|
||||
|
||||
public static IndexField create(String key, Direction order) {
|
||||
Assert.notNull(order);
|
||||
return new IndexField(key, order, false);
|
||||
return new IndexField(key, order, Type.DEFAULT);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -68,7 +79,16 @@ public final class IndexField {
|
||||
* @return
|
||||
*/
|
||||
public static IndexField geo(String key) {
|
||||
return new IndexField(key, null, true);
|
||||
return new IndexField(key, null, Type.GEO);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a text {@link IndexField} for the given key.
|
||||
*
|
||||
* @since 1.6
|
||||
*/
|
||||
public static IndexField text(String key, Float weight) {
|
||||
return new IndexField(key, null, Type.TEXT, weight);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -101,10 +121,20 @@ public final class IndexField {
|
||||
/**
|
||||
* Returns whether the {@link IndexField} is a geo index field.
|
||||
*
|
||||
* @return the isGeo
|
||||
* @return true if type is {@link Type#GEO}.
|
||||
*/
|
||||
public boolean isGeo() {
|
||||
return isGeo;
|
||||
return Type.GEO.equals(type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link IndexField} is a text index field.
|
||||
*
|
||||
* @return true if type is {@link Type#TEXT}
|
||||
* @since 1.6
|
||||
*/
|
||||
public boolean isText() {
|
||||
return Type.TEXT.equals(type);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -125,7 +155,7 @@ public final class IndexField {
|
||||
IndexField that = (IndexField) obj;
|
||||
|
||||
return this.key.equals(that.key) && ObjectUtils.nullSafeEquals(this.direction, that.direction)
|
||||
&& this.isGeo == that.isGeo;
|
||||
&& this.type == that.type;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -138,7 +168,8 @@ public final class IndexField {
|
||||
int result = 17;
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(key);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(direction);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(isGeo);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(type);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(weight);
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -148,6 +179,7 @@ public final class IndexField {
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("IndexField [ key: %s, direction: %s, isGeo: %s]", key, direction, isGeo);
|
||||
return String.format("IndexField [ key: %s, direction: %s, type: %s, weight: %s]", key, direction, type, weight);
|
||||
}
|
||||
|
||||
}
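To illustrate the factory methods together with the new Type-based checks; the keys and the weight are arbitrary.

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.index.IndexField;

public class IndexFieldSample {

	public static void main(String[] args) {

		IndexField plain = IndexField.create("firstname", Direction.ASC);
		IndexField geo = IndexField.geo("location");
		IndexField text = IndexField.text("description", 2F);

		System.out.println(plain.isGeo());  // false
		System.out.println(geo.isGeo());    // true
		System.out.println(text.isText());  // true
	}
}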
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2002-2011 the original author or authors.
|
||||
* Copyright 2002-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -23,6 +23,11 @@ import java.util.List;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class IndexInfo {
|
||||
|
||||
private final List<IndexField> indexFields;
|
||||
@@ -31,14 +36,30 @@ public class IndexInfo {
|
||||
private final boolean unique;
|
||||
private final boolean dropDuplicates;
|
||||
private final boolean sparse;
|
||||
private final String language;
|
||||
|
||||
/**
|
||||
* @deprecated Will be removed in 1.7. Please use {@link #IndexInfo(List, String, boolean, boolean, boolean, String)}
|
||||
* @param indexFields
|
||||
* @param name
|
||||
* @param unique
|
||||
* @param dropDuplicates
|
||||
* @param sparse
|
||||
*/
|
||||
@Deprecated
|
||||
public IndexInfo(List<IndexField> indexFields, String name, boolean unique, boolean dropDuplicates, boolean sparse) {
|
||||
this(indexFields, name, unique, dropDuplicates, sparse, "");
|
||||
}
|
||||
|
||||
public IndexInfo(List<IndexField> indexFields, String name, boolean unique, boolean dropDuplicates, boolean sparse,
|
||||
String language) {
|
||||
|
||||
this.indexFields = Collections.unmodifiableList(indexFields);
|
||||
this.name = name;
|
||||
this.unique = unique;
|
||||
this.dropDuplicates = dropDuplicates;
|
||||
this.sparse = sparse;
|
||||
this.language = language;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -84,14 +105,23 @@ public class IndexInfo {
|
||||
return sparse;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return
|
||||
* @since 1.6
|
||||
*/
|
||||
public String getLanguage() {
|
||||
return language;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "IndexInfo [indexFields=" + indexFields + ", name=" + name + ", unique=" + unique + ", dropDuplicates="
|
||||
+ dropDuplicates + ", sparse=" + sparse + "]";
|
||||
+ dropDuplicates + ", sparse=" + sparse + ", language=" + language + "]";
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + (dropDuplicates ? 1231 : 1237);
|
||||
@@ -99,6 +129,7 @@ public class IndexInfo {
|
||||
result = prime * result + ((name == null) ? 0 : name.hashCode());
|
||||
result = prime * result + (sparse ? 1231 : 1237);
|
||||
result = prime * result + (unique ? 1231 : 1237);
|
||||
result = prime * result + ObjectUtils.nullSafeHashCode(language);
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -137,6 +168,9 @@ public class IndexInfo {
|
||||
if (unique != other.unique) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(language, other.language)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
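The widened constructor can be used roughly as follows; the field list, index name and language are made up.

import java.util.Arrays;

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.index.IndexField;
import org.springframework.data.mongodb.core.index.IndexInfo;

public class IndexInfoSample {

	public static void main(String[] args) {

		IndexInfo info = new IndexInfo(Arrays.asList(IndexField.create("firstname", Direction.ASC)), "firstname_idx",
				false, false, false, "spanish");

		System.out.println(info.getLanguage()); // spanish
	}
}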
@@ -0,0 +1,37 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
|
||||
|
||||
/**
|
||||
* {@link IndexResolver} finds those {@link IndexDefinition}s to be created for a given class.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.5
|
||||
*/
|
||||
interface IndexResolver {
|
||||
|
||||
/**
|
||||
* Find and create {@link IndexDefinition}s for properties of given {@code type}. {@link IndexDefinition}s are created
|
||||
* for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}.
|
||||
*
|
||||
* @param type
|
||||
* @return Empty {@link Iterable} in case no {@link IndexDefinition} could be resolved for type.
|
||||
*/
|
||||
Iterable<? extends IndexDefinitionHolder> resolveIndexForClass(Class<?> type);
|
||||
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -27,21 +27,94 @@ import java.lang.annotation.Target;
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Johno Crawford
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@Target(ElementType.FIELD)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface Indexed {
|
||||
|
||||
/**
|
||||
* If set to true reject all documents that contain a duplicate value for the indexed field.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/core/index-unique/
|
||||
* @return
|
||||
*/
|
||||
boolean unique() default false;
|
||||
|
||||
IndexDirection direction() default IndexDirection.ASCENDING;
|
||||
|
||||
/**
|
||||
* If set to true index will skip over any document that is missing the indexed field.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/core/index-sparse/
|
||||
* @return
|
||||
*/
|
||||
boolean sparse() default false;
|
||||
|
||||
/**
|
||||
* @see http://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping
|
||||
* @return
|
||||
*/
|
||||
boolean dropDups() default false;
|
||||
|
||||
/**
|
||||
* Index name. <br />
|
||||
* <br />
|
||||
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
|
||||
* provided name will be prefixed with the path leading to the entity. <br />
|
||||
* <br />
|
||||
* The structure below
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* @Document
|
||||
* class Root {
|
||||
* Hybrid hybrid;
|
||||
* Nested nested;
|
||||
* }
|
||||
*
|
||||
* @Document
|
||||
* class Hybrid {
|
||||
* @Indexed(name="index") String h1;
|
||||
* }
|
||||
*
|
||||
* class Nested {
|
||||
* @Indexed(name="index") String n1;
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* resolves to the following index structures
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* db.root.ensureIndex( { hybrid.h1: 1 } , { name: "hybrid.index" } )
|
||||
* db.root.ensureIndex( { nested.n1: 1 } , { name: "nested.index" } )
|
||||
* db.hybrid.ensureIndex( { h1: 1} , { name: "index" } )
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String name() default "";
|
||||
|
||||
/**
|
||||
* If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults
|
||||
* to {@literal false}.
|
||||
*
|
||||
* @return
|
||||
* @since 1.5
|
||||
*/
|
||||
boolean useGeneratedName() default false;
|
||||
|
||||
/**
|
||||
* Collection name for index to be created on.
|
||||
*
|
||||
* @return
|
||||
* @deprecated The collection name is derived from the domain type. Fixing the collection via this attribute might
|
||||
* result in broken definitions. Will be removed in 1.7.
|
||||
*/
|
||||
@Deprecated
|
||||
String collection() default "";
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -60,4 +60,10 @@ public class MongoMappingEventPublisher implements ApplicationEventPublisher {
|
||||
indexCreator.onApplicationEvent((MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>) event);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.context.ApplicationEventPublisher#publishEvent(java.lang.Object)
|
||||
*/
|
||||
public void publishEvent(Object event) {}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,7 +15,6 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
@@ -23,19 +22,14 @@ import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.context.ApplicationListener;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PropertyHandler;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.MappingContextEvent;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.util.JSON;
|
||||
|
||||
/**
|
||||
* Component that inspects {@link MongoPersistentEntity} instances contained in the given {@link MongoMappingContext}
|
||||
@@ -45,30 +39,47 @@ import com.mongodb.util.JSON;
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Johno Crawford
|
||||
* @author Laurent Canet
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MongoPersistentEntityIndexCreator implements
|
||||
ApplicationListener<MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>> {
|
||||
public class MongoPersistentEntityIndexCreator implements ApplicationListener<MappingContextEvent<?, ?>> {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class);
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class);
|
||||
|
||||
private final Map<Class<?>, Boolean> classesSeen = new ConcurrentHashMap<Class<?>, Boolean>();
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
private final MongoMappingContext mappingContext;
|
||||
private final IndexResolver indexResolver;
|
||||
|
||||
/**
|
||||
* Creats a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* {@link MongoDbFactory}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}
|
||||
* @param mongoDbFactory must not be {@literal null}
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
*/
|
||||
public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, MongoDbFactory mongoDbFactory) {
|
||||
this(mappingContext, mongoDbFactory, new MongoPersistentEntityIndexResolver(mappingContext));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* {@link MongoDbFactory}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
* @param indexResolver must not be {@literal null}.
|
||||
*/
|
||||
public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, MongoDbFactory mongoDbFactory,
|
||||
IndexResolver indexResolver) {
|
||||
|
||||
Assert.notNull(mongoDbFactory);
|
||||
Assert.notNull(mappingContext);
|
||||
Assert.notNull(indexResolver);
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.mappingContext = mappingContext;
|
||||
this.indexResolver = indexResolver;
|
||||
|
||||
for (MongoPersistentEntity<?> entity : mappingContext.getPersistentEntities()) {
|
||||
checkForIndexes(entity);
|
||||
@@ -79,7 +90,7 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent)
|
||||
*/
|
||||
public void onApplicationEvent(MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty> event) {
|
||||
public void onApplicationEvent(MappingContextEvent<?, ?> event) {
|
||||
|
||||
if (!event.wasEmittedBy(mappingContext)) {
|
||||
return;
|
||||
@@ -89,91 +100,40 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
|
||||
// Double check type as Spring infrastructure does not consider nested generics
|
||||
if (entity instanceof MongoPersistentEntity) {
|
||||
checkForIndexes(event.getPersistentEntity());
|
||||
checkForIndexes((MongoPersistentEntity<?>) entity);
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkForIndexes(final MongoPersistentEntity<?> entity) {
|
||||
final Class<?> type = entity.getType();
|
||||
private void checkForIndexes(final MongoPersistentEntity<?> entity) {
|
||||
|
||||
Class<?> type = entity.getType();
|
||||
|
||||
if (!classesSeen.containsKey(type)) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Analyzing class " + type + " for index information.");
|
||||
|
||||
this.classesSeen.put(type, Boolean.TRUE);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Analyzing class " + type + " for index information.");
|
||||
}
|
||||
|
||||
// Make sure indexes get created
|
||||
if (type.isAnnotationPresent(CompoundIndexes.class)) {
|
||||
CompoundIndexes indexes = type.getAnnotation(CompoundIndexes.class);
|
||||
for (CompoundIndex index : indexes.value()) {
|
||||
|
||||
String indexColl = StringUtils.hasText(index.collection()) ? index.collection() : entity.getCollection();
|
||||
DBObject definition = (DBObject) JSON.parse(index.def());
|
||||
|
||||
ensureIndex(indexColl, index.name(), definition, index.unique(), index.dropDups(), index.sparse(),
|
||||
index.background(), index.expireAfterSeconds());
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Created compound index " + index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
Field field = persistentProperty.getField();
|
||||
|
||||
if (field.isAnnotationPresent(Indexed.class)) {
|
||||
|
||||
Indexed index = field.getAnnotation(Indexed.class);
|
||||
String name = index.name();
|
||||
|
||||
if (!StringUtils.hasText(name)) {
|
||||
name = persistentProperty.getFieldName();
|
||||
} else {
|
||||
if (!name.equals(field.getName()) && index.unique() && !index.sparse()) {
|
||||
// Names don't match, and sparse is not true. This situation will generate an error on the server.
|
||||
if (log.isWarnEnabled()) {
|
||||
log.warn("The index name " + name + " doesn't match this property name: " + field.getName()
|
||||
+ ". Setting sparse=true on this index will prevent errors when inserting documents.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
String collection = StringUtils.hasText(index.collection()) ? index.collection() : entity.getCollection();
|
||||
int direction = index.direction() == IndexDirection.ASCENDING ? 1 : -1;
|
||||
DBObject definition = new BasicDBObject(persistentProperty.getFieldName(), direction);
|
||||
|
||||
ensureIndex(collection, name, definition, index.unique(), index.dropDups(), index.sparse(),
|
||||
index.background(), index.expireAfterSeconds());
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Created property index " + index);
|
||||
}
|
||||
|
||||
} else if (field.isAnnotationPresent(GeoSpatialIndexed.class)) {
|
||||
|
||||
GeoSpatialIndexed index = field.getAnnotation(GeoSpatialIndexed.class);
|
||||
|
||||
GeospatialIndex indexObject = new GeospatialIndex(persistentProperty.getFieldName());
|
||||
indexObject.withMin(index.min()).withMax(index.max());
|
||||
indexObject.named(StringUtils.hasText(index.name()) ? index.name() : field.getName());
|
||||
|
||||
String collection = StringUtils.hasText(index.collection()) ? index.collection() : entity.getCollection();
|
||||
mongoDbFactory.getDb().getCollection(collection)
|
||||
.ensureIndex(indexObject.getIndexKeys(), indexObject.getIndexOptions());
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug(String.format("Created %s for entity %s in collection %s! ", indexObject, entity.getType(),
|
||||
collection));
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
classesSeen.put(type, true);
|
||||
checkForAndCreateIndexes(entity);
|
||||
}
|
||||
}
|
||||
|
||||
private void checkForAndCreateIndexes(MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (entity.findAnnotation(Document.class) != null) {
|
||||
for (IndexDefinitionHolder indexToCreate : indexResolver.resolveIndexForClass(entity.getType())) {
|
||||
createIndex(indexToCreate);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void createIndex(IndexDefinitionHolder indexDefinition) {
|
||||
mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()).createIndex(indexDefinition.getIndexKeys(),
|
||||
indexDefinition.getIndexOptions());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the current index creator was registered for the given {@link MappingContext}.
|
||||
*
|
||||
@@ -183,33 +143,4 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
public boolean isIndexCreatorFor(MappingContext<?, ?> context) {
|
||||
return this.mappingContext.equals(context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers the actual index creation.
|
||||
*
|
||||
* @param collection the collection to create the index in
|
||||
* @param name the name of the index about to be created
|
||||
* @param indexDefinition the index definition
|
||||
* @param unique whether it shall be a unique index
|
||||
* @param dropDups whether to drop duplicates
|
||||
* @param sparse sparse or not
|
||||
* @param background whether the index will be created in the background
|
||||
* @param expireAfterSeconds the time to live for documents in the collection
|
||||
*/
|
||||
protected void ensureIndex(String collection, String name, DBObject indexDefinition, boolean unique,
|
||||
boolean dropDups, boolean sparse, boolean background, int expireAfterSeconds) {
|
||||
|
||||
DBObject opts = new BasicDBObject();
|
||||
opts.put("name", name);
|
||||
opts.put("dropDups", dropDups);
|
||||
opts.put("sparse", sparse);
|
||||
opts.put("unique", unique);
|
||||
opts.put("background", background);
|
||||
|
||||
if (expireAfterSeconds != -1) {
|
||||
opts.put("expireAfterSeconds", expireAfterSeconds);
|
||||
}
|
||||
|
||||
mongoDbFactory.getDb().getCollection(collection).ensureIndex(indexDefinition, opts);
|
||||
}
|
||||
}
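Wiring stays the same as before; a minimal sketch, assuming the MongoMappingContext and MongoDbFactory are provided elsewhere (for example as Spring beans).

import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

public class IndexCreatorSetup {

	// registers the creator that listens for MappingContextEvents and creates the resolved indexes
	public MongoPersistentEntityIndexCreator indexCreator(MongoMappingContext mappingContext,
			MongoDbFactory mongoDbFactory) {
		return new MongoPersistentEntityIndexCreator(mappingContext, mongoDbFactory);
	}
}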
@@ -0,0 +1,681 @@
|
||||
/*
|
||||
* Copyright 2014-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.mapping.PropertyHandler;
|
||||
import org.springframework.data.mongodb.core.index.Index.Duplicates;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.TextIndexIncludeOptions.IncludeStrategy;
|
||||
import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexDefinitionBuilder;
|
||||
import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexedFieldSpec;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.util.JSON;
|
||||
|
||||
/**
|
||||
* {@link IndexResolver} implementation inspecting {@link MongoPersistentEntity} instances for properties to be
|
||||
* indexed. <br />
|
||||
* All {@link MongoPersistentProperty} of the {@link MongoPersistentEntity} are inspected for potential indexes by
|
||||
* scanning related annotations.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.5
|
||||
*/
|
||||
public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexResolver.class);
|
||||
|
||||
private final MongoMappingContext mappingContext;
|
||||
|
||||
/**
|
||||
* Create new {@link MongoPersistentEntityIndexResolver}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
*/
|
||||
public MongoPersistentEntityIndexResolver(MongoMappingContext mappingContext) {
|
||||
|
||||
Assert.notNull(mappingContext, "Mapping context must not be null in order to resolve index definitions");
|
||||
this.mappingContext = mappingContext;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexResolver#resolveIndexForClass(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public List<IndexDefinitionHolder> resolveIndexForClass(Class<?> type) {
|
||||
return resolveIndexForEntity(mappingContext.getPersistentEntity(type));
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve the {@link IndexDefinition}s for given {@literal root} entity by traversing {@link MongoPersistentProperty}
|
||||
* scanning for index annotations {@link Indexed}, {@link CompoundIndex} and {@link GeospatialIndex}. The given
|
||||
* {@literal root} must therefore be annotated with {@link Document}.
|
||||
*
|
||||
* @param root must not be null.
|
||||
* @return List of {@link IndexDefinitionHolder}. Will never be {@code null}.
|
||||
* @throws IllegalArgumentException in case of missing {@link Document} annotation marking root entities.
|
||||
*/
|
||||
public List<IndexDefinitionHolder> resolveIndexForEntity(final MongoPersistentEntity<?> root) {
|
||||
|
||||
Assert.notNull(root, "Index cannot be resolved for given 'null' entity.");
|
||||
Document document = root.findAnnotation(Document.class);
|
||||
Assert.notNull(document, "Given entity is not collection root.");
|
||||
|
||||
final List<IndexDefinitionHolder> indexInformation = new ArrayList<MongoPersistentEntityIndexResolver.IndexDefinitionHolder>();
|
||||
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", root.getCollection(), root));
|
||||
indexInformation.addAll(potentiallyCreateTextIndexDefinition(root));
|
||||
|
||||
final CycleGuard guard = new CycleGuard();
|
||||
|
||||
root.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
|
||||
@Override
|
||||
public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
try {
|
||||
if (persistentProperty.isEntity()) {
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getActualType(),
|
||||
persistentProperty.getFieldName(), root.getCollection(), guard));
|
||||
}
|
||||
|
||||
IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(
|
||||
persistentProperty.getFieldName(), root.getCollection(), persistentProperty);
|
||||
if (indexDefinitionHolder != null) {
|
||||
indexInformation.add(indexDefinitionHolder);
|
||||
}
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return indexInformation;
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively resolve and inspect properties of given {@literal type} for indexes to be created.
|
||||
*
|
||||
* @param type
|
||||
* @param path The {@literal "dot"} path.
|
||||
* @param collection
|
||||
* @return List of {@link IndexDefinitionHolder} representing indexes for given type and its referenced property
|
||||
* types. Will never be {@code null}.
|
||||
*/
|
||||
private List<IndexDefinitionHolder> resolveIndexForClass(final Class<?> type, final String path,
|
||||
final String collection, final CycleGuard guard) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(type);
|
||||
|
||||
final List<IndexDefinitionHolder> indexInformation = new ArrayList<MongoPersistentEntityIndexResolver.IndexDefinitionHolder>();
|
||||
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions(path, collection, entity));
|
||||
|
||||
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
|
||||
@Override
|
||||
public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
String propertyDotPath = (StringUtils.hasText(path) ? path + "." : "") + persistentProperty.getFieldName();
|
||||
guard.protect(persistentProperty, path);
|
||||
|
||||
if (persistentProperty.isEntity()) {
|
||||
try {
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getActualType(), propertyDotPath,
|
||||
collection, guard));
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(propertyDotPath,
|
||||
collection, persistentProperty);
|
||||
if (indexDefinitionHolder != null) {
|
||||
indexInformation.add(indexDefinitionHolder);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return indexInformation;
|
||||
}
|
||||
|
||||
private IndexDefinitionHolder createIndexDefinitionHolderForProperty(String dotPath, String collection,
|
||||
MongoPersistentProperty persistentProperty) {
|
||||
|
||||
if (persistentProperty.isAnnotationPresent(Indexed.class)) {
|
||||
return createIndexDefinition(dotPath, collection, persistentProperty);
|
||||
} else if (persistentProperty.isAnnotationPresent(GeoSpatialIndexed.class)) {
|
||||
return createGeoSpatialIndexDefinition(dotPath, collection, persistentProperty);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private List<IndexDefinitionHolder> potentiallyCreateCompoundIndexDefinitions(String dotPath, String collection,
|
||||
MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (entity.findAnnotation(CompoundIndexes.class) == null && entity.findAnnotation(CompoundIndex.class) == null) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
return createCompoundIndexDefinitions(dotPath, collection, entity);
|
||||
}
|
||||
|
||||
private Collection<? extends IndexDefinitionHolder> potentiallyCreateTextIndexDefinition(MongoPersistentEntity<?> root) {
|
||||
|
||||
TextIndexDefinitionBuilder indexDefinitionBuilder = new TextIndexDefinitionBuilder().named(root.getType()
|
||||
.getSimpleName() + "_TextIndex");
|
||||
|
||||
if (StringUtils.hasText(root.getLanguage())) {
|
||||
indexDefinitionBuilder.withDefaultLanguage(root.getLanguage());
|
||||
}
|
||||
|
||||
try {
|
||||
appendTextIndexInformation("", indexDefinitionBuilder, root,
|
||||
new TextIndexIncludeOptions(IncludeStrategy.DEFAULT), new CycleGuard());
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage());
|
||||
}
|
||||
|
||||
TextIndexDefinition indexDefinition = indexDefinitionBuilder.build();
|
||||
|
||||
if (!indexDefinition.hasFieldSpec()) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
IndexDefinitionHolder holder = new IndexDefinitionHolder("", indexDefinition, root.getCollection());
|
||||
return Collections.singletonList(holder);
|
||||
|
||||
}
|
||||
|
||||
private void appendTextIndexInformation(final String dotPath,
|
||||
final TextIndexDefinitionBuilder indexDefinitionBuilder, final MongoPersistentEntity<?> entity,
|
||||
final TextIndexIncludeOptions includeOptions, final CycleGuard guard) {
|
||||
|
||||
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
|
||||
@Override
|
||||
public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
guard.protect(persistentProperty, dotPath);
|
||||
|
||||
if (persistentProperty.isExplicitLanguageProperty() && !StringUtils.hasText(dotPath)) {
|
||||
indexDefinitionBuilder.withLanguageOverride(persistentProperty.getFieldName());
|
||||
}
|
||||
|
||||
TextIndexed indexed = persistentProperty.findAnnotation(TextIndexed.class);
|
||||
|
||||
if (includeOptions.isForce() || indexed != null || persistentProperty.isEntity()) {
|
||||
|
||||
String propertyDotPath = (StringUtils.hasText(dotPath) ? dotPath + "." : "")
|
||||
+ persistentProperty.getFieldName();
|
||||
|
||||
Float weight = indexed != null ? indexed.weight()
|
||||
: (includeOptions.getParentFieldSpec() != null ? includeOptions.getParentFieldSpec().getWeight() : 1.0F);
|
||||
|
||||
if (persistentProperty.isEntity()) {
|
||||
|
||||
TextIndexIncludeOptions optionsForNestedType = includeOptions;
|
||||
if (!IncludeStrategy.FORCE.equals(includeOptions.getStrategy()) && indexed != null) {
|
||||
optionsForNestedType = new TextIndexIncludeOptions(IncludeStrategy.FORCE, new TextIndexedFieldSpec(
|
||||
propertyDotPath, weight));
|
||||
}
|
||||
|
||||
try {
|
||||
appendTextIndexInformation(propertyDotPath, indexDefinitionBuilder,
|
||||
mappingContext.getPersistentEntity(persistentProperty.getActualType()), optionsForNestedType, guard);
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage(), e);
|
||||
} catch (InvalidDataAccessApiUsageException e) {
|
||||
LOGGER.info(
|
||||
String.format("Potentially invalid index structure discovered. Breaking operation for %s.",
|
||||
entity.getName()), e);
|
||||
}
|
||||
} else if (includeOptions.isForce() || indexed != null) {
|
||||
indexDefinitionBuilder.onField(propertyDotPath, weight);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Create {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} for {@link CompoundIndexes} of given type.
|
||||
*
|
||||
* @param dotPath The properties {@literal "dot"} path representation from its document root.
|
||||
* @param fallbackCollection
|
||||
* @param type
|
||||
* @return
|
||||
*/
|
||||
protected List<IndexDefinitionHolder> createCompoundIndexDefinitions(String dotPath, String fallbackCollection,
|
||||
MongoPersistentEntity<?> entity) {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = new ArrayList<MongoPersistentEntityIndexResolver.IndexDefinitionHolder>();
|
||||
CompoundIndexes indexes = entity.findAnnotation(CompoundIndexes.class);
|
||||
|
||||
if (indexes != null) {
|
||||
for (CompoundIndex index : indexes.value()) {
|
||||
indexDefinitions.add(createCompoundIndexDefinition(dotPath, fallbackCollection, index, entity));
|
||||
}
|
||||
}
|
||||
|
||||
CompoundIndex index = entity.findAnnotation(CompoundIndex.class);
|
||||
|
||||
if (index != null) {
|
||||
indexDefinitions.add(createCompoundIndexDefinition(dotPath, fallbackCollection, index, entity));
|
||||
}
|
||||
|
||||
return indexDefinitions;
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
protected IndexDefinitionHolder createCompoundIndexDefinition(String dotPath, String fallbackCollection,
|
||||
CompoundIndex index, MongoPersistentEntity<?> entity) {
|
||||
|
||||
CompoundIndexDefinition indexDefinition = new CompoundIndexDefinition(resolveCompoundIndexKeyFromStringDefinition(
|
||||
dotPath, index.def()));
|
||||
|
||||
if (!index.useGeneratedName()) {
|
||||
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, null));
|
||||
}
|
||||
|
||||
if (index.unique()) {
|
||||
indexDefinition.unique(index.dropDups() ? Duplicates.DROP : Duplicates.RETAIN);
|
||||
}
|
||||
|
||||
if (index.sparse()) {
|
||||
indexDefinition.sparse();
|
||||
}
|
||||
|
||||
if (index.background()) {
|
||||
indexDefinition.background();
|
||||
}
|
||||
|
||||
String collection = StringUtils.hasText(index.collection()) ? index.collection() : fallbackCollection;
|
||||
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
|
||||
}
|
||||
|
||||
private DBObject resolveCompoundIndexKeyFromStringDefinition(String dotPath, String keyDefinitionString) {
|
||||
|
||||
if (!StringUtils.hasText(dotPath) && !StringUtils.hasText(keyDefinitionString)) {
|
||||
throw new InvalidDataAccessApiUsageException("Cannot create index on root level for empty keys.");
|
||||
}
|
||||
|
||||
if (!StringUtils.hasText(keyDefinitionString)) {
|
||||
return new BasicDBObject(dotPath, 1);
|
||||
}
|
||||
|
||||
DBObject dbo = (DBObject) JSON.parse(keyDefinitionString);
|
||||
if (!StringUtils.hasText(dotPath)) {
|
||||
return dbo;
|
||||
}
|
||||
|
||||
BasicDBObjectBuilder dboBuilder = new BasicDBObjectBuilder();
|
||||
|
||||
for (String key : dbo.keySet()) {
|
||||
dboBuilder.add(dotPath + "." + key, dbo.get(key));
|
||||
}
|
||||
return dboBuilder.get();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link Indexed} for given
|
||||
* {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param dotPath The properties {@literal "dot"} path representation from its document root.
|
||||
* @param collection
|
||||
* @param persitentProperty
|
||||
* @return
|
||||
*/
|
||||
protected IndexDefinitionHolder createIndexDefinition(String dotPath, String fallbackCollection,
|
||||
MongoPersistentProperty persitentProperty) {
|
||||
|
||||
Indexed index = persitentProperty.findAnnotation(Indexed.class);
|
||||
String collection = StringUtils.hasText(index.collection()) ? index.collection() : fallbackCollection;
|
||||
|
||||
Index indexDefinition = new Index().on(dotPath,
|
||||
IndexDirection.ASCENDING.equals(index.direction()) ? Sort.Direction.ASC : Sort.Direction.DESC);
|
||||
|
||||
if (!index.useGeneratedName()) {
|
||||
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persitentProperty));
|
||||
}
|
||||
|
||||
if (index.unique()) {
|
||||
indexDefinition.unique(index.dropDups() ? Duplicates.DROP : Duplicates.RETAIN);
|
||||
}
|
||||
|
||||
if (index.sparse()) {
|
||||
indexDefinition.sparse();
|
||||
}
|
||||
|
||||
if (index.background()) {
|
||||
indexDefinition.background();
|
||||
}
|
||||
|
||||
if (index.expireAfterSeconds() >= 0) {
|
||||
indexDefinition.expire(index.expireAfterSeconds(), TimeUnit.SECONDS);
|
||||
}
|
||||
|
||||
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link GeoSpatialIndexed} for
|
||||
* {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param dotPath The properties {@literal "dot"} path representation from its document root.
|
||||
* @param collection
|
||||
* @param persistentProperty
|
||||
* @return
|
||||
*/
|
||||
protected IndexDefinitionHolder createGeoSpatialIndexDefinition(String dotPath, String fallbackCollection,
|
||||
MongoPersistentProperty persistentProperty) {
|
||||
|
||||
GeoSpatialIndexed index = persistentProperty.findAnnotation(GeoSpatialIndexed.class);
|
||||
String collection = StringUtils.hasText(index.collection()) ? index.collection() : fallbackCollection;
|
||||
|
||||
GeospatialIndex indexDefinition = new GeospatialIndex(dotPath);
|
||||
indexDefinition.withBits(index.bits());
|
||||
indexDefinition.withMin(index.min()).withMax(index.max());
|
||||
|
||||
if (!index.useGeneratedName()) {
|
||||
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persistentProperty));
|
||||
}
|
||||
|
||||
indexDefinition.typed(index.type()).withBucketSize(index.bucketSize()).withAdditionalField(index.additionalField());
|
||||
|
||||
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
|
||||
}
|
||||
|
||||
private String pathAwareIndexName(String indexName, String dotPath, MongoPersistentProperty property) {
|
||||
|
||||
String nameToUse = StringUtils.hasText(indexName) ? indexName : "";
|
||||
|
||||
if (!StringUtils.hasText(dotPath) || (property != null && dotPath.equals(property.getFieldName()))) {
|
||||
return StringUtils.hasText(nameToUse) ? nameToUse : dotPath;
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(dotPath)) {
|
||||
|
||||
nameToUse = StringUtils.hasText(nameToUse) ? (property != null ? dotPath.replace("." + property.getFieldName(),
|
||||
"") : dotPath) + "." + nameToUse : dotPath;
|
||||
}
|
||||
return nameToUse;
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link CycleGuard} holds information about properties and the paths for accessing those. This information is used
|
||||
* to detect potential cycles within the references.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
static class CycleGuard {
|
||||
|
||||
private final Map<String, List<Path>> propertyTypeMap;
|
||||
|
||||
CycleGuard() {
|
||||
this.propertyTypeMap = new LinkedHashMap<String, List<Path>>();
|
||||
}
|
||||
|
||||
/**
|
||||
* @param property The property to inspect
|
||||
* @param path The path under which the property can be reached.
|
||||
* @throws CyclicPropertyReferenceException in case a potential cycle is detected.
|
||||
* @see Path#cycles(MongoPersistentProperty, String)
|
||||
*/
|
||||
void protect(MongoPersistentProperty property, String path) throws CyclicPropertyReferenceException {
|
||||
|
||||
String propertyTypeKey = createMapKey(property);
|
||||
if (propertyTypeMap.containsKey(propertyTypeKey)) {
|
||||
|
||||
List<Path> paths = propertyTypeMap.get(propertyTypeKey);
|
||||
|
||||
for (Path existingPath : paths) {
|
||||
|
||||
if (existingPath.cycles(property, path) && property.isEntity()) {
|
||||
paths.add(new Path(property, path));
|
||||
|
||||
throw new CyclicPropertyReferenceException(property.getFieldName(), property.getOwner().getType(),
|
||||
existingPath.getPath());
|
||||
}
|
||||
}
|
||||
|
||||
paths.add(new Path(property, path));
|
||||
} else {
|
||||
|
||||
ArrayList<Path> paths = new ArrayList<Path>();
|
||||
paths.add(new Path(property, path));
|
||||
propertyTypeMap.put(propertyTypeKey, paths);
|
||||
}
|
||||
}
|
||||
|
||||
private String createMapKey(MongoPersistentProperty property) {
|
||||
return property.getOwner().getType().getSimpleName() + ":" + property.getFieldName();
|
||||
}
|
||||
|
||||
/**
|
||||
* Path defines the property and its full path from the document root. <br />
|
||||
* A {@link Path} with {@literal spring.data.mongodb} would be created for the property {@code Three.mongodb}.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* @Document
|
||||
* class One {
|
||||
* Two spring;
|
||||
* }
|
||||
*
|
||||
* class Two {
|
||||
* Three data;
|
||||
* }
|
||||
*
|
||||
* class Three {
|
||||
* String mongodb;
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
static class Path {
|
||||
|
||||
private final MongoPersistentProperty property;
|
||||
private final String path;
|
||||
|
||||
Path(MongoPersistentProperty property, String path) {
|
||||
|
||||
this.property = property;
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
public String getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether the given property is owned by the same entity and if it has been already visited by a subset of
|
||||
* the current path. Given {@literal foo.bar.bar} cycles if {@literal foo.bar} has already been visited and
|
||||
* {@code class Bar} contains a property of type {@code Bar}. The previously mentioned path would not cycle if
|
||||
* {@code class Bar} contained a property of type {@code SomeEntity} named {@literal bar}.
|
||||
*
|
||||
* @param property
|
||||
* @param path
|
||||
* @return
|
||||
*/
|
||||
boolean cycles(MongoPersistentProperty property, String path) {
|
||||
|
||||
if (!property.getOwner().equals(this.property.getOwner())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return path.equals(this.path) || path.contains(this.path + ".") || path.contains("." + this.path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 1.5
|
||||
*/
|
||||
public static class CyclicPropertyReferenceException extends RuntimeException {
|
||||
|
||||
private static final long serialVersionUID = -3762979307658772277L;
|
||||
|
||||
private final String propertyName;
|
||||
private final Class<?> type;
|
||||
private final String dotPath;
|
||||
|
||||
public CyclicPropertyReferenceException(String propertyName, Class<?> type, String dotPath) {
|
||||
|
||||
this.propertyName = propertyName;
|
||||
this.type = type;
|
||||
this.dotPath = dotPath;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Throwable#getMessage()
|
||||
*/
|
||||
@Override
|
||||
public String getMessage() {
|
||||
return String.format("Found cycle for field '%s' in type '%s' for path '%s'", propertyName,
|
||||
type != null ? type.getSimpleName() : "unknown", dotPath);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Implementation of {@link IndexDefinition} holding additional (property)path information used for creating the
|
||||
* index. The path itself is the properties {@literal "dot"} path representation from its root document.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.5
|
||||
*/
|
||||
public static class IndexDefinitionHolder implements IndexDefinition {
|
||||
|
||||
private final String path;
|
||||
private final IndexDefinition indexDefinition;
|
||||
private final String collection;
|
||||
|
||||
/**
|
||||
* Creates a new {@link IndexDefinitionHolder} from the given dot path, {@link IndexDefinition} and collection.
|
||||
*
|
||||
* @param path
|
||||
*/
|
||||
public IndexDefinitionHolder(String path, IndexDefinition definition, String collection) {
|
||||
|
||||
this.path = path;
|
||||
this.indexDefinition = definition;
|
||||
this.collection = collection;
|
||||
}
|
||||
|
||||
public String getCollection() {
|
||||
return collection;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@literal "dot"} path used to create the index.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@literal raw} {@link IndexDefinition}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public IndexDefinition getIndexDefinition() {
|
||||
return indexDefinition;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexKeys()
|
||||
*/
|
||||
@Override
|
||||
public DBObject getIndexKeys() {
|
||||
return indexDefinition.getIndexKeys();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexOptions()
|
||||
*/
|
||||
@Override
|
||||
public DBObject getIndexOptions() {
|
||||
return indexDefinition.getIndexOptions();
|
||||
}
|
||||
}

	/**
	 * @author Christoph Strobl
	 * @since 1.6
	 */
	static class TextIndexIncludeOptions {

		enum IncludeStrategy {
			FORCE, DEFAULT;
		}

		private final IncludeStrategy strategy;

		private final TextIndexedFieldSpec parentFieldSpec;

		public TextIndexIncludeOptions(IncludeStrategy strategy, TextIndexedFieldSpec parentFieldSpec) {
			this.strategy = strategy;
			this.parentFieldSpec = parentFieldSpec;
		}

		public TextIndexIncludeOptions(IncludeStrategy strategy) {
			this(strategy, null);
		}

		public IncludeStrategy getStrategy() {
			return strategy;
		}

		public TextIndexedFieldSpec getParentFieldSpec() {
			return parentFieldSpec;
		}

		public boolean isForce() {
			return IncludeStrategy.FORCE.equals(strategy);
		}

	}
}
@@ -0,0 +1,336 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.index;

import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.Set;

import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
 * {@link IndexDefinition} to span multiple keys for text search.
 *
 * @author Christoph Strobl
 * @since 1.6
 */
public class TextIndexDefinition implements IndexDefinition {

	private String name;
	private Set<TextIndexedFieldSpec> fieldSpecs;
	private String defaultLanguage;
	private String languageOverride;

	TextIndexDefinition() {
		fieldSpecs = new LinkedHashSet<TextIndexedFieldSpec>();
	}

	/**
	 * Creates a {@link TextIndexDefinition} for all fields in the document.
	 *
	 * @return
	 */
	public static TextIndexDefinition forAllFields() {
		return new TextIndexDefinitionBuilder().onAllFields().build();
	}

	/**
	 * Get {@link TextIndexDefinitionBuilder} to create {@link TextIndexDefinition}.
	 *
	 * @return
	 */
	public static TextIndexDefinitionBuilder builder() {
		return new TextIndexDefinitionBuilder();
	}

	/**
	 * @param fieldSpec
	 */
	public void addFieldSpec(TextIndexedFieldSpec fieldSpec) {
		this.fieldSpecs.add(fieldSpec);
	}

	/**
	 * @param fieldSpecs
	 */
	public void addFieldSpecs(Collection<TextIndexedFieldSpec> fieldSpecs) {
		this.fieldSpecs.addAll(fieldSpecs);
	}

	/**
	 * Returns if the {@link TextIndexDefinition} has fields assigned.
	 *
	 * @return
	 */
	public boolean hasFieldSpec() {
		return !fieldSpecs.isEmpty();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexKeys()
	 */
	@Override
	public DBObject getIndexKeys() {

		DBObject keys = new BasicDBObject();
		for (TextIndexedFieldSpec fieldSpec : fieldSpecs) {
			keys.put(fieldSpec.fieldname, "text");
		}

		return keys;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexOptions()
	 */
	@Override
	public DBObject getIndexOptions() {

		DBObject options = new BasicDBObject();
		if (StringUtils.hasText(name)) {
			options.put("name", name);
		}
		if (StringUtils.hasText(defaultLanguage)) {
			options.put("default_language", defaultLanguage);
		}

		BasicDBObject weightsDbo = new BasicDBObject();
		for (TextIndexedFieldSpec fieldSpec : fieldSpecs) {
			if (fieldSpec.isWeighted()) {
				weightsDbo.put(fieldSpec.getFieldname(), fieldSpec.getWeight());
			}
		}

		if (!weightsDbo.isEmpty()) {
			options.put("weights", weightsDbo);
		}
		if (StringUtils.hasText(languageOverride)) {
			options.put("language_override", languageOverride);
		}

		return options;
	}

	/**
	 * @author Christoph Strobl
	 * @since 1.6
	 */
	public static class TextIndexedFieldSpec {

		private final String fieldname;
		private final Float weight;

		/**
		 * Create new {@link TextIndexedFieldSpec} for the given fieldname using the default weight of {@literal 1.0}.
		 *
		 * @param fieldname
		 */
		public TextIndexedFieldSpec(String fieldname) {
			this(fieldname, 1.0F);
		}

		/**
		 * Create new {@link TextIndexedFieldSpec} for the given fieldname and weight.
		 *
		 * @param fieldname
		 * @param weight
		 */
		public TextIndexedFieldSpec(String fieldname, Float weight) {

			Assert.hasText(fieldname, "Text index field cannot be blank.");
			this.fieldname = fieldname;
			this.weight = weight != null ? weight : 1.0F;
		}

		/**
		 * Get the fieldname associated with the {@link TextIndexedFieldSpec}.
		 *
		 * @return
		 */
		public String getFieldname() {
			return fieldname;
		}

		/**
		 * Get the weight associated with the {@link TextIndexedFieldSpec}.
		 *
		 * @return
		 */
		public Float getWeight() {
			return weight;
		}

		/**
		 * @return true if {@link #weight} holds a value other than the default of {@literal 1.0}.
		 */
		public boolean isWeighted() {
			return this.weight != null && this.weight.compareTo(1.0F) != 0;
		}

		@Override
		public int hashCode() {
			return ObjectUtils.nullSafeHashCode(fieldname);
		}

		@Override
		public boolean equals(Object obj) {

			if (this == obj) {
				return true;
			}
			if (obj == null) {
				return false;
			}
			if (!(obj instanceof TextIndexedFieldSpec)) {
				return false;
			}

			TextIndexedFieldSpec other = (TextIndexedFieldSpec) obj;

			return ObjectUtils.nullSafeEquals(this.fieldname, other.fieldname);
		}

	}

	/**
	 * {@link TextIndexDefinitionBuilder} helps defining options for creating {@link TextIndexDefinition}.
	 *
	 * @author Christoph Strobl
	 * @since 1.6
	 */
	public static class TextIndexDefinitionBuilder {

		private TextIndexDefinition instance;
		private static final TextIndexedFieldSpec ALL_FIELDS = new TextIndexedFieldSpec("$**");

		public TextIndexDefinitionBuilder() {
			this.instance = new TextIndexDefinition();
		}

		/**
		 * Define the name to be used when creating the index in the store.
		 *
		 * @param name
		 * @return
		 */
		public TextIndexDefinitionBuilder named(String name) {
			this.instance.name = name;
			return this;
		}

		/**
		 * Define the index to span all fields using a wildcard. <br/>
		 * <strong>NOTE:</strong> A {@link TextIndexDefinition} cannot contain any other fields when defined with a wildcard.
		 *
		 * @return
		 */
		public TextIndexDefinitionBuilder onAllFields() {

			if (!instance.fieldSpecs.isEmpty()) {
				throw new InvalidDataAccessApiUsageException("Cannot add a wildcard field spec to a non-empty index definition.");
			}

			this.instance.fieldSpecs.add(ALL_FIELDS);
			return this;
		}

		/**
		 * Include given fields with default weight.
		 *
		 * @param fieldnames
		 * @return
		 */
		public TextIndexDefinitionBuilder onFields(String... fieldnames) {

			for (String fieldname : fieldnames) {
				onField(fieldname);
			}
			return this;
		}

		/**
		 * Include given field with default weight.
		 *
		 * @param fieldname
		 * @return
		 */
		public TextIndexDefinitionBuilder onField(String fieldname) {
			return onField(fieldname, 1F);
		}

		/**
		 * Include given field with weight.
		 *
		 * @param fieldname
		 * @return
		 */
		public TextIndexDefinitionBuilder onField(String fieldname, Float weight) {

			if (this.instance.fieldSpecs.contains(ALL_FIELDS)) {
				throw new InvalidDataAccessApiUsageException(String.format("Cannot add %s to field spec for all fields.",
						fieldname));
			}

			this.instance.fieldSpecs.add(new TextIndexedFieldSpec(fieldname, weight));
			return this;
		}

		/**
		 * Define the default language to be used when indexing documents.
		 *
		 * @param language
		 * @see http://docs.mongodb.org/manual/tutorial/specify-language-for-text-index/#specify-default-language-text-index
		 * @return
		 */
		public TextIndexDefinitionBuilder withDefaultLanguage(String language) {

			this.instance.defaultLanguage = language;
			return this;
		}

		/**
		 * Define field for language override.
		 *
		 * @param fieldname
		 * @return
		 */
		public TextIndexDefinitionBuilder withLanguageOverride(String fieldname) {

			if (StringUtils.hasText(this.instance.languageOverride)) {
				throw new InvalidDataAccessApiUsageException(String.format(
						"Cannot set language override on %s as it is already defined on %s.", fieldname,
						this.instance.languageOverride));
			}

			this.instance.languageOverride = fieldname;
			return this;
		}

		public TextIndexDefinition build() {
			return this.instance;
		}

	}

}
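To make the new API concrete, here is a small usage sketch of the builder introduced above; the index name, field names and weights are invented for illustration:

```java
// Builder usage for the TextIndexDefinition added in this diff; values are made up for illustration.
TextIndexDefinition textIndex = TextIndexDefinition.builder()
		.named("blog_text_idx")
		.onField("title", 3F)          // weighted field -> ends up in the "weights" option
		.onField("body")               // default weight 1.0 -> no explicit weight entry
		.withDefaultLanguage("english")
		.withLanguageOverride("lang")
		.build();

textIndex.getIndexKeys();
// e.g. { "title" : "text" , "body" : "text" }

textIndex.getIndexOptions();
// e.g. { "name" : "blog_text_idx" , "default_language" : "english" , "weights" : { "title" : 3.0 } , "language_override" : "lang" }
```

Alternatively, `TextIndexDefinition.forAllFields()` produces the `$**` wildcard definition, and `onAllFields()` rejects further `onField(..)` additions.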
@@ -0,0 +1,44 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.index;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * {@link TextIndexed} marks a field to be part of the text index. As there can be only one text index per collection,
 * all fields marked with {@link TextIndexed} are combined into one single index. <br />
 *
 * @author Christoph Strobl
 * @since 1.6
 */
@Documented
@Target({ ElementType.FIELD })
@Retention(RetentionPolicy.RUNTIME)
public @interface TextIndexed {

	/**
	 * Defines the significance of the field relative to other indexed fields. The value directly influences the
	 * document's score. <br/>
	 * Defaults to {@literal 1.0}.
	 *
	 * @return
	 */
	float weight() default 1.0F;
}
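Putting the new annotations together, a hypothetical domain type could declare its text index like this. The class and field names are assumptions for illustration; `@TextScore` is the companion annotation referenced later in this diff:

```java
// Hypothetical domain type; the text index resolver combines all @TextIndexed fields into one index.
@Document(language = "english")
class BlogPost {

	@TextIndexed(weight = 3) String title; // higher weight -> larger contribution to the text score
	@TextIndexed String body;              // default weight 1.0

	@Language String lang;                 // optional per-document language override
	@TextScore Float score;                // populated with the relevance score on text queries
}
```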
@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2013 the original author or authors.
 * Copyright 2011-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -16,6 +16,7 @@
package org.springframework.data.mongodb.core.mapping;

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
@@ -35,9 +36,11 @@ import org.springframework.data.mongodb.MongoCollectionUtils;
import org.springframework.data.util.TypeInformation;
import org.springframework.expression.Expression;
import org.springframework.expression.ParserContext;
import org.springframework.expression.common.LiteralExpression;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.expression.spel.support.StandardEvaluationContext;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;

/**
@@ -47,36 +50,45 @@ import org.springframework.util.StringUtils;
 * @author Jon Brisbin
 * @author Oliver Gierke
 * @author Thomas Darimont
 * @author Christoph Strobl
 */
public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, MongoPersistentProperty> implements
		MongoPersistentEntity<T>, ApplicationContextAware {

	private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected! Both %s and %s map to the same field name %s! Disambiguate using @DocumentField annotation!";
	private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected! Both %s and %s map to the same field name %s! Disambiguate using @Field annotation!";
	private static final SpelExpressionParser PARSER = new SpelExpressionParser();

	private final String collection;
	private final SpelExpressionParser parser;
	private final String language;

	private final StandardEvaluationContext context;
	private final Expression expression;

	/**
	 * Creates a new {@link BasicMongoPersistentEntity} with the given {@link TypeInformation}. Will default the
	 * collection name to the entity's simple type name.
	 *
	 * @param typeInformation
	 * @param typeInformation must not be {@literal null}.
	 */
	public BasicMongoPersistentEntity(TypeInformation<T> typeInformation) {

		super(typeInformation, MongoPersistentPropertyComparator.INSTANCE);

		this.parser = new SpelExpressionParser();
		this.context = new StandardEvaluationContext();

		Class<?> rawType = typeInformation.getType();
		String fallback = MongoCollectionUtils.getPreferredCollectionName(rawType);

		if (rawType.isAnnotationPresent(Document.class)) {
			Document d = rawType.getAnnotation(Document.class);
			this.collection = StringUtils.hasText(d.collection()) ? d.collection() : fallback;
		Document document = rawType.getAnnotation(Document.class);

		this.expression = detectExpression(document);
		this.context = new StandardEvaluationContext();

		if (document != null) {

			this.collection = StringUtils.hasText(document.collection()) ? document.collection() : fallback;
			this.language = StringUtils.hasText(document.language()) ? document.language() : "";
		} else {
			this.collection = fallback;
			this.language = "";
		}
	}

@@ -96,8 +108,34 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
	 * @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#getCollection()
	 */
	public String getCollection() {
		Expression expression = parser.parseExpression(collection, ParserContext.TEMPLATE_EXPRESSION);
		return expression.getValue(context, String.class);
		return expression == null ? collection : expression.getValue(context, String.class);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#getLanguage()
	 */
	@Override
	public String getLanguage() {
		return this.language;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#getTextScoreProperty()
	 */
	@Override
	public MongoPersistentProperty getTextScoreProperty() {
		return getPersistentProperty(TextScore.class);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#hasTextScoreProperty()
	 */
	@Override
	public boolean hasTextScoreProperty() {
		return getTextScoreProperty() != null;
	}

	/*
@@ -107,12 +145,22 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
	@Override
	public void verify() {

		verifyFieldUniqueness();
		verifyFieldTypes();
	}

	private void verifyFieldUniqueness() {

		AssertFieldNameUniquenessHandler handler = new AssertFieldNameUniquenessHandler();

		doWithProperties(handler);
		doWithAssociations(handler);
	}

	private void verifyFieldTypes() {
		doWithProperties(new PropertyTypeAssertionHandler());
	}

	/**
	 * {@link Comparator} implementation inspecting the {@link MongoPersistentProperty}'s order.
	 *
@@ -194,6 +242,31 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
		return null;
	}

	/**
	 * Returns a SpEL {@link Expression} for the collection String expressed in the given {@link Document} annotation if
	 * present or {@literal null} otherwise. Will also return {@literal null} if the collection {@link String} evaluates
	 * to a {@link LiteralExpression} (indicating that no subsequent evaluation is necessary).
	 *
	 * @param document can be {@literal null}.
	 * @return
	 */
	private static Expression detectExpression(Document document) {

		if (document == null) {
			return null;
		}

		String collection = document.collection();

		if (!StringUtils.hasText(collection)) {
			return null;
		}

		Expression expression = PARSER.parseExpression(document.collection(), ParserContext.TEMPLATE_EXPRESSION);

		return expression instanceof LiteralExpression ? null : expression;
	}

	/**
	 * Handler to collect {@link MongoPersistentProperty} instances and check that each of them is mapped to a distinct
	 * field name.
@@ -226,4 +299,61 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
			properties.put(fieldName, property);
		}
	}

	/**
	 * @author Christoph Strobl
	 * @since 1.6
	 */
	private static class PropertyTypeAssertionHandler implements PropertyHandler<MongoPersistentProperty> {

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mapping.PropertyHandler#doWithPersistentProperty(org.springframework.data.mapping.PersistentProperty)
		 */
		@Override
		public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) {

			potentiallyAssertTextScoreType(persistentProperty);
			potentiallyAssertLanguageType(persistentProperty);
			potentiallyAssertDBRefTargetType(persistentProperty);
		}

		private static void potentiallyAssertLanguageType(MongoPersistentProperty persistentProperty) {

			if (persistentProperty.isExplicitLanguageProperty()) {
				assertPropertyType(persistentProperty, String.class);
			}
		}

		private static void potentiallyAssertTextScoreType(MongoPersistentProperty persistentProperty) {

			if (persistentProperty.isTextScoreProperty()) {
				assertPropertyType(persistentProperty, Float.class, Double.class);
			}
		}

		private static void potentiallyAssertDBRefTargetType(MongoPersistentProperty persistentProperty) {

			if (persistentProperty.isDbReference() && persistentProperty.getDBRef().lazy()) {
				if (persistentProperty.isArray() || Modifier.isFinal(persistentProperty.getActualType().getModifiers())) {
					throw new MappingException(String.format(
							"Invalid lazy DBRef property for %s. Found %s which must not be an array nor a final class.",
							persistentProperty.getField(), persistentProperty.getActualType()));
				}
			}
		}

		private static void assertPropertyType(MongoPersistentProperty persistentProperty, Class<?>... validMatches) {

			for (Class<?> potentialMatch : validMatches) {
				if (ClassUtils.isAssignable(potentialMatch, persistentProperty.getActualType())) {
					return;
				}
			}

			throw new MappingException(
					String.format("Mismatching types for %s. Found %s, expected one of %s.", persistentProperty.getField(),
							persistentProperty.getActualType(), StringUtils.arrayToCommaDelimitedString(validMatches)));
		}
	}
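The verification rules above reject a few easily made mapping mistakes during `verify()`. A hypothetical entity that would fail each of the three checks (all names invented for illustration):

```java
// Hypothetical mapping that the PropertyTypeAssertionHandler would reject during verify():
@Document
class BrokenEntity {

	@TextScore String score;          // must be Float or Double -> MappingException
	@Language Integer lang;           // explicit language property must be a String -> MappingException
	@DBRef(lazy = true) String owner; // lazy DBRef target must not be a final class such as String -> MappingException
}
```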
}
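The `detectExpression(..)` / `getCollection()` changes above mean the collection attribute is parsed once as a SpEL template and only re-evaluated when it is not a plain literal. A contrived sketch of the two cases (collection values chosen purely for illustration):

```java
// Literal value: detectExpression(..) yields null, so getCollection() returns the raw String.
@Document(collection = "persons")
class Person {}

// Template expression: parsed with ParserContext.TEMPLATE_EXPRESSION and evaluated on every
// getCollection() call; this contrived expression simply concatenates two literals.
@Document(collection = "#{'persons' + '_archive'}")
class ArchivedPerson {}
```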

@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2013 the original author or authors.
 * Copyright 2011-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -27,9 +27,10 @@ import org.slf4j.LoggerFactory;
import org.springframework.data.annotation.Id;
import org.springframework.data.mapping.Association;
import org.springframework.data.mapping.model.AnnotationBasedPersistentProperty;
import org.springframework.data.mapping.model.FieldNamingStrategy;
import org.springframework.data.mapping.model.MappingException;
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
import org.springframework.data.mapping.model.SimpleTypeHolder;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;

import com.mongodb.DBObject;
@@ -40,6 +41,7 @@ import com.mongodb.DBObject;
 * @author Oliver Gierke
 * @author Patryk Wasik
 * @author Thomas Darimont
 * @author Christoph Strobl
 */
public class BasicMongoPersistentProperty extends AnnotationBasedPersistentProperty<MongoPersistentProperty> implements
		MongoPersistentProperty {
@@ -47,20 +49,18 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
	private static final Logger LOG = LoggerFactory.getLogger(BasicMongoPersistentProperty.class);

	private static final String ID_FIELD_NAME = "_id";
	private static final String LANGUAGE_FIELD_NAME = "language";
	private static final Set<Class<?>> SUPPORTED_ID_TYPES = new HashSet<Class<?>>();
	private static final Set<String> SUPPORTED_ID_PROPERTY_NAMES = new HashSet<String>();

	private static final Field CAUSE_FIELD;

	static {

		SUPPORTED_ID_TYPES.add(ObjectId.class);
		SUPPORTED_ID_TYPES.add(String.class);
		SUPPORTED_ID_TYPES.add(BigInteger.class);

		SUPPORTED_ID_PROPERTY_NAMES.add("id");
		SUPPORTED_ID_PROPERTY_NAMES.add("_id");

		CAUSE_FIELD = ReflectionUtils.findField(Throwable.class, "cause");
	}

	private final FieldNamingStrategy fieldNamingStrategy;
@@ -86,14 +86,6 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
		}
	}

	/* (non-Javadoc)
	 * @see org.springframework.data.mapping.FooBasicPersistentProperty#isAssociation()
	 */
	@Override
	public boolean isAssociation() {
		return field.isAnnotationPresent(DBRef.class) || super.isAssociation();
	}

	/**
	 * Also considers fields as id that are of supported id type and name.
	 *
@@ -108,7 +100,7 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
		}

		// We need to support a wider range of ID types than just the ones that can be converted to an ObjectId
		return SUPPORTED_ID_PROPERTY_NAMES.contains(field.getName());
		return SUPPORTED_ID_PROPERTY_NAMES.contains(getName());
	}

	/*
@@ -163,8 +155,7 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
	 * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#getFieldOrder()
	 */
	public int getFieldOrder() {
		org.springframework.data.mongodb.core.mapping.Field annotation = getField().getAnnotation(
				org.springframework.data.mongodb.core.mapping.Field.class);
		org.springframework.data.mongodb.core.mapping.Field annotation = findAnnotation(org.springframework.data.mongodb.core.mapping.Field.class);
		return annotation != null ? annotation.order() : Integer.MAX_VALUE;
	}

@@ -182,7 +173,7 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
	 * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isDbReference()
	 */
	public boolean isDbReference() {
		return getField().isAnnotationPresent(DBRef.class);
		return isAnnotationPresent(DBRef.class);
	}

	/*
@@ -190,14 +181,33 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
	 * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#getDBRef()
	 */
	public DBRef getDBRef() {
		return getField().getAnnotation(DBRef.class);
		return findAnnotation(DBRef.class);
	}

	/*
	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#usePropertyAccess()
	 * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isLanguageProperty()
	 */
	public boolean usePropertyAccess() {
		return CAUSE_FIELD.equals(getField());
	@Override
	public boolean isLanguageProperty() {
		return getFieldName().equals(LANGUAGE_FIELD_NAME) || isExplicitLanguageProperty();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isExplicitLanguageProperty()
	 */
	@Override
	public boolean isExplicitLanguageProperty() {
		return isAnnotationPresent(Language.class);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isTextScoreProperty()
	 */
	@Override
	public boolean isTextScoreProperty() {
		return isAnnotationPresent(TextScore.class);
	}
}
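With these additions, a property is treated as the language field either implicitly by its mapped field name or explicitly via `@Language` (hypothetical example types):

```java
// Implicit: the mapped field name is "language", so isLanguageProperty() returns true.
@Document
class ImplicitLanguage {
	String language;
}

// Explicit: any String property annotated with @Language, regardless of its name.
@Document
class ExplicitLanguage {
	@Language String locale;
}
```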

@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2013 the original author or authors.
 * Copyright 2011-2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core.mapping;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;

import org.springframework.data.mapping.model.FieldNamingStrategy;
import org.springframework.data.mapping.model.SimpleTypeHolder;

/**
@@ -30,6 +31,8 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
	private Boolean isIdProperty;
	private Boolean isAssociation;
	private String fieldName;
	private Boolean usePropertyAccess;
	private Boolean isTransient;

	/**
	 * Creates a new {@link CachingMongoPersistentProperty}.
@@ -84,4 +87,32 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty

		return this.fieldName;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mapping.model.AnnotationBasedPersistentProperty#usePropertyAccess()
	 */
	@Override
	public boolean usePropertyAccess() {

		if (this.usePropertyAccess == null) {
			this.usePropertyAccess = super.usePropertyAccess();
		}

		return this.usePropertyAccess;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mapping.model.AnnotationBasedPersistentProperty#isTransient()
	 */
	@Override
	public boolean isTransient() {

		if (this.isTransient == null) {
			this.isTransient = super.isTransient();
		}

		return this.isTransient;
	}
}

@@ -1,46 +0,0 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.mapping;

import java.util.Locale;

/**
 * {@link FieldNamingStrategy} that abbreviates field names by using the very first letter of the camel case parts of
 * the {@link MongoPersistentProperty}'s name.
 *
 * @since 1.3
 * @author Oliver Gierke
 */
public class CamelCaseAbbreviatingFieldNamingStrategy implements FieldNamingStrategy {

	private static final String CAMEL_CASE_PATTERN = "(?<!(^|[A-Z]))(?=[A-Z])|(?<!^)(?=[A-Z][a-z])";

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.mapping.FieldNamingStrategy#getFieldName(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty)
	 */
	public String getFieldName(MongoPersistentProperty property) {

		String[] parts = property.getName().split(CAMEL_CASE_PATTERN);
		StringBuilder builder = new StringBuilder();

		for (String part : parts) {
			builder.append(part.substring(0, 1).toLowerCase(Locale.US));
		}

		return builder.toString();
	}
}
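For context on what this removed strategy did, here is a standalone sketch of the same abbreviation logic; the harness class is an illustration, not part of the original file:

```java
import java.util.Locale;

// Abbreviates a camel-case name to the first letter of each part, e.g. "fooBarBaz" -> "fbb".
public class CamelCaseAbbreviationSketch {

	private static final String CAMEL_CASE_PATTERN = "(?<!(^|[A-Z]))(?=[A-Z])|(?<!^)(?=[A-Z][a-z])";

	public static void main(String[] args) {

		StringBuilder abbreviated = new StringBuilder();
		for (String part : "fooBarBaz".split(CAMEL_CASE_PATTERN)) {
			abbreviated.append(part.substring(0, 1).toLowerCase(Locale.US));
		}

		System.out.println(abbreviated); // prints "fbb"
	}
}
```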
@@ -1,5 +1,5 @@
/*
 * Copyright (c) 2011 by the original author(s).
 * Copyright (c) 2011-2014 by the original author(s).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -29,6 +29,7 @@ import org.springframework.data.annotation.Persistent;
 *
 * @author Jon Brisbin <jbrisbin@vmware.com>
 * @author Oliver Gierke ogierke@vmware.com
 * @author Christoph Strobl
 */
@Persistent
@Inherited
@@ -37,4 +38,13 @@ import org.springframework.data.annotation.Persistent;
public @interface Document {

	String collection() default "";

	/**
	 * Defines the default language to be used with this document.
	 *
	 * @since 1.6
	 * @return
	 */
	String language() default "";

}

@@ -1,5 +1,5 @@
/*
 * Copyright 2013 the original author or authors.
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,22 +15,21 @@
 */
package org.springframework.data.mongodb.core.mapping;

/**
 * SPI interface to determine how to name document fields in cases the field name is not manually defined.
 *
 * @see DocumentField
 * @see PropertyNameFieldNamingStrategy
 * @see CamelCaseAbbreviatingFieldNamingStrategy
 * @since 1.3
 * @author Oliver Gierke
 */
public interface FieldNamingStrategy {
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Mark property as language field.
 *
 * @author Christoph Strobl
 * @since 1.6
 */
@Documented
@Target({ ElementType.FIELD })
@Retention(RetentionPolicy.RUNTIME)
public @interface Language {

	/**
	 * Returns the field name to be used for the given {@link MongoPersistentProperty}.
	 *
	 * @param property must not be {@literal null} or empty;
	 * @return
	 */
	String getFieldName(MongoPersistentProperty property);
}
Some files were not shown because too many files have changed in this diff.