Compare commits: 1.3.2.RELEASE...1.4.x
148 commits
| Author | SHA1 | Date |
|---|---|---|
| | 0ca297df50 | |
| | 22078d9bb0 | |
| | adfa7cdd88 | |
| | 88deac4ca7 | |
| | 0275e6fb4c | |
| | 291b89a8b6 | |
| | fe9d086334 | |
| | cf4db90a7f | |
| | dc67830c6f | |
| | 33004f217d | |
| | 72adb309ee | |
| | 2e493ea8c5 | |
| | f0fc3961d2 | |
| | f80fd92692 | |
| | 87ef66cb12 | |
| | 332d9d95f4 | |
| | c18d6381aa | |
| | 53697179c3 | |
| | 7b3acb2890 | |
| | d1ac323e4a | |
| | b1068687bb | |
| | 6eae6d3e2c | |
| | abfb98afe1 | |
| | f361368893 | |
| | 063438002b | |
| | 9b54a5cd39 | |
| | 14360f2ab4 | |
| | 81c368c851 | |
| | cf3818e04c | |
| | da9870504f | |
| | 1285f4f26e | |
| | 791938f05d | |
| | 1b2d98dd3d | |
| | de364c65ab | |
| | 57a74b0427 | |
| | f35df8fe69 | |
| | 2d3aac1826 | |
| | 15db4ba6ea | |
| | f02ac5ea44 | |
| | 86633e01db | |
| | 5fe3763f9c | |
| | d1e2b143f3 | |
| | 61ab232bc1 | |
| | 443cde6236 | |
| | b23796fb45 | |
| | 605f7459f7 | |
| | ef6db5970b | |
| | 47a5a32713 | |
| | 1675528fc7 | |
| | 3455cbc634 | |
| | ed779e52b7 | |
| | c70898b019 | |
| | 294616432d | |
| | 47dd512f95 | |
| | f16e8d85e5 | |
| | eb03ae61f2 | |
| | 5be66a3fee | |
| | d88e4c0e3e | |
| | 57d1449008 | |
| | 8d00a0d926 | |
| | e3fa844488 | |
| | 58bee75a6b | |
| | a402395f5c | |
| | 9d5f8f3ba0 | |
| | 7ebf953063 | |
| | 617ebe0ca7 | |
| | 7f76789664 | |
| | 81e5919ace | |
| | efd74956dc | |
| | 49eee40f7e | |
| | 8e93b844c7 | |
| | 3e64432f1a | |
| | 88c968ad36 | |
| | 99eefe0773 | |
| | 3d4569be14 | |
| | 57455c4a26 | |
| | f9e20d12b2 | |
| | 4d6152c65e | |
| | d81cc53c12 | |
| | af4b84ea43 | |
| | f9110828bc | |
| | f301837be5 | |
| | 4d29d937eb | |
| | 86c11bc614 | |
| | be34b4e503 | |
| | ebfa2c5689 | |
| | b245ef2d9e | |
| | 5ef40d54bc | |
| | c679dba438 | |
| | fd6e4000b5 | |
| | c12a27a8f8 | |
| | df2184f204 | |
| | e9c8644d23 | |
| | c730b8f479 | |
| | f3b31fc467 | |
| | f778b2554c | |
| | 9d292f64b9 | |
| | 8ab038f83c | |
| | 689552c28e | |
| | 9ea9912b23 | |
| | a952ce5d2b | |
| | b88d960893 | |
| | e44d1f5f9a | |
| | 2b5e2361a8 | |
| | 5737f2d19d | |
| | 60494a6904 | |
| | ceb561e3e4 | |
| | e2d0220cea | |
| | ea33e8b8c6 | |
| | 506b6a2e85 | |
| | 7c0eee9e09 | |
| | 332e5eb715 | |
| | 39ee9b56e2 | |
| | 8fb390ee88 | |
| | df1c4496dc | |
| | b808fd3003 | |
| | ed12298271 | |
| | 682798325b | |
| | 0e69021486 | |
| | ae7e24f1b6 | |
| | 94d4fa613c | |
| | 39c9593b39 | |
| | 6e5f3661a8 | |
| | 2bd78e0bf0 | |
| | dd59cdc59a | |
| | 7e471e2301 | |
| | 0871a43831 | |
| | 710e77dabe | |
| | 9c996617e8 | |
| | eebd49ab8d | |
| | fb979b1734 | |
| | b5c88938e0 | |
| | 4027770701 | |
| | a120ce2bb1 | |
| | a5d40a049d | |
| | f0f12d5296 | |
| | 24e06cf219 | |
| | 1b83ff0382 | |
| | fe41202f96 | |
| | 78235b4799 | |
| | 51ece4353b | |
| | 51bab838b0 | |
| | 361f9daa45 | |
| | 56b23a6dbe | |
| | 9e15c17e26 | |
| | a3c77a43b6 | |
| | 55169e2e11 | |
| | 24672e6bdd | |
CONTRIBUTING.md (new file, 1 line)

@@ -0,0 +1 @@
You find the contribution guidelines for Spring Data projects [here](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.md).
README.md (30 changed lines)

@@ -1,6 +1,6 @@
# Spring Data MongoDB

-The primary goal of the [Spring Data](http://www.springsource.org/spring-data) project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.
+The primary goal of the [Spring Data](http://projects.spring.io/spring-data) project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.

The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities. The Spring Data MongoDB project provides integration with the MongoDB document database. Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB DBCollection and easily writing a repository style data access layer.

@@ -8,12 +8,12 @@ The Spring Data MongoDB project aims to provide a familiar and consistent Spring

For a comprehensive treatment of all the Spring Data MongoDB features, please refer to:

-* the [User Guide](http://static.springsource.org/spring-data/data-mongodb/docs/current/reference/html/)
-* the [JavaDocs](http://static.springsource.org/spring-data/data-mongodb/docs/current/api/) have extensive comments in them as well.
-* the home page of [Spring Data MongoDB](http://www.springsource.org/spring-data/mongodb) contains links to articles and other resources.
-* for more detailed questions, use the [forum](http://forum.springsource.org/forumdisplay.php?f=80).
+* the [User Guide](http://docs.spring.io/spring-data/mongodb/docs/current/reference/html/)
+* the [JavaDocs](http://docs.spring.io/spring-data/mongodb/docs/current/api/) have extensive comments in them as well.
+* the home page of [Spring Data MongoDB](http://projects.spring.io/spring-data-mongodb) contains links to articles and other resources.
+* for more detailed questions, use the [forum](http://forum.spring.io/forum/spring-projects/data/nosql).

-If you are new to Spring as well as to Spring Data, look for information about [Spring projects](http://www.springsource.org/projects).
+If you are new to Spring as well as to Spring Data, look for information about [Spring projects](http://projects.spring.io/).

## Quick Start

@@ -26,7 +26,7 @@ Add the Maven dependency:

    <dependency>
      <groupId>org.springframework.data</groupId>
      <artifactId>spring-data-mongodb</artifactId>
-     <version>1.2.3.RELEASE</version>
+     <version>1.4.2.RELEASE</version>
    </dependency>
```

@@ -36,13 +36,13 @@ If you'd rather like the latest snapshots of the upcoming major version, use our

    <dependency>
      <groupId>org.springframework.data</groupId>
      <artifactId>spring-data-mongodb</artifactId>
-     <version>1.3.0.BUILD-SNAPSHOT</version>
+     <version>1.5.0.BUILD-SNAPSHOT</version>
    </dependency>

    <repository>
      <id>spring-libs-snapshot</id>
      <name>Spring Snapshot Repository</name>
-     <url>http://repo.springsource.org/libs-snapshot</url>
+     <url>http://repo.spring.io/libs-snapshot</url>
    </repository>
```

@@ -53,7 +53,7 @@ MongoTemplate is the central support class for Mongo database operations. It pro
* Basic POJO mapping support to and from BSON
* Convenience methods to interact with the store (insert object, update objects) and MongoDB specific ones (geo-spatial operations, upserts, map-reduce etc.)
* Connection affinity callback
-* Exception translation into Spring's [technology agnostic DAO exception hierarchy](http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/dao.html#dao-exceptions).
+* Exception translation into Spring's [technology agnostic DAO exception hierarchy](http://docs.spring.io/spring/docs/current/spring-framework-reference/html/dao.html#dao-exceptions).
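The hunk above only touches documentation links; for orientation, a minimal `MongoTemplate` sketch along the lines of the README's quick start could look as follows. The `Person` class and the database name `"database"` are illustrative assumptions, not part of this change.

```java
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.MongoClient;

public class MongoApp {

	static class Person {

		String name;
		int age;

		public Person() {}

		public Person(String name, int age) {
			this.name = name;
			this.age = age;
		}

		@Override
		public String toString() {
			return name + " (" + age + ")";
		}
	}

	public static void main(String[] args) throws Exception {

		// connects to localhost:27017 and uses the (hypothetical) "database" database
		MongoTemplate template = new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "database"));

		// the POJO is mapped to BSON and stored in the "person" collection
		template.insert(new Person("Joe", 34));

		// driver exceptions raised here are translated into Spring's DataAccessException hierarchy
		Person joe = template.findOne(new Query(Criteria.where("name").is("Joe")), Person.class);
		System.out.println(joe);

		template.dropCollection(Person.class);
	}
}
```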
### Spring Data repositories

@@ -81,7 +81,7 @@ class ApplicationConfig extends AbstractMongoConfiguration {

  @Override
  public Mongo mongo() throws Exception {
-    return new Mongo();
+    return new MongoClient();
  }

  @Override
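Only the `mongo()` method is visible in the hunk above; the full Java configuration class it is taken from might look roughly like the sketch below. The database name and everything beyond the excerpt are assumptions, not part of the diff.

```java
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;

import com.mongodb.Mongo;
import com.mongodb.MongoClient;

@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoConfiguration {

	@Override
	protected String getDatabaseName() {
		// illustrative database name, not taken from the diff
		return "springdata";
	}

	@Override
	public Mongo mongo() throws Exception {
		// MongoClient is the preferred entry point of the 2.x driver; it connects to localhost:27017 by default
		return new MongoClient();
	}
}
```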
@@ -94,9 +94,9 @@ class ApplicationConfig extends AbstractMongoConfiguration {
This sets up a connection to a local MongoDB instance and enables the detection of Spring Data repositories (through `@EnableMongoRepositories`). The same configuration would look like this in XML:

```xml
-<bean id="template" class="org.springframework.data.document.mongodb.MongoTemplate">
+<bean id="template" class="org.springframework.data.mongodb.core.MongoTemplate">
  <constructor-arg>
-    <bean class="com.mongodb.Mongo">
+    <bean class="com.mongodb.MongoClient">
      <constructor-arg value="localhost" />
      <constructor-arg value="27017" />
    </bean>
@@ -139,9 +139,9 @@ public class MyService {

Here are some ways for you to get involved in the community:

-* Get involved with the Spring community on the Spring Community Forums. Please help out on the [forum](http://forum.springsource.org/forumdisplay.php?f=80) by responding to questions and joining the debate.
+* Get involved with the Spring community on the Spring Community Forums. Please help out on the [forum](http://forum.spring.io/forum/spring-projects/data/nosql) by responding to questions and joining the debate.
* Create [JIRA](https://jira.springframework.org/browse/DATADOC) tickets for bugs and new features and comment and vote on the ones that you are interested in.
* Github is for social coding: if you want to write code, we encourage contributions through pull requests from [forks of this repository](http://help.github.com/forking/). If you want to contribute code this way, please reference a JIRA ticket as well covering the specific issue you are addressing.
-* Watch for upcoming articles on Spring by [subscribing](http://www.springsource.org/node/feed) to springframework.org
+* Watch for upcoming articles on Spring by [subscribing](http://spring.io/blog) to spring.io.

Before we accept a non-trivial patch or pull request we will need you to sign the [contributor's agreement](https://support.springsource.com/spring_committer_signup). Signing the contributor's agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. Active contributors might be asked to join the core team, and given the ability to merge pull requests.
pom.xml (86 changed lines)

@@ -1,21 +1,21 @@
-<?xml version="1.0" encoding="UTF-8"?>
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

	<modelVersion>4.0.0</modelVersion>

	<groupId>org.springframework.data</groupId>
	<artifactId>spring-data-mongodb-parent</artifactId>
-	<version>1.3.0.RELEASE</version>
+	<version>1.4.4.BUILD-SNAPSHOT</version>
	<packaging>pom</packaging>

	<name>Spring Data MongoDB</name>
	<description>MongoDB support for Spring Data</description>
-	<url>http://www.springsource.org/spring-data/mongodb</url>
+	<url>http://projects.spring.io/spring-data-mongodb</url>

	<parent>
		<groupId>org.springframework.data.build</groupId>
		<artifactId>spring-data-parent</artifactId>
-		<version>1.2.0.RELEASE</version>
+		<version>1.3.4.BUILD-SNAPSHOT</version>
		<relativePath>../spring-data-build/parent/pom.xml</relativePath>
	</parent>

@@ -29,19 +29,20 @@
	<properties>
		<project.type>multi</project.type>
		<dist.id>spring-data-mongodb</dist.id>
-		<springdata.commons>1.6.0.RELEASE</springdata.commons>
-		<mongo>2.10.1</mongo>
+		<springdata.commons>1.7.4.BUILD-SNAPSHOT</springdata.commons>
+		<mongo>2.11.4</mongo>
+		<mongo-osgi>${mongo}</mongo-osgi>
	</properties>

	<developers>
		<developer>
			<id>ogierke</id>
			<name>Oliver Gierke</name>
-			<email>ogierke at vmware.com</email>
-			<organization>SpringSource</organization>
-			<organizationUrl>http://www.springsource.com</organizationUrl>
+			<email>ogierke at gopivotal.com</email>
+			<organization>Pivotal</organization>
+			<organizationUrl>http://www.gopivotal.com</organizationUrl>
			<roles>
-				<role>Project Lean</role>
+				<role>Project Lead</role>
			</roles>
			<timezone>+1</timezone>
		</developer>
@@ -49,8 +50,8 @@
			<id>trisberg</id>
			<name>Thomas Risberg</name>
			<email>trisberg at vmware.com</email>
-			<organization>SpringSource</organization>
-			<organizationUrl>http://www.springsource.com</organizationUrl>
+			<organization>Pivotal</organization>
+			<organizationUrl>http://www.gopivotal.com</organizationUrl>
			<roles>
				<role>Developer</role>
			</roles>
@@ -59,9 +60,9 @@
		<developer>
			<id>mpollack</id>
			<name>Mark Pollack</name>
-			<email>mpollack at vmware.com</email>
-			<organization>SpringSource</organization>
-			<organizationUrl>http://www.springsource.com</organizationUrl>
+			<email>mpollack at gopivotal.com</email>
+			<organization>Pivotal</organization>
+			<organizationUrl>http://www.gopivotal.com</organizationUrl>
			<roles>
				<role>Developer</role>
			</roles>
@@ -70,16 +71,48 @@
		<developer>
			<id>jbrisbin</id>
			<name>Jon Brisbin</name>
-			<email>jbrisbin at vmware.com</email>
-			<organization>SpringSource</organization>
-			<organizationUrl>http://www.springsource.com</organizationUrl>
+			<email>jbrisbin at gopivotal.com</email>
+			<organization>Pivotal</organization>
+			<organizationUrl>http://www.gopivotal.com</organizationUrl>
			<roles>
				<role>Developer</role>
			</roles>
			<timezone>-6</timezone>
		</developer>
+		<developer>
+			<id>tdarimont</id>
+			<name>Thomas Darimont</name>
+			<email>tdarimont at gopivotal.com</email>
+			<organization>Pivotal</organization>
+			<organizationUrl>http://www.gopivotal.com</organizationUrl>
+			<roles>
+				<role>Developer</role>
+			</roles>
+			<timezone>+1</timezone>
+		</developer>
+		<developer>
+			<id>cstrobl</id>
+			<name>Christoph Strobl</name>
+			<email>cstrobl at gopivotal.com</email>
+			<organization>Pivotal</organization>
+			<organizationUrl>http://www.gopivotal.com</organizationUrl>
+			<roles>
+				<role>Developer</role>
+			</roles>
+			<timezone>+1</timezone>
+		</developer>
	</developers>

+	<profiles>
+		<profile>
+			<id>mongo-next</id>
+			<properties>
+				<mongo>2.12.0</mongo>
+				<mongo-osgi>2.12.0</mongo-osgi>
+			</properties>
+		</profile>
+	</profiles>

	<dependencies>
		<!-- MongoDB -->
		<dependency>
@@ -91,9 +124,20 @@

	<repositories>
		<repository>
-			<id>spring-lib-release</id>
-			<url>http://repo.springsource.org/libs-release-local</url>
+			<id>spring-libs-snapshopt</id>
+			<url>http://repo.spring.io/libs-snapshot</url>
		</repository>
+		<repository>
+			<id>spring-libs-snapshot</id>
+			<url>http://repo.spring.io/libs-snapshot</url>
+		</repository>
	</repositories>

+	<pluginRepositories>
+		<pluginRepository>
+			<id>spring-plugins-release</id>
+			<url>http://repo.spring.io/plugins-release</url>
+		</pluginRepository>
+	</pluginRepositories>
+
</project>
@@ -6,12 +6,12 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.3.0.RELEASE</version>
|
||||
<version>1.4.4.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
<artifactId>spring-data-mongodb-cross-store</artifactId>
|
||||
<name>Spring Data MongoDB - Cross-Store Persistence Support</name>
|
||||
<name>Spring Data MongoDB - Cross-Store Support</name>
|
||||
|
||||
<properties>
|
||||
<jpa>1.0.0.Final</jpa>
|
||||
@@ -24,7 +24,6 @@
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-beans</artifactId>
|
||||
<version>${spring}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>commons-logging</groupId>
|
||||
@@ -35,24 +34,21 @@
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-tx</artifactId>
|
||||
<version>${spring}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-aspects</artifactId>
|
||||
<version>${spring}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-orm</artifactId>
|
||||
<version>${spring}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Spring Data -->
|
||||
<dependency>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb</artifactId>
|
||||
<version>1.3.0.RELEASE</version>
|
||||
<version>1.4.4.BUILD-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -60,17 +56,13 @@
|
||||
<artifactId>aspectjrt</artifactId>
|
||||
<version>${aspectj}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>cglib</groupId>
|
||||
<artifactId>cglib</artifactId>
|
||||
<version>2.2</version>
|
||||
</dependency>
|
||||
|
||||
<!-- JPA -->
|
||||
<dependency>
|
||||
<groupId>org.hibernate.javax.persistence</groupId>
|
||||
<artifactId>hibernate-jpa-2.0-api</artifactId>
|
||||
<version>${jpa}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<!-- For Tests -->
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -41,17 +41,13 @@ import com.mongodb.MongoException;
|
||||
public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
|
||||
private static final String ENTITY_CLASS = "_entity_class";
|
||||
|
||||
private static final String ENTITY_ID = "_entity_id";
|
||||
|
||||
private static final String ENTITY_FIELD_NAME = "_entity_field_name";
|
||||
|
||||
private static final String ENTITY_FIELD_CLASS = "_entity_field_class";
|
||||
|
||||
protected final Logger log = LoggerFactory.getLogger(getClass());
|
||||
|
||||
private MongoTemplate mongoTemplate;
|
||||
|
||||
private EntityManagerFactory entityManagerFactory;
|
||||
|
||||
public void setMongoTemplate(MongoTemplate mongoTemplate) {
|
||||
@@ -113,12 +109,14 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
|
||||
|
||||
log.debug("getPersistentId called on " + entity);
|
||||
|
||||
if (entityManagerFactory == null) {
|
||||
throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null");
|
||||
}
|
||||
Object o = entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity);
|
||||
return o;
|
||||
|
||||
return entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity);
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
Bundle-SymbolicName: org.springframework.data.mongodb.crossstore
|
||||
Bundle-Name: Spring Data MongoDB Cross Store Support
|
||||
Bundle-Vendor: SpringSource
|
||||
Bundle-Vendor: Pivotal Software, Inc.
|
||||
Bundle-ManifestVersion: 2
|
||||
Import-Package:
|
||||
sun.reflect;version="0";resolution:=optional
|
||||
Export-Template:
|
||||
org.springframework.data.mongodb.crossstore.*;version="${project.version}"
|
||||
Import-Template:
|
||||
com.mongodb.*;version="0",
|
||||
com.mongodb.*;version="${mongo-osgi:[=.=.=,+1.0.0)}",
|
||||
javax.persistence.*;version="${jpa:[=.=.=,+1.0.0)}",
|
||||
org.aspectj.*;version="${aspectj:[1.0.0, 2.0.0)}",
|
||||
org.bson.*;version="0",
|
||||
org.slf4j.*;version="${slf4j:[=.=.=,+1.0.0)}",
|
||||
org.springframework.*;version="${spring30:[=.=.=.=,+1.0.0)}",
|
||||
org.springframework.*;version="${spring:[=.=.=.=,+1.0.0)}",
|
||||
org.springframework.data.*;version="${springdata.commons:[=.=.=.=,+1.0.0)}",
|
||||
org.springframework.data.mongodb.*;version="${project.version:[=.=.=.=,+1.0.0)}",
|
||||
org.w3c.dom.*;version="0"
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.3.0.RELEASE</version>
|
||||
<version>1.4.4.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.3.0.RELEASE</version>
|
||||
<version>1.4.4.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
Bundle-SymbolicName: org.springframework.data.mongodb.log4j
|
||||
Bundle-Name: Spring Data Mongo DB Log4J Appender
|
||||
Bundle-Vendor: SpringSource
|
||||
Bundle-Vendor: Pivotal Software, Inc.
|
||||
Bundle-ManifestVersion: 2
|
||||
Import-Package:
|
||||
sun.reflect;version="0";resolution:=optional
|
||||
Import-Template:
|
||||
com.mongodb.*;version="${mongo:[=.=,+1.0.0)}",
|
||||
com.mongodb.*;version="${mongo-osgi:[=.=.=,+1.0.0)}",
|
||||
org.apache.log4j.*;version="${log4j:[=.=.=,+1.0.0)}"
|
||||
|
||||
@@ -1,153 +1,176 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<context version="7.1.7.187">
|
||||
<scope name="spring-data-mongodb" type="Project">
|
||||
<element name="Filter" type="TypeFilterReferenceOverridden">
|
||||
<element name="org.springframework.data.mongodb.**" type="IncludeTypePattern"/>
|
||||
<context version="7.1.9.205">
|
||||
<scope type="Project" name="spring-data-mongodb">
|
||||
<element type="TypeFilterReferenceOverridden" name="Filter">
|
||||
<element type="IncludeTypePattern" name="org.springframework.data.mongodb.**"/>
|
||||
</element>
|
||||
<architecture>
|
||||
<element name="Config" type="Layer">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="**.config.**" type="WeakTypePattern"/>
|
||||
<element type="Layer" name="Config">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="WeakTypePattern" name="**.config.**"/>
|
||||
</element>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core"/>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|GridFS"/>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Monitoring"/>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Repositories"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|GridFS" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Monitoring" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Repositories" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element name="Repositories" type="Layer">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="**.repository.**" type="IncludeTypePattern"/>
|
||||
<element type="Layer" name="Repositories">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.repository.**"/>
|
||||
</element>
|
||||
<element name="API" type="Subsystem">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="**.repository.*" type="IncludeTypePattern"/>
|
||||
<element type="Subsystem" name="API">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.repository.*"/>
|
||||
</element>
|
||||
</element>
|
||||
<element name="Query" type="Subsystem">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="**.query.**" type="IncludeTypePattern"/>
|
||||
<element type="Subsystem" name="Query">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.query.**"/>
|
||||
</element>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|API"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|API" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element name="Implementation" type="Subsystem">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="**.support.**" type="IncludeTypePattern"/>
|
||||
<element type="Subsystem" name="Implementation">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.support.**"/>
|
||||
</element>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|API"/>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|Query"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|API" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|Query" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element name="Config" type="Subsystem">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="**.config.**" type="IncludeTypePattern"/>
|
||||
<element type="Subsystem" name="Config">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.config.**"/>
|
||||
</element>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|Implementation"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|Implementation" type="AllowedDependency"/>
|
||||
</element>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element name="Monitoring" type="Layer">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="**.monitor.**" type="IncludeTypePattern"/>
|
||||
<element type="Layer" name="Monitoring">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.monitor.**"/>
|
||||
</element>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element name="GridFS" type="Layer">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="**.gridfs.**" type="IncludeTypePattern"/>
|
||||
<element type="Layer" name="GridFS">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.gridfs.**"/>
|
||||
</element>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element name="Core" type="Layer">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="**.core.**" type="IncludeTypePattern"/>
|
||||
<element type="Layer" name="Core">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.core.**"/>
|
||||
</element>
|
||||
<element name="Mapping" type="Subsystem">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="**.mapping.**" type="IncludeTypePattern"/>
|
||||
<element type="Subsystem" name="Mapping">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.mapping.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element name="Geospatial" type="Subsystem">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="**.geo.**" type="IncludeTypePattern"/>
|
||||
<element type="Subsystem" name="Geospatial">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.geo.**"/>
|
||||
</element>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element name="Query" type="Subsystem">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="**.query.**" type="IncludeTypePattern"/>
|
||||
<element type="Subsystem" name="Query">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.query.**"/>
|
||||
</element>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element name="Index" type="Subsystem">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="**.index.**" type="IncludeTypePattern"/>
|
||||
<element type="Subsystem" name="Conversion">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.convert.**"/>
|
||||
</element>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping"/>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element name="Core" type="Subsystem">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="**.core.**" type="WeakTypePattern"/>
|
||||
<element type="Subsystem" name="SpEL">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.spel.**"/>
|
||||
</element>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial"/>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Index"/>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping"/>
|
||||
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query"/>
|
||||
</element>
|
||||
<element type="Subsystem" name="Aggregation">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.aggregation.**"/>
|
||||
</element>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Conversion" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|SpEL" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element type="Subsystem" name="Index">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="**.index.**"/>
|
||||
</element>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
|
||||
</element>
|
||||
<element type="Subsystem" name="Core">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="WeakTypePattern" name="**.core.**"/>
|
||||
</element>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Aggregation" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Conversion" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Index" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
|
||||
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
|
||||
</element>
|
||||
</element>
|
||||
<element name="API" type="Subsystem">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="org.springframework.data.mongodb.*" type="IncludeTypePattern"/>
|
||||
<element type="Subsystem" name="API">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="org.springframework.data.mongodb.*"/>
|
||||
</element>
|
||||
<stereotype name="Public"/>
|
||||
</element>
|
||||
</architecture>
|
||||
<workspace>
|
||||
<element name="src/main/java" type="JavaRootDirectory">
|
||||
<element type="JavaRootDirectory" name="src/main/java">
|
||||
<reference name="Project|spring-data-mongodb::BuildUnit|spring-data-mongodb"/>
|
||||
</element>
|
||||
<element name="target/classes" type="JavaRootDirectory">
|
||||
<element type="JavaRootDirectory" name="target/classes">
|
||||
<reference name="Project|spring-data-mongodb::BuildUnit|spring-data-mongodb"/>
|
||||
</element>
|
||||
</workspace>
|
||||
<physical>
|
||||
<element name="spring-data-mongodb" type="BuildUnit"/>
|
||||
<element type="BuildUnit" name="spring-data-mongodb"/>
|
||||
</physical>
|
||||
</scope>
|
||||
<scope name="External" type="External">
|
||||
<element name="Filter" type="TypeFilter">
|
||||
<element name="**" type="IncludeTypePattern"/>
|
||||
<element name="java.**" type="ExcludeTypePattern"/>
|
||||
<element name="javax.**" type="ExcludeTypePattern"/>
|
||||
<scope type="External" name="External">
|
||||
<element type="TypeFilter" name="Filter">
|
||||
<element type="IncludeTypePattern" name="**"/>
|
||||
<element type="ExcludeTypePattern" name="java.**"/>
|
||||
<element type="ExcludeTypePattern" name="javax.**"/>
|
||||
</element>
|
||||
<architecture>
|
||||
<element name="Spring" type="Subsystem">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="org.springframework.**" type="IncludeTypePattern"/>
|
||||
<element name="org.springframework.data.**" type="ExcludeTypePattern"/>
|
||||
<element type="Subsystem" name="Spring">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="org.springframework.**"/>
|
||||
<element type="ExcludeTypePattern" name="org.springframework.data.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element name="Spring Data Core" type="Subsystem">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="org.springframework.data.**" type="IncludeTypePattern"/>
|
||||
<element type="Subsystem" name="Spring Data Core">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="org.springframework.data.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element name="Mongo Java Driver" type="Subsystem">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="com.mongodb.**" type="IncludeTypePattern"/>
|
||||
<element name="org.bson.**" type="IncludeTypePattern"/>
|
||||
<element type="Subsystem" name="Mongo Java Driver">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="com.mongodb.**"/>
|
||||
<element type="IncludeTypePattern" name="org.bson.**"/>
|
||||
</element>
|
||||
</element>
|
||||
<element name="Querydsl" type="Subsystem">
|
||||
<element name="Assignment" type="TypeFilter">
|
||||
<element name="com.mysema.query.**" type="IncludeTypePattern"/>
|
||||
<element type="Subsystem" name="Querydsl">
|
||||
<element type="TypeFilter" name="Assignment">
|
||||
<element type="IncludeTypePattern" name="com.mysema.query.**"/>
|
||||
</element>
|
||||
</element>
|
||||
</architecture>
|
||||
</scope>
|
||||
<scope name="Global" type="Global">
|
||||
<element name="Configuration" type="Configuration"/>
|
||||
<element name="Filter" type="TypeFilter">
|
||||
<element name="**" type="IncludeTypePattern"/>
|
||||
<scope type="Global" name="Global">
|
||||
<element type="Configuration" name="Configuration"/>
|
||||
<element type="TypeFilter" name="Filter">
|
||||
<element type="IncludeTypePattern" name="**"/>
|
||||
</element>
|
||||
</scope>
|
||||
</context>
|
||||
|
||||
@@ -11,12 +11,13 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.3.0.RELEASE</version>
|
||||
<version>1.4.4.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
<validation>1.0.0.GA</validation>
|
||||
<objenesis>1.3</objenesis>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
@@ -25,22 +26,18 @@
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-tx</artifactId>
|
||||
<version>${spring}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-context</artifactId>
|
||||
<version>${spring}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-beans</artifactId>
|
||||
<version>${spring}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-core</artifactId>
|
||||
<version>${spring}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>commons-logging</groupId>
|
||||
@@ -51,11 +48,10 @@
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-expression</artifactId>
|
||||
<version>${spring}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Spring Data -->
|
||||
<dependency>
|
||||
<dependency>
|
||||
<groupId>${project.groupId}</groupId>
|
||||
<artifactId>spring-data-commons</artifactId>
|
||||
<version>${springdata.commons}</version>
|
||||
@@ -119,6 +115,13 @@
|
||||
<version>${validation}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.objenesis</groupId>
|
||||
<artifactId>objenesis</artifactId>
|
||||
<version>${objenesis}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
@@ -134,6 +137,13 @@
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>jul-to-slf4j</artifactId>
|
||||
<version>${slf4j}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
@@ -142,7 +152,7 @@
|
||||
<plugin>
|
||||
<groupId>com.mysema.maven</groupId>
|
||||
<artifactId>apt-maven-plugin</artifactId>
|
||||
<version>1.0.8</version>
|
||||
<version>${apt}</version>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>com.mysema.querydsl</groupId>
|
||||
|
||||
@@ -0,0 +1,34 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb;

import org.springframework.dao.UncategorizedDataAccessException;

/**
 * @author Oliver Gierke
 */
public class LazyLoadingException extends UncategorizedDataAccessException {

	private static final long serialVersionUID = -7089224903873220037L;

	/**
	 * @param msg
	 * @param cause
	 */
	public LazyLoadingException(String msg, Throwable cause) {
		super(msg, cause);
	}
}
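A hedged sketch of where callers would encounter the new exception: with a lazily resolved `@DBRef`, the referenced document is only read on first method call, and a failure at that point surfaces as a `LazyLoadingException`. The `Person`/`Account` types, field names, and `@DBRef(lazy = true)` usage are illustrative assumptions based on the 1.4 feature set, not part of this diff.

```java
import org.springframework.data.mongodb.LazyLoadingException;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Account {

	String iban;

	public String getIban() {
		return iban;
	}
}

@Document
class Person {

	String id;

	@DBRef(lazy = true) Account account;

	public Account getAccount() {
		return account;
	}
}

class AccountService {

	void printIban(MongoTemplate template, String personId) {

		// the account property holds a proxy until one of its methods is invoked
		Person person = template.findById(personId, Person.class);

		try {
			System.out.println(person.getAccount().getIban());
		} catch (LazyLoadingException e) {
			// resolution of the referenced document failed on first access
			System.err.println("Could not resolve lazy reference: " + e.getMessage());
		}
	}
}
```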
@@ -1,6 +1,23 @@
+/*
+ * Copyright 2011-2013 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package org.springframework.data.mongodb;

import org.springframework.dao.DataAccessException;
+import org.springframework.dao.support.PersistenceExceptionTranslator;
+import org.springframework.data.mongodb.core.MongoExceptionTranslator;

import com.mongodb.DB;

@@ -8,6 +25,7 @@ import com.mongodb.DB;
 * Interface for factories creating {@link DB} instances.
 *
 * @author Mark Pollack
+ * @author Thomas Darimont
 */
public interface MongoDbFactory {

@@ -27,4 +45,11 @@ public interface MongoDbFactory {
	 * @throws DataAccessException
	 */
	DB getDb(String dbName) throws DataAccessException;
+
+	/**
+	 * Exposes a shared {@link MongoExceptionTranslator}.
+	 *
+	 * @return will never be {@literal null}.
+	 */
+	PersistenceExceptionTranslator getExceptionTranslator();
}
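A sketch of how client code might combine the factory's `getDb(...)` methods with the newly exposed exception translator; the `CollectionCounter` class and its method are illustrative assumptions, not part of this change.

```java
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.MongoDbFactory;

import com.mongodb.DB;

class CollectionCounter {

	private final MongoDbFactory factory;

	CollectionCounter(MongoDbFactory factory) {
		this.factory = factory;
	}

	long countDocuments(String collection) {

		DB db = factory.getDb();

		try {
			return db.getCollection(collection).count();
		} catch (RuntimeException e) {
			// translate raw driver exceptions into Spring's DataAccessException hierarchy
			PersistenceExceptionTranslator translator = factory.getExceptionTranslator();
			DataAccessException translated = translator.translateExceptionIfPossible(e);
			throw translated != null ? translated : e;
		}
	}
}
```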
@@ -28,9 +28,12 @@ import org.springframework.core.type.filter.AnnotationTypeFilter;
import org.springframework.data.annotation.Persistent;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mongodb.core.mapping.Document;

@@ -47,6 +50,7 @@ import com.mongodb.Mongo;
 *
 * @author Mark Pollack
 * @author Oliver Gierke
+ * @author Thomas Darimont
 */
@Configuration
public abstract class AbstractMongoConfiguration {

@@ -58,6 +62,16 @@ public abstract class AbstractMongoConfiguration {
	 */
	protected abstract String getDatabaseName();

+	/**
+	 * Return the name of the authentication database to use. Defaults to {@literal null} and will turn into the value
+	 * returned by {@link #getDatabaseName()} later on effectively.
+	 *
+	 * @return
+	 */
+	protected String getAuthenticationDatabaseName() {
+		return null;
+	}
+
	/**
	 * Return the {@link Mongo} instance to connect to. Annotate with {@link Bean} in case you want to expose a
	 * {@link Mongo} instance to the {@link org.springframework.context.ApplicationContext}.

@@ -88,15 +102,8 @@ public abstract class AbstractMongoConfiguration {
	 * @throws Exception
	 */
	@Bean
-	public SimpleMongoDbFactory mongoDbFactory() throws Exception {
-
-		UserCredentials credentials = getUserCredentials();
-
-		if (credentials == null) {
-			return new SimpleMongoDbFactory(mongo(), getDatabaseName());
-		} else {
-			return new SimpleMongoDbFactory(mongo(), getDatabaseName(), credentials);
-		}
+	public MongoDbFactory mongoDbFactory() throws Exception {
+		return new SimpleMongoDbFactory(mongo(), getDatabaseName(), getUserCredentials(), getAuthenticationDatabaseName());
	}

	/**
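A hedged sketch of a configuration subclass exercising the new authentication-database hook; the database name, host, and credentials are placeholders, and `getUserCredentials()` is assumed to be the existing template method the removed code also called.

```java
import org.springframework.context.annotation.Configuration;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;

import com.mongodb.Mongo;
import com.mongodb.MongoClient;

@Configuration
class SecuredMongoConfig extends AbstractMongoConfiguration {

	@Override
	protected String getDatabaseName() {
		return "orders"; // the database the application works with
	}

	@Override
	public Mongo mongo() throws Exception {
		return new MongoClient("localhost");
	}

	@Override
	protected UserCredentials getUserCredentials() {
		return new UserCredentials("app-user", "secret");
	}

	@Override
	protected String getAuthenticationDatabaseName() {
		return "admin"; // credentials are verified against this database instead of "orders"
	}
}
```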
@@ -109,7 +116,9 @@ public abstract class AbstractMongoConfiguration {
	 * entities.
	 */
	protected String getMappingBasePackage() {
-		return getClass().getPackage().getName();
+
+		Package mappingBasePackage = getClass().getPackage();
+		return mappingBasePackage == null ? null : mappingBasePackage.getName();
	}

	/**
@@ -178,8 +187,11 @@ public abstract class AbstractMongoConfiguration {
	 */
	@Bean
	public MappingMongoConverter mappingMongoConverter() throws Exception {
-		MappingMongoConverter converter = new MappingMongoConverter(mongoDbFactory(), mongoMappingContext());
+
+		DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
+		MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
		converter.setCustomConversions(customConversions());
+
		return converter;
	}
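Outside of `AbstractMongoConfiguration`, the same `DbRefResolver`-based constructor shown in the hunk can be wired by hand. A minimal sketch, assuming a local MongoDB and an illustrative database name:

```java
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

import com.mongodb.MongoClient;

public class ManualSetup {

	public static void main(String[] args) throws Exception {

		MongoDbFactory factory = new SimpleMongoDbFactory(new MongoClient(), "database");

		MongoMappingContext mappingContext = new MongoMappingContext();
		mappingContext.afterPropertiesSet();

		// the DbRefResolver now encapsulates how @DBRef documents are fetched for the converter
		DbRefResolver dbRefResolver = new DefaultDbRefResolver(factory);
		MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
		converter.afterPropertiesSet();

		MongoTemplate template = new MongoTemplate(factory, converter);
		System.out.println(template.getCollectionNames());
	}
}
```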
@@ -0,0 +1,70 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.config;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.context.annotation.Import;
import org.springframework.data.auditing.DateTimeProvider;
import org.springframework.data.domain.AuditorAware;

/**
 * Annotation to enable auditing in MongoDB via annotation configuration.
 *
 * @author Thomas Darimont
 * @author Oliver Gierke
 */
@Inherited
@Documented
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Import(MongoAuditingRegistrar.class)
public @interface EnableMongoAuditing {

	/**
	 * Configures the {@link AuditorAware} bean to be used to lookup the current principal.
	 *
	 * @return
	 */
	String auditorAwareRef() default "";

	/**
	 * Configures whether the creation and modification dates are set. Defaults to {@literal true}.
	 *
	 * @return
	 */
	boolean setDates() default true;

	/**
	 * Configures whether the entity shall be marked as modified on creation. Defaults to {@literal true}.
	 *
	 * @return
	 */
	boolean modifyOnCreate() default true;

	/**
	 * Configures a {@link DateTimeProvider} bean name that allows customizing the {@link org.joda.time.DateTime} to be
	 * used for setting creation and modification dates.
	 *
	 * @return
	 */
	String dateTimeProviderRef() default "";
}
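A hedged usage sketch for the new annotation. The configuration class, the `auditorProvider` bean name, and the `Order` document are illustrative only; the auditing annotations `@CreatedBy`, `@CreatedDate`, and `@LastModifiedDate` come from Spring Data Commons.

```java
import java.util.Date;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.annotation.CreatedBy;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.annotation.LastModifiedDate;
import org.springframework.data.domain.AuditorAware;
import org.springframework.data.mongodb.config.EnableMongoAuditing;
import org.springframework.data.mongodb.core.mapping.Document;

@Configuration
@EnableMongoAuditing(auditorAwareRef = "auditorProvider")
class AuditConfig {

	@Bean
	public AuditorAware<String> auditorProvider() {
		return new AuditorAware<String>() {
			public String getCurrentAuditor() {
				// a real application would return the currently authenticated user here
				return "system";
			}
		};
	}
}

@Document
class Order {

	@CreatedBy String createdBy;          // filled from the AuditorAware bean
	@CreatedDate Date createdAt;          // set on first persist (see setDates()/modifyOnCreate())
	@LastModifiedDate Date lastModified;  // refreshed on every save
}
```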
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -57,6 +57,7 @@ class GridFsTemplateParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
String converterRef = element.getAttribute("converter-ref");
|
||||
String dbFactoryRef = element.getAttribute("db-factory-ref");
|
||||
String bucket = element.getAttribute("bucket");
|
||||
|
||||
BeanDefinitionBuilder gridFsTemplateBuilder = BeanDefinitionBuilder.genericBeanDefinition(GridFsTemplate.class);
|
||||
|
||||
@@ -72,6 +73,10 @@ class GridFsTemplateParser extends AbstractBeanDefinitionParser {
|
||||
gridFsTemplateBuilder.addConstructorArgReference(BeanNames.DEFAULT_CONVERTER_BEAN_NAME);
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(bucket)) {
|
||||
gridFsTemplateBuilder.addConstructorArgValue(bucket);
|
||||
}
|
||||
|
||||
return (AbstractBeanDefinition) helper.getComponentIdButFallback(gridFsTemplateBuilder, BeanNames.GRID_FS_TEMPLATE)
|
||||
.getBeanDefinition();
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -71,11 +71,12 @@ import org.w3c.dom.Element;
|
||||
* @author Oliver Gierke
|
||||
* @author Maciej Walkowiak
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
|
||||
private static final String BASE_PACKAGE = "base-package";
|
||||
private static final boolean jsr303Present = ClassUtils.isPresent("javax.validation.Validator",
|
||||
private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("javax.validation.Validator",
|
||||
MappingMongoConverterParser.class.getClassLoader());
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -83,8 +84,11 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
*/
|
||||
public BeanDefinition parse(Element element, ParserContext parserContext) {
|
||||
|
||||
BeanDefinitionRegistry registry = parserContext.getRegistry();
|
||||
if (parserContext.isNested()) {
|
||||
parserContext.getReaderContext().error("Mongo Converter must not be defined as nested bean.", element);
|
||||
}
|
||||
|
||||
BeanDefinitionRegistry registry = parserContext.getRegistry();
|
||||
String id = element.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE);
|
||||
id = StringUtils.hasText(id) ? id : "mappingConverter";
|
||||
|
||||
@@ -166,7 +170,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
|
||||
private RuntimeBeanReference getValidator(Object source, ParserContext parserContext) {
|
||||
|
||||
if (!jsr303Present) {
|
||||
if (!JSR_303_PRESENT) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -195,7 +199,8 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
BeanDefinitionBuilder mappingContextBuilder = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(MongoMappingContext.class);
|
||||
|
||||
Set<String> classesToAdd = getInititalEntityClasses(element, mappingContextBuilder);
|
||||
Set<String> classesToAdd = getInititalEntityClasses(element);
|
||||
|
||||
if (classesToAdd != null) {
|
||||
mappingContextBuilder.addPropertyValue("initialEntitySet", classesToAdd);
|
||||
}
|
||||
@@ -262,7 +267,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
return null;
|
||||
}
|
||||
|
||||
private static Set<String> getInititalEntityClasses(Element element, BeanDefinitionBuilder builder) {
|
||||
private static Set<String> getInititalEntityClasses(Element element) {
|
||||
|
||||
String basePackage = element.getAttribute(BASE_PACKAGE);
|
||||
|
||||
|
||||
@@ -0,0 +1,131 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import static org.springframework.beans.factory.config.BeanDefinition.*;
|
||||
import static org.springframework.data.mongodb.config.BeanNames.*;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.beans.factory.support.RootBeanDefinition;
|
||||
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
|
||||
import org.springframework.core.type.AnnotationMetadata;
|
||||
import org.springframework.data.auditing.IsNewAwareAuditingHandler;
|
||||
import org.springframework.data.auditing.config.AnnotationAuditingConfiguration;
|
||||
import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
|
||||
import org.springframework.data.config.ParsingUtils;
|
||||
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
|
||||
import org.springframework.data.support.IsNewStrategyFactory;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link ImportBeanDefinitionRegistrar} to enable {@link EnableMongoAuditing} annotation.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
|
||||
*/
|
||||
@Override
|
||||
protected Class<? extends Annotation> getAnnotation() {
|
||||
return EnableMongoAuditing.class;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerBeanDefinitions(org.springframework.core.type.AnnotationMetadata, org.springframework.beans.factory.support.BeanDefinitionRegistry)
|
||||
*/
|
||||
@Override
|
||||
public void registerBeanDefinitions(AnnotationMetadata annotationMetadata, BeanDefinitionRegistry registry) {
|
||||
|
||||
Assert.notNull(annotationMetadata, "AnnotationMetadata must not be null!");
|
||||
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
|
||||
|
||||
defaultDependenciesIfNecessary(registry, annotationMetadata);
|
||||
super.registerBeanDefinitions(annotationMetadata, registry);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AnnotationAuditingConfiguration)
|
||||
*/
|
||||
@Override
|
||||
protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AnnotationAuditingConfiguration configuration) {
|
||||
|
||||
Assert.notNull(configuration, "AnnotationAuditingConfiguration must not be null!");
|
||||
|
||||
return configureDefaultAuditHandlerAttributes(configuration,
|
||||
BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class)).addConstructorArgReference(
|
||||
BeanNames.IS_NEW_STRATEGY_FACTORY);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry)
|
||||
*/
|
||||
@Override
|
||||
protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition,
|
||||
BeanDefinitionRegistry registry) {
|
||||
|
||||
Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!");
|
||||
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
|
||||
|
||||
registerInfrastructureBeanWithId(BeanDefinitionBuilder.rootBeanDefinition(AuditingEventListener.class)
|
||||
.addConstructorArgValue(auditingHandlerDefinition).getRawBeanDefinition(),
|
||||
AuditingEventListener.class.getName(), registry);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register default bean definitions for a {@link MongoMappingContext} and an {@link IsNewStrategyFactory} in case we
|
||||
* don't find beans with the assumed names in the registry.
|
||||
*
|
||||
* @param registry the {@link BeanDefinitionRegistry} to use to register the components into.
|
||||
* @param source the source which the registered components shall be registered with
|
||||
*/
|
||||
private void defaultDependenciesIfNecessary(BeanDefinitionRegistry registry, Object source) {
|
||||
|
||||
if (!registry.containsBeanDefinition(MAPPING_CONTEXT)) {
|
||||
|
||||
RootBeanDefinition definition = new RootBeanDefinition(MongoMappingContext.class);
|
||||
definition.setRole(ROLE_INFRASTRUCTURE);
|
||||
definition.setSource(source);
|
||||
|
||||
registry.registerBeanDefinition(MAPPING_CONTEXT, definition);
|
||||
}
|
||||
|
||||
if (!registry.containsBeanDefinition(IS_NEW_STRATEGY_FACTORY)) {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder
|
||||
.rootBeanDefinition(MappingContextIsNewStrategyFactory.class);
|
||||
builder.addConstructorArgReference(MAPPING_CONTEXT);
|
||||
|
||||
AbstractBeanDefinition definition = ParsingUtils.getSourceBeanDefinition(builder, source);
|
||||
definition.setRole(ROLE_INFRASTRUCTURE);
|
||||
|
||||
registry.registerBeanDefinition(IS_NEW_STRATEGY_FACTORY, definition);
|
||||
}
|
||||
}
|
||||
}
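For orientation, a minimal configuration that would exercise this registrar could look like the sketch below; the configuration class, auditor value and `Account` document are invented for illustration and are not part of the change.

```java
import java.util.Date;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.annotation.LastModifiedDate;
import org.springframework.data.domain.AuditorAware;
import org.springframework.data.mongodb.config.EnableMongoAuditing;

@Configuration
@EnableMongoAuditing(auditorAwareRef = "auditorProvider")
class AuditingConfig {

	// Supplies the current auditor for @CreatedBy/@LastModifiedBy fields.
	@Bean
	public AuditorAware<String> auditorProvider() {
		return new AuditorAware<String>() {
			public String getCurrentAuditor() {
				return "system";
			}
		};
	}
}

// Hypothetical audited document; the registered AuditingEventListener populates these fields on save.
class Account {

	@CreatedDate Date createdAt;
	@LastModifiedDate Date lastModifiedAt;
}
```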
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 by the original author(s).
|
||||
* Copyright 2011-2013 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -41,6 +41,7 @@ import com.mongodb.MongoURI;
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
@@ -70,6 +71,7 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
String uri = element.getAttribute("uri");
|
||||
String mongoRef = element.getAttribute("mongo-ref");
|
||||
String dbname = element.getAttribute("dbname");
|
||||
|
||||
BeanDefinition userCredentials = getUserCredentialsBeanDefinition(element, parserContext);
|
||||
|
||||
// Common setup
|
||||
@@ -92,12 +94,9 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
dbFactoryBuilder.addConstructorArgValue(registerMongoBeanDefinition(element, parserContext));
|
||||
}
|
||||
|
||||
dbname = StringUtils.hasText(dbname) ? dbname : "db";
|
||||
dbFactoryBuilder.addConstructorArgValue(dbname);
|
||||
|
||||
if (userCredentials != null) {
|
||||
dbFactoryBuilder.addConstructorArgValue(userCredentials);
|
||||
}
|
||||
dbFactoryBuilder.addConstructorArgValue(StringUtils.hasText(dbname) ? dbname : "db");
|
||||
dbFactoryBuilder.addConstructorArgValue(userCredentials);
|
||||
dbFactoryBuilder.addConstructorArgValue(element.getAttribute("authentication-dbname"));
|
||||
|
||||
BeanDefinitionBuilder writeConcernPropertyEditorBuilder = getWriteConcernPropertyEditorBuilder();
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -33,6 +33,7 @@ import org.w3c.dom.Element;
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
abstract class MongoParsingUtils {
|
||||
|
||||
@@ -79,6 +80,8 @@ abstract class MongoParsingUtils {
|
||||
setPropertyValue(optionsDefBuilder, optionsElement, "write-timeout", "writeTimeout");
|
||||
setPropertyValue(optionsDefBuilder, optionsElement, "write-fsync", "writeFsync");
|
||||
setPropertyValue(optionsDefBuilder, optionsElement, "slave-ok", "slaveOk");
|
||||
setPropertyValue(optionsDefBuilder, optionsElement, "ssl", "ssl");
|
||||
setPropertyReference(optionsDefBuilder, optionsElement, "ssl-socket-factory-ref", "sslSocketFactory");
|
||||
|
||||
mongoBuilder.addPropertyValue("mongoOptions", optionsDefBuilder.getBeanDefinition());
|
||||
return true;
|
||||
|
||||
@@ -16,12 +16,14 @@
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
import java.net.InetAddress;
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ServerAddress;
|
||||
@@ -35,6 +37,11 @@ import com.mongodb.ServerAddress;
|
||||
*/
|
||||
public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/**
|
||||
* A port is a number without a leading 0 at the end of the address, preceded by just a single colon (:).
|
||||
*/
|
||||
private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";
|
||||
private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address {} '{}'. Check your replica set configuration!";
|
||||
private static final Logger LOG = LoggerFactory.getLogger(ServerAddressPropertyEditor.class);
|
||||
|
||||
/*
|
||||
@@ -77,22 +84,53 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
*/
|
||||
private ServerAddress parseServerAddress(String source) {
|
||||
|
||||
String[] hostAndPort = StringUtils.delimitedListToStringArray(source.trim(), ":");
|
||||
if (!StringUtils.hasText(source)) {
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!StringUtils.hasText(source) || hostAndPort.length > 2) {
|
||||
LOG.warn("Could not parse address source '{}'. Check your replica set configuration!", source);
|
||||
String[] hostAndPort = extractHostAddressAndPort(source.trim());
|
||||
|
||||
if (hostAndPort.length > 2) {
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
return hostAndPort.length == 1 ? new ServerAddress(hostAndPort[0]) : new ServerAddress(hostAndPort[0],
|
||||
Integer.parseInt(hostAndPort[1]));
|
||||
InetAddress hostAddress = InetAddress.getByName(hostAndPort[0]);
|
||||
Integer port = hostAndPort.length == 1 ? null : Integer.parseInt(hostAndPort[1]);
|
||||
|
||||
return port == null ? new ServerAddress(hostAddress) : new ServerAddress(hostAddress, port);
|
||||
} catch (UnknownHostException e) {
|
||||
LOG.warn("Could not parse host '{}'. Check your replica set configuration!", hostAndPort[0]);
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]);
|
||||
} catch (NumberFormatException e) {
|
||||
LOG.warn("Could not parse port '{}'. Check your replica set configuration!", hostAndPort[1]);
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the host and port from the given {@link String}.
|
||||
*
|
||||
* @param addressAndPortSource must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private String[] extractHostAddressAndPort(String addressAndPortSource) {
|
||||
|
||||
Assert.notNull(addressAndPortSource, "Address and port source must not be null!");
|
||||
|
||||
String[] hostAndPort = addressAndPortSource.split(HOST_PORT_SPLIT_PATTERN);
|
||||
String hostAddress = hostAndPort[0];
|
||||
|
||||
if (isHostAddressInIPv6BracketNotation(hostAddress)) {
|
||||
hostAndPort[0] = hostAddress.substring(1, hostAddress.length() - 1);
|
||||
}
|
||||
|
||||
return hostAndPort;
|
||||
}
|
||||
|
||||
private boolean isHostAddressInIPv6BracketNotation(String hostAddress) {
|
||||
return hostAddress.startsWith("[") && hostAddress.endsWith("]");
|
||||
}
|
||||
}
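The split pattern introduced above is the interesting part of this change; the following standalone sketch (sample addresses are made up) shows how it separates host and port while leaving IPv6 addresses in bracket notation intact.

```java
import java.util.Arrays;

// Illustrative only: demonstrates the behaviour of the HOST_PORT_SPLIT_PATTERN shown above.
public class HostPortSplitExample {

	private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";

	public static void main(String[] args) {

		// ["localhost", "27017"] - the lookbehind/lookahead keep the colon out of the result.
		System.out.println(Arrays.toString("localhost:27017".split(HOST_PORT_SPLIT_PATTERN)));

		// ["[fe80::1]", "27018"] - only the trailing colon matches; the editor then strips the brackets.
		System.out.println(Arrays.toString("[fe80::1]:27018".split(HOST_PORT_SPLIT_PATTERN)));

		// ["[fe80::1]"] - no port given, so the address comes back as a single element.
		System.out.println(Arrays.toString("[fe80::1]".split(HOST_PORT_SPLIT_PATTERN)));
	}
}
```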
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.springframework.data.domain.Sort.Direction.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
@@ -22,7 +24,6 @@ import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.core.index.IndexDefinition;
|
||||
import org.springframework.data.mongodb.core.index.IndexField;
|
||||
import org.springframework.data.mongodb.core.index.IndexInfo;
|
||||
import org.springframework.data.mongodb.core.query.Order;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBCollection;
|
||||
@@ -34,9 +35,13 @@ import com.mongodb.MongoException;
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Komi Innocent
|
||||
*/
|
||||
public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
private static final Double ONE = Double.valueOf(1);
|
||||
private static final Double MINUS_ONE = Double.valueOf(-1);
|
||||
|
||||
private final MongoOperations mongoOperations;
|
||||
private final String collectionName;
|
||||
|
||||
@@ -135,12 +140,17 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
Object value = keyDbObject.get(key);
|
||||
|
||||
if (Integer.valueOf(1).equals(value)) {
|
||||
indexFields.add(IndexField.create(key, Order.ASCENDING));
|
||||
} else if (Integer.valueOf(-1).equals(value)) {
|
||||
indexFields.add(IndexField.create(key, Order.DESCENDING));
|
||||
} else if ("2d".equals(value)) {
|
||||
if ("2d".equals(value)) {
|
||||
indexFields.add(IndexField.geo(key));
|
||||
} else {
|
||||
|
||||
Double keyValue = new Double(value.toString());
|
||||
|
||||
if (ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, ASC));
|
||||
} else if (MINUS_ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, DESC));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -27,6 +27,7 @@ import com.mongodb.Mongo;
|
||||
* Mongo server administration exposed via JMX annotations
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@ManagedResource(description = "Mongo Admin Operations")
|
||||
public class MongoAdmin implements MongoAdminOperations {
|
||||
@@ -34,6 +35,7 @@ public class MongoAdmin implements MongoAdminOperations {
|
||||
private final Mongo mongo;
|
||||
private String username;
|
||||
private String password;
|
||||
private String authenticationDatabaseName;
|
||||
|
||||
public MongoAdmin(Mongo mongo) {
|
||||
Assert.notNull(mongo);
|
||||
@@ -82,7 +84,16 @@ public class MongoAdmin implements MongoAdminOperations {
|
||||
this.password = password;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the authenticationDatabaseName to use to authenticate with the Mongo database.
|
||||
*
|
||||
* @param authenticationDatabaseName The authenticationDatabaseName to use.
|
||||
*/
|
||||
public void setAuthenticationDatabaseName(String authenticationDatabaseName) {
|
||||
this.authenticationDatabaseName = authenticationDatabaseName;
|
||||
}
|
||||
|
||||
DB getDB(String databaseName) {
|
||||
return MongoDbUtils.getDB(mongo, databaseName, new UserCredentials(username, password));
|
||||
return MongoDbUtils.getDB(mongo, databaseName, new UserCredentials(username, password), authenticationDatabaseName);
|
||||
}
|
||||
}
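A rough usage sketch of the new setter; it assumes the existing `setUsername`/`setPassword` setters on `MongoAdmin`, and the host, credentials and database names are placeholders.

```java
import com.mongodb.Mongo;

import org.springframework.data.mongodb.core.MongoAdmin;

public class MongoAdminExample {

	public static void main(String[] args) throws Exception {

		MongoAdmin admin = new MongoAdmin(new Mongo("localhost"));
		admin.setUsername("admin");
		admin.setPassword("secret");

		// New with this change: authenticate against "admin" rather than the database being administered.
		admin.setAuthenticationDatabaseName("admin");

		admin.dropDatabase("scratch");
	}
}
```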
|
||||
|
||||
@@ -1,16 +1,34 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.jmx.export.annotation.ManagedOperation;
|
||||
|
||||
/**
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public interface MongoAdminOperations {
|
||||
|
||||
@ManagedOperation
|
||||
public abstract void dropDatabase(String databaseName);
|
||||
void dropDatabase(String databaseName);
|
||||
|
||||
@ManagedOperation
|
||||
public abstract void createDatabase(String databaseName);
|
||||
void createDatabase(String databaseName);
|
||||
|
||||
@ManagedOperation
|
||||
public abstract String getDatabaseStats(String databaseName);
|
||||
|
||||
}
|
||||
String getDatabaseStats(String databaseName);
|
||||
}
|
||||
|
||||
@@ -33,6 +33,7 @@ import com.mongodb.Mongo;
|
||||
* @author Graeme Rocher
|
||||
* @author Oliver Gierke
|
||||
* @author Randy Watler
|
||||
* @author Thomas Darimont
|
||||
* @since 1.0
|
||||
*/
|
||||
public abstract class MongoDbUtils {
|
||||
@@ -54,7 +55,7 @@ public abstract class MongoDbUtils {
|
||||
* @return the {@link DB} connection
|
||||
*/
|
||||
public static DB getDB(Mongo mongo, String databaseName) {
|
||||
return doGetDB(mongo, databaseName, UserCredentials.NO_CREDENTIALS, true);
|
||||
return doGetDB(mongo, databaseName, UserCredentials.NO_CREDENTIALS, true, databaseName);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -66,15 +67,22 @@ public abstract class MongoDbUtils {
|
||||
* @return the {@link DB} connection
|
||||
*/
|
||||
public static DB getDB(Mongo mongo, String databaseName, UserCredentials credentials) {
|
||||
return getDB(mongo, databaseName, credentials, databaseName);
|
||||
}
|
||||
|
||||
public static DB getDB(Mongo mongo, String databaseName, UserCredentials credentials,
|
||||
String authenticationDatabaseName) {
|
||||
|
||||
Assert.notNull(mongo, "No Mongo instance specified!");
|
||||
Assert.hasText(databaseName, "Database name must be given!");
|
||||
Assert.notNull(credentials, "Credentials must not be null, use UserCredentials.NO_CREDENTIALS!");
|
||||
Assert.hasText(authenticationDatabaseName, "Authentication database name must not be null or empty!");
|
||||
|
||||
return doGetDB(mongo, databaseName, credentials, true);
|
||||
return doGetDB(mongo, databaseName, credentials, true, authenticationDatabaseName);
|
||||
}
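A hedged example of the new four-argument overload: work against one database while authenticating against another (host, database and credential values are placeholders).

```java
import com.mongodb.DB;
import com.mongodb.Mongo;

import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mongodb.core.MongoDbUtils;

public class AuthenticationDatabaseExample {

	public static void main(String[] args) throws Exception {

		Mongo mongo = new Mongo("localhost");

		// Work against the "app" database but authenticate against "admin".
		DB db = MongoDbUtils.getDB(mongo, "app", new UserCredentials("user", "secret"), "admin");

		System.out.println(db.getName());
	}
}
```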
|
||||
|
||||
private static DB doGetDB(Mongo mongo, String databaseName, UserCredentials credentials, boolean allowCreate) {
|
||||
private static DB doGetDB(Mongo mongo, String databaseName, UserCredentials credentials, boolean allowCreate,
|
||||
String authenticationDatabaseName) {
|
||||
|
||||
DbHolder dbHolder = (DbHolder) TransactionSynchronizationManager.getResource(mongo);
|
||||
|
||||
@@ -103,14 +111,16 @@ public abstract class MongoDbUtils {
|
||||
DB db = mongo.getDB(databaseName);
|
||||
boolean credentialsGiven = credentials.hasUsername() && credentials.hasPassword();
|
||||
|
||||
synchronized (db) {
|
||||
DB authDb = databaseName.equals(authenticationDatabaseName) ? db : mongo.getDB(authenticationDatabaseName);
|
||||
|
||||
if (credentialsGiven && !db.isAuthenticated()) {
|
||||
synchronized (authDb) {
|
||||
|
||||
if (credentialsGiven && !authDb.isAuthenticated()) {
|
||||
|
||||
String username = credentials.getUsername();
|
||||
String password = credentials.hasPassword() ? credentials.getPassword() : null;
|
||||
|
||||
if (!db.authenticate(username, password == null ? null : password.toCharArray())) {
|
||||
if (!authDb.authenticate(username, password == null ? null : password.toCharArray())) {
|
||||
throw new CannotGetMongoDbConnectionException("Failed to authenticate to database [" + databaseName + "], "
|
||||
+ credentials.toString(), databaseName, credentials);
|
||||
}
|
||||
|
||||
@@ -60,6 +60,11 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
|
||||
return new DataAccessResourceFailureException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
// Driver 2.12 throws this to indicate connection problems. String comparison to avoid a hard dependency.
|
||||
if (ex.getClass().getName().equals("com.mongodb.MongoServerSelectionException")) {
|
||||
return new DataAccessResourceFailureException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
if (ex instanceof MongoInternalException) {
|
||||
return new InvalidDataAccessResourceUsageException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
@@ -468,10 +468,32 @@ public interface MongoOperations {
|
||||
*/
|
||||
<T> T findOne(Query query, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Determine whether the result of the given {@link Query} contains at least one element.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find a record.
|
||||
* @param collectionName name of the collection to check for objects.
|
||||
* @return
|
||||
*/
|
||||
boolean exists(Query query, String collectionName);
|
||||
|
||||
/**
|
||||
* Determine whether the result of the given {@link Query} contains at least one element.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find a record.
|
||||
* @param entityClass the parameterized type.
|
||||
* @return
|
||||
*/
|
||||
boolean exists(Query query, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Determine whether the result of the given {@link Query} contains at least one element.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find a record.
|
||||
* @param entityClass the parameterized type.
|
||||
* @param collectionName name of the collection to check for objects.
|
||||
* @return
|
||||
*/
|
||||
boolean exists(Query query, Class<?> entityClass, String collectionName);
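A small sketch of how the new `exists(…)` methods are meant to be used; the `Person` type and field name are invented for illustration.

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Query;

public class ExistsExample {

	// Uses the collection derived from the Person class; the String overloads target a collection directly.
	public boolean hasPersonNamed(MongoOperations operations, String lastname) {

		Query query = new Query(where("lastname").is(lastname));
		return operations.exists(query, Person.class);
	}

	static class Person {
		String lastname;
	}
}
```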
|
||||
|
||||
/**
|
||||
@@ -529,12 +551,58 @@ public interface MongoOperations {
|
||||
*/
|
||||
<T> T findById(Object id, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
* @param update the {@link Update} to apply on matching documents.
|
||||
* @param entityClass the parameterized type.
|
||||
* @return
|
||||
*/
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
* @param update the {@link Update} to apply on matching documents.
|
||||
* @param entityClass the parameterized type.
|
||||
* @param collectionName the collection to query.
|
||||
* @return
|
||||
*/
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
* @param update the {@link Update} to apply on matching documents.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information.
|
||||
* @param entityClass the parameterized type.
|
||||
* @return
|
||||
*/
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
* @param update the {@link Update} to apply on matching documents.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information.
|
||||
* @param entityClass the parameterized type.
|
||||
* @param collectionName the collection to query.
|
||||
* @return
|
||||
*/
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
String collectionName);
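For illustration, a call against the `findAndModify(…)` overload that takes `FindAndModifyOptions`; the entity and field names are invented.

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.FindAndModifyOptions;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

public class FindAndModifyExample {

	public Person incrementVisits(MongoOperations operations, String lastname) {

		Query query = new Query(where("lastname").is(lastname));
		Update update = new Update().inc("visits", 1);

		// returnNew(true) hands back the document state after the update has been applied.
		return operations.findAndModify(query, update, new FindAndModifyOptions().returnNew(true), Person.class);
	}

	static class Person {
		String lastname;
		int visits;
	}
}
```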
|
||||
|
||||
@@ -598,9 +666,9 @@ public interface MongoOperations {
|
||||
* <p/>
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Spring 3.0's new Type Conversion API.
|
||||
* See <a href="http://static.springsource.org/spring/docs/3.0.x/reference/validation.html#core-convert">Spring 3 Type
|
||||
* Conversion"</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert"
|
||||
* >Spring's Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -655,9 +723,9 @@ public interface MongoOperations {
|
||||
* <p/>
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Spring 3.0's new Type Conversion API.
|
||||
* See <a href="http://static.springsource.org/spring/docs/3.0.x/reference/validation.html#core-convert">Spring 3 Type
|
||||
* Conversion"</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert"
|
||||
* >Spring's Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection
|
||||
*/
|
||||
@@ -672,9 +740,9 @@ public interface MongoOperations {
|
||||
* <p/>
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Spring 3.0's new Type Conversion API.
|
||||
* See <a href="http://static.springsource.org/spring/docs/3.0.x/reference/validation.html#core-convert">Spring 3 Type
|
||||
* Conversion"</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages the Type Conversion API. See <a
|
||||
* href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert">Spring's
|
||||
* Type Conversion</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection
|
||||
* @param collectionName name of the collection to store the object in
|
||||
@@ -814,6 +882,14 @@ public interface MongoOperations {
|
||||
*/
|
||||
void remove(Query query, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Remove all documents that match the provided query document criteria from the collection used to store the
|
||||
* entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query.
|
||||
*
|
||||
* @param query
|
||||
* @param entityClass
|
||||
* @param collectionName
|
||||
*/
|
||||
void remove(Query query, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,129 +15,92 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import com.mongodb.MongoOptions;
|
||||
import javax.net.ssl.SSLSocketFactory;
|
||||
|
||||
import org.springframework.beans.factory.FactoryBean;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
|
||||
import com.mongodb.MongoOptions;
|
||||
|
||||
/**
|
||||
* A factory bean for construction of a MongoOptions instance
|
||||
* A factory bean for construction of a {@link MongoOptions} instance.
|
||||
*
|
||||
* @author Graeme Rocher
|
||||
* @Author Mark Pollack
|
||||
* @author Mark Pollack
|
||||
* @author Mike Saavedra
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, InitializingBean {
|
||||
|
||||
private static final MongoOptions MONGO_OPTIONS = new MongoOptions();
|
||||
/**
|
||||
* number of connections allowed per host will block if run out
|
||||
*/
|
||||
private int connectionsPerHost = MONGO_OPTIONS.connectionsPerHost;
|
||||
private static final MongoOptions DEFAULT_MONGO_OPTIONS = new MongoOptions();
|
||||
|
||||
private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.connectionsPerHost;
|
||||
private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS.threadsAllowedToBlockForConnectionMultiplier;
|
||||
private int maxWaitTime = DEFAULT_MONGO_OPTIONS.maxWaitTime;
|
||||
private int connectTimeout = DEFAULT_MONGO_OPTIONS.connectTimeout;
|
||||
private int socketTimeout = DEFAULT_MONGO_OPTIONS.socketTimeout;
|
||||
private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.socketKeepAlive;
|
||||
private boolean autoConnectRetry = DEFAULT_MONGO_OPTIONS.autoConnectRetry;
|
||||
private long maxAutoConnectRetryTime = DEFAULT_MONGO_OPTIONS.maxAutoConnectRetryTime;
|
||||
private int writeNumber = DEFAULT_MONGO_OPTIONS.w;
|
||||
private int writeTimeout = DEFAULT_MONGO_OPTIONS.wtimeout;
|
||||
private boolean writeFsync = DEFAULT_MONGO_OPTIONS.fsync;
|
||||
private boolean slaveOk = DEFAULT_MONGO_OPTIONS.slaveOk;
|
||||
private boolean ssl;
|
||||
private SSLSocketFactory sslSocketFactory;
|
||||
|
||||
private MongoOptions options;
|
||||
|
||||
/**
|
||||
* multiplier for connectionsPerHost for # of threads that can block if connectionsPerHost is 10, and
|
||||
* threadsAllowedToBlockForConnectionMultiplier is 5, then 50 threads can block more than that and an exception will
|
||||
* be throw
|
||||
*/
|
||||
private int threadsAllowedToBlockForConnectionMultiplier = MONGO_OPTIONS.threadsAllowedToBlockForConnectionMultiplier;
|
||||
|
||||
/**
|
||||
* max wait time of a blocking thread for a connection
|
||||
*/
|
||||
private int maxWaitTime = MONGO_OPTIONS.maxWaitTime;
|
||||
|
||||
/**
|
||||
* connect timeout in milliseconds. 0 is default and infinite
|
||||
*/
|
||||
private int connectTimeout = MONGO_OPTIONS.connectTimeout;
|
||||
|
||||
/**
|
||||
* socket timeout. 0 is default and infinite
|
||||
*/
|
||||
private int socketTimeout = MONGO_OPTIONS.socketTimeout;
|
||||
|
||||
/**
|
||||
* This controls whether or not to have socket keep alive turned on (SO_KEEPALIVE).
|
||||
* Configures the maximum number of connections allowed per host; once these are exhausted, further requests will block.
|
||||
*
|
||||
* defaults to false
|
||||
*/
|
||||
public boolean socketKeepAlive = MONGO_OPTIONS.socketKeepAlive;
|
||||
|
||||
/**
|
||||
* this controls whether or not on a connect, the system retries automatically
|
||||
*/
|
||||
private boolean autoConnectRetry = MONGO_OPTIONS.autoConnectRetry;
|
||||
|
||||
private long maxAutoConnectRetryTime = MONGO_OPTIONS.maxAutoConnectRetryTime;
|
||||
|
||||
/**
|
||||
* This specifies the number of servers to wait for on the write operation, and exception raising behavior.
|
||||
*
|
||||
* Defaults to 0.
|
||||
*/
|
||||
private int writeNumber;
|
||||
|
||||
/**
|
||||
* This controls timeout for write operations in milliseconds.
|
||||
*
|
||||
* Defaults to 0 (indefinite). Greater than zero is number of milliseconds to wait.
|
||||
*/
|
||||
private int writeTimeout;
|
||||
|
||||
/**
|
||||
* This controls whether or not to fsync.
|
||||
*
|
||||
* Defaults to false.
|
||||
*/
|
||||
private boolean writeFsync;
|
||||
|
||||
/**
|
||||
* Specifies if the driver is allowed to read from secondaries or slaves.
|
||||
*
|
||||
* Defaults to false
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
private boolean slaveOk = MONGO_OPTIONS.slaveOk;
|
||||
|
||||
/**
|
||||
* number of connections allowed per host will block if run out
|
||||
* @param connectionsPerHost
|
||||
*/
|
||||
public void setConnectionsPerHost(int connectionsPerHost) {
|
||||
this.connectionsPerHost = connectionsPerHost;
|
||||
}
|
||||
|
||||
/**
|
||||
* multiplier for connectionsPerHost for # of threads that can block if connectionsPerHost is 10, and
|
||||
* threadsAllowedToBlockForConnectionMultiplier is 5, then 50 threads can block more than that and an exception will
|
||||
* be throw
|
||||
* A multiplier for connectionsPerHost for # of threads that can block a connection. If connectionsPerHost is 10, and
|
||||
* threadsAllowedToBlockForConnectionMultiplier is 5, then 50 threads can block. If more threads try to block an
|
||||
* exception will be thrown.
|
||||
*
|
||||
* @param threadsAllowedToBlockForConnectionMultiplier
|
||||
*/
|
||||
public void setThreadsAllowedToBlockForConnectionMultiplier(int threadsAllowedToBlockForConnectionMultiplier) {
|
||||
this.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier;
|
||||
}
|
||||
|
||||
/**
|
||||
* max wait time of a blocking thread for a connection
|
||||
* Max wait time of a blocking thread for a connection.
|
||||
*
|
||||
* @param maxWaitTime
|
||||
*/
|
||||
public void setMaxWaitTime(int maxWaitTime) {
|
||||
this.maxWaitTime = maxWaitTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* connect timeout in milliseconds. 0 is default and infinite
|
||||
* Configures the connect timeout in milliseconds. Defaults to 0 (infinite time).
|
||||
*
|
||||
* @param connectTimeout
|
||||
*/
|
||||
public void setConnectTimeout(int connectTimeout) {
|
||||
this.connectTimeout = connectTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* socket timeout. 0 is default and infinite
|
||||
* Configures the socket timeout. Defaults to 0 (infinite time).
|
||||
*
|
||||
* @param socketTimeout
|
||||
*/
|
||||
public void setSocketTimeout(int socketTimeout) {
|
||||
this.socketTimeout = socketTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* This controls whether or not to have socket keep alive
|
||||
* Configures whether or not to have socket keep alive turned on (SO_KEEPALIVE). Defaults to {@literal false}.
|
||||
*
|
||||
* @param socketKeepAlive
|
||||
*/
|
||||
@@ -152,7 +115,7 @@ public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, Initi
|
||||
* <li>-1 = don't even report network errors</li>
|
||||
* <li>0 = default, don't call getLastError by default</li>
|
||||
* <li>1 = basic, call getLastError, but don't wait for slaves</li>
|
||||
* <li>2+= wait for slaves</li>
|
||||
* <li>2 += wait for slaves</li>
|
||||
* </ul>
|
||||
*
|
||||
* @param writeNumber the number of servers to wait for on the write operation, and exception raising behavior.
|
||||
@@ -162,33 +125,33 @@ public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, Initi
|
||||
}
|
||||
|
||||
/**
|
||||
* This controls timeout for write operations in milliseconds. The 'wtimeout' option to the getlasterror command.
|
||||
* Configures the timeout for write operations in milliseconds. This defaults to {@literal 0} (indefinite).
|
||||
*
|
||||
* @param writeTimeout Defaults to 0 (indefinite). Greater than zero is number of milliseconds to wait.
|
||||
* @param writeTimeout
|
||||
*/
|
||||
public void setWriteTimeout(int writeTimeout) {
|
||||
this.writeTimeout = writeTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* This controls whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to false.
|
||||
* Configures whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to {@literal false}.
|
||||
*
|
||||
* @param writeFsync to fsync on write (true), otherwise false.
|
||||
* @param writeFsync to fsync on <code>write (true)</code>, otherwise {@literal false}.
|
||||
*/
|
||||
public void setWriteFsync(boolean writeFsync) {
|
||||
this.writeFsync = writeFsync;
|
||||
}
|
||||
|
||||
/**
|
||||
* this controls whether or not on a connect, the system retries automatically
|
||||
* Configures whether or not the system retries automatically on a failed connect. This defaults to {@literal false}.
|
||||
*/
|
||||
public void setAutoConnectRetry(boolean autoConnectRetry) {
|
||||
this.autoConnectRetry = autoConnectRetry;
|
||||
}
|
||||
|
||||
/**
|
||||
* The maximum amount of time in milliseconds to spend retrying to open a connection to the same server. Default is 0,
|
||||
* which means to use the default 15s if autoConnectRetry is on.
|
||||
* Configures the maximum amount of time in milliseconds to spend retrying to open a connection to the same server. This
|
||||
* defaults to {@literal 0}, which means to use the default {@literal 15s} if {@link #autoConnectRetry} is on.
|
||||
*
|
||||
* @param maxAutoConnectRetryTime the maxAutoConnectRetryTime to set
|
||||
*/
|
||||
@@ -197,7 +160,7 @@ public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, Initi
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies if the driver is allowed to read from secondaries or slaves. Defaults to false.
|
||||
* Specifies if the driver is allowed to read from secondaries or slaves. Defaults to {@literal false}.
|
||||
*
|
||||
* @param slaveOk true if the driver should read from secondaries or slaves.
|
||||
*/
|
||||
@@ -205,32 +168,81 @@ public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, Initi
|
||||
this.slaveOk = slaveOk;
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
/**
|
||||
* Specifies if the driver should use an SSL connection to Mongo. This defaults to {@literal false}. By default
|
||||
* {@link SSLSocketFactory#getDefault()} will be used. See {@link #setSslSocketFactory(SSLSocketFactory)} if you want
|
||||
* to configure a custom factory.
|
||||
*
|
||||
* @param ssl true if the driver should use an SSL connection.
|
||||
* @see #setSslSocketFactory(SSLSocketFactory)
|
||||
*/
|
||||
public void setSsl(boolean ssl) {
|
||||
this.ssl = ssl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies the {@link SSLSocketFactory} to use for creating SSL connections to Mongo. Defaults to
|
||||
* {@link SSLSocketFactory#getDefault()}. Implicitly activates {@link #setSsl(boolean)} if a non-{@literal null} value
|
||||
* is given.
|
||||
*
|
||||
* @param sslSocketFactory the sslSocketFactory to use.
|
||||
* @see #setSsl(boolean)
|
||||
*/
|
||||
public void setSslSocketFactory(SSLSocketFactory sslSocketFactory) {
|
||||
|
||||
setSsl(sslSocketFactory != null);
|
||||
this.sslSocketFactory = sslSocketFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
|
||||
*/
|
||||
public void afterPropertiesSet() {
|
||||
MONGO_OPTIONS.connectionsPerHost = connectionsPerHost;
|
||||
MONGO_OPTIONS.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier;
|
||||
MONGO_OPTIONS.maxWaitTime = maxWaitTime;
|
||||
MONGO_OPTIONS.connectTimeout = connectTimeout;
|
||||
MONGO_OPTIONS.socketTimeout = socketTimeout;
|
||||
MONGO_OPTIONS.socketKeepAlive = socketKeepAlive;
|
||||
MONGO_OPTIONS.autoConnectRetry = autoConnectRetry;
|
||||
MONGO_OPTIONS.maxAutoConnectRetryTime = maxAutoConnectRetryTime;
|
||||
MONGO_OPTIONS.slaveOk = slaveOk;
|
||||
MONGO_OPTIONS.w = writeNumber;
|
||||
MONGO_OPTIONS.wtimeout = writeTimeout;
|
||||
MONGO_OPTIONS.fsync = writeFsync;
|
||||
|
||||
MongoOptions options = new MongoOptions();
|
||||
|
||||
options.connectionsPerHost = connectionsPerHost;
|
||||
options.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier;
|
||||
options.maxWaitTime = maxWaitTime;
|
||||
options.connectTimeout = connectTimeout;
|
||||
options.socketTimeout = socketTimeout;
|
||||
options.socketKeepAlive = socketKeepAlive;
|
||||
options.autoConnectRetry = autoConnectRetry;
|
||||
options.maxAutoConnectRetryTime = maxAutoConnectRetryTime;
|
||||
options.slaveOk = slaveOk;
|
||||
options.w = writeNumber;
|
||||
options.wtimeout = writeTimeout;
|
||||
options.fsync = writeFsync;
|
||||
|
||||
if (ssl) {
|
||||
options.setSocketFactory(sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory.getDefault());
|
||||
}
|
||||
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObject()
|
||||
*/
|
||||
public MongoOptions getObject() {
|
||||
return MONGO_OPTIONS;
|
||||
return this.options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
|
||||
*/
|
||||
public Class<?> getObjectType() {
|
||||
return MongoOptions.class;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#isSingleton()
|
||||
*/
|
||||
public boolean isSingleton() {
|
||||
return true;
|
||||
}
|
||||
|
||||
}
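A short usage sketch of the rewritten factory bean (values are illustrative): `afterPropertiesSet()` now assembles a fresh `MongoOptions` instance instead of mutating the shared static one.

```java
import com.mongodb.MongoOptions;

import org.springframework.data.mongodb.core.MongoOptionsFactoryBean;

public class MongoOptionsExample {

	public static void main(String[] args) throws Exception {

		MongoOptionsFactoryBean factory = new MongoOptionsFactoryBean();
		factory.setConnectionsPerHost(50);
		factory.setSocketKeepAlive(true);
		factory.setSsl(true); // falls back to SSLSocketFactory.getDefault() unless a custom factory is set

		factory.afterPropertiesSet();
		MongoOptions options = factory.getObject();

		System.out.println(options.connectionsPerHost);
	}
}
```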
|
||||
|
||||
@@ -1,8 +1,26 @@
|
||||
/*
|
||||
* Copyright 2012-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.transaction.support.ResourceHolder;
|
||||
import org.springframework.transaction.support.ResourceHolderSynchronization;
|
||||
|
||||
/**
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class MongoSynchronization extends ResourceHolderSynchronization<ResourceHolder, Object> {
|
||||
|
||||
public MongoSynchronization(ResourceHolder resourceHolder, Object resourceKey) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -46,6 +46,7 @@ import org.springframework.core.io.ResourceLoader;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.OptimisticLockingFailureException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.data.convert.EntityReader;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
@@ -59,6 +60,8 @@ import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.Fields;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoWriter;
|
||||
@@ -124,6 +127,7 @@ import com.mongodb.util.JSONParseException;
|
||||
* @author Sebastian Herold
|
||||
* @author Thomas Darimont
|
||||
* @author Chuong Ngo
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
@@ -145,7 +149,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
private final MongoConverter mongoConverter;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
private final MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator();
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
private final QueryMapper queryMapper;
|
||||
private final UpdateMapper updateMapper;
|
||||
|
||||
@@ -199,6 +203,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
Assert.notNull(mongoDbFactory);
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.exceptionTranslator = mongoDbFactory.getExceptionTranslator();
|
||||
this.mongoConverter = mongoConverter == null ? getDefaultMongoConverter(mongoDbFactory) : mongoConverter;
|
||||
this.queryMapper = new QueryMapper(this.mongoConverter);
|
||||
this.updateMapper = new UpdateMapper(this.mongoConverter);
|
||||
@@ -347,7 +352,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
public void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch) {
|
||||
executeQuery(query, collectionName, dch, new QueryCursorPreparer(query));
|
||||
executeQuery(query, collectionName, dch, new QueryCursorPreparer(query, null));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -365,12 +370,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
Assert.notNull(query);
|
||||
|
||||
DBObject queryObject = query.getQueryObject();
|
||||
DBObject queryObject = queryMapper.getMappedObject(query.getQueryObject(), null);
|
||||
DBObject sortObject = query.getSortObject();
|
||||
DBObject fieldsObject = query.getFieldsObject();
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Executing query: %s sort: %s fields: %s in collection: $s",
|
||||
LOGGER.debug(String.format("Executing query: %s sort: %s fields: %s in collection: %s",
|
||||
serializeToJsonSafely(queryObject), sortObject, fieldsObject, collectionName));
|
||||
}
|
||||
|
||||
@@ -525,7 +530,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass,
|
||||
new QueryCursorPreparer(query));
|
||||
new QueryCursorPreparer(query, entityClass));
|
||||
}
|
||||
|
||||
public <T> T findById(Object id, Class<T> entityClass) {
|
||||
@@ -607,8 +612,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
public <T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
String collectionName) {
|
||||
return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(), query.getSortObject(),
|
||||
entityClass, update, options);
|
||||
return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(),
|
||||
getMappedSortObject(query, entityClass), entityClass, update, options);
|
||||
}
|
||||
|
||||
// Find methods that take a Query to express the query and that return a single object that is also removed from the
|
||||
@@ -619,8 +624,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
public <T> T findAndRemove(Query query, Class<T> entityClass, String collectionName) {
|
||||
return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(), query.getSortObject(),
|
||||
entityClass);
|
||||
|
||||
return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(),
|
||||
getMappedSortObject(query, entityClass), entityClass);
|
||||
}
|
||||
|
||||
public long count(Query query, Class<?> entityClass) {
|
||||
@@ -700,10 +706,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
initializeVersionProperty(objectToSave);
|
||||
|
||||
BasicDBObject dbDoc = new BasicDBObject();
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave));
|
||||
writer.write(objectToSave, dbDoc);
|
||||
|
||||
DBObject dbDoc = toDbObject(objectToSave, writer);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc));
|
||||
Object id = insertDBObject(collectionName, dbDoc, objectToSave.getClass());
|
||||
@@ -712,6 +717,26 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbDoc));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param objectToSave
|
||||
* @param writer
|
||||
* @return
|
||||
*/
|
||||
private <T> DBObject toDbObject(T objectToSave, MongoWriter<T> writer) {
|
||||
|
||||
if (!(objectToSave instanceof String)) {
|
||||
DBObject dbDoc = new BasicDBObject();
|
||||
writer.write(objectToSave, dbDoc);
|
||||
return dbDoc;
|
||||
} else {
|
||||
try {
|
||||
return (DBObject) JSON.parse((String) objectToSave);
|
||||
} catch (JSONParseException e) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", e);
|
||||
}
|
||||
}
|
||||
}
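With the extracted `toDbObject(…)` helper, a plain JSON `String` handed to `insert(…)` is parsed rather than mapped; the collection name and payload below are made up.

```java
import org.springframework.data.mongodb.core.MongoTemplate;

public class JsonInsertExample {

	// 'template' must be a configured MongoTemplate; a malformed String is rejected with a MappingException.
	public void saveRawJson(MongoTemplate template) {
		template.insert("{ \"_id\" : \"dave\", \"lastname\" : \"Matthews\" }", "people");
	}
}
```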
|
||||
|
||||
private void initializeVersionProperty(Object entity) {
|
||||
|
||||
MongoPersistentEntity<?> mongoPersistentEntity = getPersistentEntity(entity.getClass());
|
||||
@@ -851,19 +876,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
assertUpdateableIdIfNotSet(objectToSave);
|
||||
|
||||
DBObject dbDoc = new BasicDBObject();
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave));
|
||||
|
||||
if (!(objectToSave instanceof String)) {
|
||||
writer.write(objectToSave, dbDoc);
|
||||
} else {
|
||||
try {
|
||||
dbDoc = (DBObject) JSON.parse((String) objectToSave);
|
||||
} catch (JSONParseException e) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", e);
|
||||
}
|
||||
}
|
||||
DBObject dbDoc = toDbObject(objectToSave, writer);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc));
|
||||
Object id = saveDBObject(collectionName, dbDoc, objectToSave.getClass());
|
||||
@@ -983,6 +998,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
MongoPersistentEntity<?> entity = entityClass == null ? null : getPersistentEntity(entityClass);
|
||||
|
||||
increaseVersionForUpdateIfNecessary(entity, update);
|
||||
|
||||
DBObject queryObj = query == null ? new BasicDBObject() : queryMapper.getMappedObject(query.getQueryObject(),
|
||||
entity);
|
||||
DBObject updateObj = update == null ? new BasicDBObject() : updateMapper.getMappedObject(
|
||||
@@ -1000,7 +1017,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
: collection.update(queryObj, updateObj, upsert, multi, writeConcernToUse);
|
||||
|
||||
if (entity != null && entity.hasVersionProperty() && !multi) {
|
||||
if (writeResult.getN() == 0) {
|
||||
if (writeResult.getN() == 0 && dbObjectContainsVersionProperty(queryObj, entity)) {
|
||||
throw new OptimisticLockingFailureException("Optimistic lock exception on saving entity: "
|
||||
+ updateObj.toMap().toString() + " to collection " + collectionName);
|
||||
}
|
||||
@@ -1012,6 +1029,25 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
});
|
||||
}
|
||||
|
||||
private void increaseVersionForUpdateIfNecessary(MongoPersistentEntity<?> persistentEntity, Update update) {
|
||||
|
||||
if (persistentEntity != null && persistentEntity.hasVersionProperty()) {
|
||||
String versionFieldName = persistentEntity.getVersionProperty().getFieldName();
|
||||
if (!update.modifies(versionFieldName)) {
|
||||
update.inc(versionFieldName, 1L);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private boolean dbObjectContainsVersionProperty(DBObject dbObject, MongoPersistentEntity<?> persistentEntity) {
|
||||
|
||||
if (persistentEntity == null || !persistentEntity.hasVersionProperty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return dbObject.containsField(persistentEntity.getVersionProperty().getFieldName());
|
||||
}
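The two helpers above drive the optimistic-locking behaviour; a hypothetical versioned document for context. On a single-document update the version field is incremented automatically, and an `OptimisticLockingFailureException` is only raised when the update matched nothing and the query itself carried a version value.

```java
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;

// Hypothetical entity; field names are illustrative.
class VersionedAccount {

	@Id String id;
	@Version Long version; // bumped by increaseVersionForUpdateIfNecessary on each update
	double balance;
}
```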
|
||||
|
||||
public void remove(Object object) {
|
||||
|
||||
if (object == null) {
|
||||
@@ -1154,6 +1190,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
public <T> MapReduceResults<T> mapReduce(Query query, String inputCollectionName, String mapFunction,
|
||||
String reduceFunction, MapReduceOptions mapReduceOptions, Class<T> entityClass) {
|
||||
|
||||
String mapFunc = replaceWithResourceIfNecessary(mapFunction);
|
||||
String reduceFunc = replaceWithResourceIfNecessary(reduceFunction);
|
||||
DBCollection inputCollection = getCollection(inputCollectionName);
|
||||
@@ -1178,12 +1215,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
MapReduceOutput mapReduceOutput = new MapReduceOutput(inputCollection, commandObject, commandResult);
|
||||
List<T> mappedResults = new ArrayList<T>();
|
||||
DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass);
|
||||
|
||||
for (DBObject dbObject : mapReduceOutput.results()) {
|
||||
mappedResults.add(callback.doWith(dbObject));
|
||||
}
|
||||
|
||||
MapReduceResults<T> mapReduceResult = new MapReduceResults<T>(mappedResults, commandResult);
|
||||
return mapReduceResult;
|
||||
return new MapReduceResults<T>(mappedResults, commandResult);
|
||||
}
|
||||
|
||||
public <T> GroupByResults<T> group(String inputCollectionName, GroupBy groupBy, Class<T> entityClass) {
|
||||
@@ -1237,15 +1274,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
Iterable<DBObject> resultSet = (Iterable<DBObject>) commandResult.get("retval");
|
||||
|
||||
List<T> mappedResults = new ArrayList<T>();
|
||||
DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass);
|
||||
|
||||
for (DBObject dbObject : resultSet) {
|
||||
mappedResults.add(callback.doWith(dbObject));
|
||||
}
|
||||
GroupByResults<T> groupByResult = new GroupByResults<T>(mappedResults, commandResult);
|
||||
return groupByResult;
|
||||
|
||||
return new GroupByResults<T>(mappedResults, commandResult);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -1335,13 +1371,13 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
"Can not use skip or field specification with map reduce operations");
|
||||
}
|
||||
if (query.getQueryObject() != null) {
|
||||
copyMapReduceOptions.put("query", query.getQueryObject());
|
||||
copyMapReduceOptions.put("query", queryMapper.getMappedObject(query.getQueryObject(), null));
|
||||
}
|
||||
if (query.getLimit() > 0) {
|
||||
copyMapReduceOptions.put("limit", query.getLimit());
|
||||
}
|
||||
if (query.getSortObject() != null) {
|
||||
copyMapReduceOptions.put("sort", query.getSortObject());
|
||||
copyMapReduceOptions.put("sort", queryMapper.getMappedObject(query.getSortObject(), null));
|
||||
}
|
||||
}
|
||||
return copyMapReduceOptions;
|
||||
@@ -1538,8 +1574,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
|
||||
DBObject mappedUpdate = queryMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
increaseVersionForUpdateIfNecessary(entity, update);
|
||||
|
||||
DBObject mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
DBObject mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findAndModify using query: " + mappedQuery + " fields: " + fields + " sort: " + sort
|
||||
@@ -1814,11 +1852,23 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
private static final MongoConverter getDefaultMongoConverter(MongoDbFactory factory) {
|
||||
MappingMongoConverter converter = new MappingMongoConverter(factory, new MongoMappingContext());
|
||||
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(factory);
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, new MongoMappingContext());
|
||||
converter.afterPropertiesSet();
|
||||
return converter;
|
||||
}
|
||||
|
||||
private DBObject getMappedSortObject(Query query, Class<?> type) {
|
||||
|
||||
if (query == null || query.getSortObject() == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(type);
|
||||
return queryMapper.getMappedObject(query.getSortObject(), entity);
|
||||
}
|
||||
|
||||
// Callback implementations
|
||||
|
||||
/**
|
||||
@@ -2012,9 +2062,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
class QueryCursorPreparer implements CursorPreparer {
|
||||
|
||||
private final Query query;
|
||||
private final Class<?> type;
|
||||
|
||||
public QueryCursorPreparer(Query query, Class<?> type) {
|
||||
|
||||
public QueryCursorPreparer(Query query) {
|
||||
this.query = query;
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -2042,7 +2095,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
cursorToUse = cursorToUse.limit(query.getLimit());
|
||||
}
|
||||
if (query.getSortObject() != null) {
|
||||
cursorToUse = cursorToUse.sort(query.getSortObject());
|
||||
DBObject sortDbo = type != null ? getMappedSortObject(query, type) : query.getSortObject();
|
||||
cursorToUse = cursorToUse.sort(sortDbo);
|
||||
}
|
||||
if (StringUtils.hasText(query.getHint())) {
|
||||
cursorToUse = cursorToUse.hint(query.getHint());
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -19,9 +19,11 @@ import java.net.UnknownHostException;
|
||||
|
||||
import org.springframework.beans.factory.DisposableBean;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.Mongo;
|
||||
@@ -34,6 +36,7 @@ import com.mongodb.WriteConcern;
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
|
||||
@@ -41,6 +44,9 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
private final String databaseName;
|
||||
private final boolean mongoInstanceCreated;
|
||||
private final UserCredentials credentials;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
private final String authenticationDatabaseName;
|
||||
|
||||
private WriteConcern writeConcern;
|
||||
|
||||
/**
|
||||
@@ -50,7 +56,7 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
* @param databaseName database name, not be {@literal null} or empty.
|
||||
*/
|
||||
public SimpleMongoDbFactory(Mongo mongo, String databaseName) {
|
||||
this(mongo, databaseName, UserCredentials.NO_CREDENTIALS, false);
|
||||
this(mongo, databaseName, null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -61,7 +67,20 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
* @param credentials username and password.
|
||||
*/
|
||||
public SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials credentials) {
|
||||
this(mongo, databaseName, credentials, false);
|
||||
this(mongo, databaseName, credentials, false, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an instance of SimpleMongoDbFactory given the Mongo instance, database name, and username/password
|
||||
*
|
||||
* @param mongo Mongo instance, must not be {@literal null}.
|
||||
* @param databaseName Database name, must not be {@literal null} or empty.
|
||||
* @param credentials username and password.
|
||||
* @param authenticationDatabaseName the database name to use for authentication
|
||||
*/
|
||||
public SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials credentials,
|
||||
String authenticationDatabaseName) {
|
||||
this(mongo, databaseName, credentials, false, authenticationDatabaseName);
|
||||
}
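A short sketch of the new constructor in use, assuming placeholder host, database and credential values and a surrounding method that declares UnknownHostException:

    Mongo mongo = new Mongo("localhost");
    UserCredentials credentials = new UserCredentials("app-user", "secret");
    // Use the "store" database for data access but authenticate against "admin".
    MongoDbFactory factory = new SimpleMongoDbFactory(mongo, "store", credentials, "admin");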
/**
|
||||
@@ -72,12 +91,14 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
* @throws UnknownHostException
|
||||
* @see MongoURI
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public SimpleMongoDbFactory(MongoURI uri) throws MongoException, UnknownHostException {
|
||||
this(new Mongo(uri), uri.getDatabase(), new UserCredentials(uri.getUsername(), parseChars(uri.getPassword())), true);
|
||||
this(new Mongo(uri), uri.getDatabase(), new UserCredentials(uri.getUsername(), parseChars(uri.getPassword())),
|
||||
true, uri.getDatabase());
|
||||
}
|
||||
|
||||
private SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials credentials,
|
||||
boolean mongoInstanceCreated) {
|
||||
boolean mongoInstanceCreated, String authenticationDatabaseName) {
|
||||
|
||||
Assert.notNull(mongo, "Mongo must not be null");
|
||||
Assert.hasText(databaseName, "Database name must not be empty");
|
||||
@@ -88,6 +109,12 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
this.databaseName = databaseName;
|
||||
this.mongoInstanceCreated = mongoInstanceCreated;
|
||||
this.credentials = credentials == null ? UserCredentials.NO_CREDENTIALS : credentials;
|
||||
this.exceptionTranslator = new MongoExceptionTranslator();
|
||||
this.authenticationDatabaseName = StringUtils.hasText(authenticationDatabaseName) ? authenticationDatabaseName
|
||||
: databaseName;
|
||||
|
||||
Assert.isTrue(this.authenticationDatabaseName.matches("[\\w-]+"),
|
||||
"Authentication database name must only contain letters, numbers, underscores and dashes!");
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -115,7 +142,7 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
|
||||
Assert.hasText(dbName, "Database name must not be empty.");
|
||||
|
||||
DB db = MongoDbUtils.getDB(mongo, dbName, credentials);
|
||||
DB db = MongoDbUtils.getDB(mongo, dbName, credentials, authenticationDatabaseName);
|
||||
|
||||
if (writeConcern != null) {
|
||||
db.setWriteConcern(writeConcern);
|
||||
@@ -138,4 +165,13 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
private static String parseChars(char[] chars) {
|
||||
return chars == null ? null : String.valueOf(chars);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
@Override
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return this.exceptionTranslator;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -48,6 +48,15 @@ public class Aggregation {
|
||||
|
||||
private final List<AggregationOperation> operations;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public static Aggregation newAggregation(List<? extends AggregationOperation> operations) {
|
||||
return newAggregation(operations.toArray(new AggregationOperation[operations.size()]));
|
||||
}
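The List-based overload makes it easier to assemble a pipeline programmatically before creating the Aggregation. A hedged example using the existing project, unwind and group factory methods; the field names are illustrative:

    List<AggregationOperation> pipeline = new ArrayList<AggregationOperation>();
    pipeline.add(Aggregation.project("tags"));
    pipeline.add(Aggregation.unwind("tags"));
    pipeline.add(Aggregation.group("tags").count().as("n"));
    Aggregation aggregation = Aggregation.newAggregation(pipeline);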
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
@@ -57,6 +66,16 @@ public class Aggregation {
|
||||
return new Aggregation(operations);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypedAggregation} for the given type and {@link AggregationOperation}s.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public static <T> TypedAggregation<T> newAggregation(Class<T> type, List<? extends AggregationOperation> operations) {
|
||||
return newAggregation(type, operations.toArray(new AggregationOperation[operations.size()]));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypedAggregation} for the given type and {@link AggregationOperation}s.
|
||||
*
|
||||
@@ -227,8 +246,9 @@ public class Aggregation {
|
||||
|
||||
operationDocuments.add(operation.toDBObject(context));
|
||||
|
||||
if (operation instanceof AggregationOperationContext) {
|
||||
context = (AggregationOperationContext) operation;
|
||||
if (operation instanceof FieldsExposingAggregationOperation) {
|
||||
FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation;
|
||||
context = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), rootContext);
|
||||
}
|
||||
}
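With this change every field-exposing stage re-bases the context on the root context instead of the stage itself becoming the context, so later stages can keep referring to the fields a group or projection exposed. A sketch of such a pipeline, assuming illustrative field names and org.springframework.data.domain.Sort on the classpath:

    Aggregation aggregation = Aggregation.newAggregation(
            Aggregation.group("state").sum("population").as("totalPop"),
            Aggregation.sort(Sort.Direction.DESC, "totalPop"),
            Aggregation.project("totalPop"));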
@@ -0,0 +1,82 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationExpressionTransformer.AggregationExpressionTransformationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.spel.ExpressionNode;
|
||||
import org.springframework.data.mongodb.core.spel.ExpressionTransformationContextSupport;
|
||||
import org.springframework.data.mongodb.core.spel.ExpressionTransformer;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Interface to type an {@link ExpressionTransformer} to the contained
|
||||
* {@link AggregationExpressionTransformationContext}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
interface AggregationExpressionTransformer extends
|
||||
ExpressionTransformer<AggregationExpressionTransformationContext<ExpressionNode>> {
|
||||
|
||||
/**
|
||||
* A special {@link ExpressionTransformationContextSupport} to be aware of the {@link AggregationOperationContext}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public static class AggregationExpressionTransformationContext<T extends ExpressionNode> extends
|
||||
ExpressionTransformationContextSupport<T> {
|
||||
|
||||
private final AggregationOperationContext aggregationContext;
|
||||
|
||||
/**
|
||||
* Creates an {@link AggregationExpressionTransformationContext}.
|
||||
*
|
||||
* @param currentNode must not be {@literal null}.
|
||||
* @param parentNode
|
||||
* @param previousOperationObject
|
||||
* @param aggregationContext must not be {@literal null}.
|
||||
*/
|
||||
public AggregationExpressionTransformationContext(T currentNode, ExpressionNode parentNode,
|
||||
DBObject previousOperationObject, AggregationOperationContext context) {
|
||||
|
||||
super(currentNode, parentNode, previousOperationObject);
|
||||
|
||||
Assert.notNull(context, "AggregationOperationContext must not be null!");
|
||||
this.aggregationContext = context;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the underlying {@link AggregationOperationContext}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public AggregationOperationContext getAggregationContext() {
|
||||
return aggregationContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link FieldReference} for the current {@link ExpressionNode}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public FieldReference getFieldReference() {
|
||||
return aggregationContext.getReference(getCurrentNode().getName());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -29,9 +29,10 @@ import org.springframework.util.CompositeIterator;
|
||||
* Value object to capture the fields exposed by an {@link AggregationOperation}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @since 1.3
|
||||
*/
|
||||
public class ExposedFields implements Iterable<ExposedField> {
|
||||
public final class ExposedFields implements Iterable<ExposedField> {
|
||||
|
||||
private static final List<ExposedField> NO_FIELDS = Collections.emptyList();
|
||||
private static final ExposedFields EMPTY = new ExposedFields(NO_FIELDS, NO_FIELDS);
|
||||
@@ -151,13 +152,47 @@ public class ExposedFields implements Iterable<ExposedField> {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes no non-synthetic fields at all.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean exposesNoNonSyntheticFields() {
|
||||
return originalFields.isEmpty();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes a single non-synthetic field only.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean exposesSingleNonSyntheticFieldOnly() {
|
||||
return originalFields.size() == 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes no fields at all.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean exposesNoFields() {
|
||||
return exposedFieldsCount() == 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes a single field only.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean exposesSingleFieldOnly() {
|
||||
return originalFields.size() + syntheticFields.size() == 1;
|
||||
boolean exposesSingleFieldOnly() {
|
||||
return exposedFieldsCount() == 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return
|
||||
*/
|
||||
private int exposedFieldsCount() {
|
||||
return originalFields.size() + syntheticFields.size();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -224,14 +259,30 @@ public class ExposedFields implements Iterable<ExposedField> {
|
||||
return field.getTarget();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#isAliased()
|
||||
*/
|
||||
@Override
|
||||
public boolean isAliased() {
|
||||
return field.isAliased();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the synthetic
|
||||
*/
|
||||
public boolean isSynthetic() {
|
||||
return synthetic;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the field can be referred to using the given name.
|
||||
*
|
||||
* @param input
|
||||
* @param name
|
||||
* @return
|
||||
*/
|
||||
public boolean canBeReferredToBy(String input) {
|
||||
return getTarget().equals(input);
|
||||
public boolean canBeReferredToBy(String name) {
|
||||
return getName().equals(name) || getTarget().equals(name);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -296,6 +347,7 @@ public class ExposedFields implements Iterable<ExposedField> {
|
||||
public FieldReference(ExposedField field) {
|
||||
|
||||
Assert.notNull(field, "ExposedField must not be null!");
|
||||
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
@@ -305,10 +357,21 @@ public class ExposedFields implements Iterable<ExposedField> {
|
||||
* @return
|
||||
*/
|
||||
public String getRaw() {
|
||||
|
||||
String target = field.getTarget();
|
||||
return field.synthetic ? target : String.format("%s.%s", Fields.UNDERSCORE_ID, target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the reference value for the given field reference. Will return 1 for a synthetic, unaliased field or the
* raw rendering of the reference otherwise.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public Object getReferenceValue() {
|
||||
return field.synthetic && !field.isAliased() ? 1 : toString();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,17 +17,38 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Support class to implement {@link AggregationOperation}s that will become an {@link AggregationOperationContext} as
|
||||
* well defining {@link ExposedFields}.
|
||||
* {@link AggregationOperationContext} that combines the available field references from a given
|
||||
* {@code AggregationOperationContext} and an {@link FieldsExposingAggregationOperation}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
* @since 1.4
|
||||
*/
|
||||
public abstract class ExposedFieldsAggregationOperationContext implements AggregationOperationContext {
|
||||
class ExposedFieldsAggregationOperationContext implements AggregationOperationContext {
|
||||
|
||||
private final ExposedFields exposedFields;
|
||||
private final AggregationOperationContext rootContext;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFieldsAggregationOperationContext} from the given {@link ExposedFields}. Uses the given
|
||||
* {@link AggregationOperationContext} to perform a mapping to mongo types if necessary.
|
||||
*
|
||||
* @param exposedFields must not be {@literal null}.
|
||||
* @param rootContext must not be {@literal null}.
|
||||
*/
|
||||
public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields, AggregationOperationContext rootContext) {
|
||||
|
||||
Assert.notNull(exposedFields, "ExposedFields must not be null!");
|
||||
Assert.notNull(rootContext, "RootContext must not be null!");
|
||||
|
||||
this.exposedFields = exposedFields;
|
||||
this.rootContext = rootContext;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -35,7 +56,7 @@ public abstract class ExposedFieldsAggregationOperationContext implements Aggreg
|
||||
*/
|
||||
@Override
|
||||
public DBObject getMappedObject(DBObject dbObject) {
|
||||
return dbObject;
|
||||
return rootContext.getMappedObject(dbObject);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -44,7 +65,7 @@ public abstract class ExposedFieldsAggregationOperationContext implements Aggreg
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
return getReference(field.getTarget());
|
||||
return getReference(field, field.getTarget());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -53,15 +74,44 @@ public abstract class ExposedFieldsAggregationOperationContext implements Aggreg
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(String name) {
|
||||
return getReference(null, name);
|
||||
}
|
||||
|
||||
ExposedField field = getFields().getField(name);
|
||||
/**
|
||||
* Returns a {@link FieldReference} to the given {@link Field} with the given {@code name}.
|
||||
*
|
||||
* @param field may be {@literal null}
|
||||
* @param name must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
private FieldReference getReference(Field field, String name) {
|
||||
|
||||
if (field != null) {
|
||||
return new FieldReference(field);
|
||||
Assert.notNull(name, "Name must not be null!");
|
||||
|
||||
ExposedField exposedField = exposedFields.getField(name);
|
||||
|
||||
if (exposedField != null) {
|
||||
|
||||
if (field != null) {
|
||||
// we return a FieldReference to the given field directly to make sure that we reference the proper alias here.
|
||||
return new FieldReference(new ExposedField(field, exposedField.isSynthetic()));
|
||||
}
|
||||
|
||||
return new FieldReference(exposedField);
|
||||
}
|
||||
|
||||
if (name.contains(".")) {
|
||||
|
||||
// for nested field references we only check that the root field exists.
|
||||
ExposedField rootField = exposedFields.getField(name.split("\\.")[0]);
|
||||
|
||||
if (rootField != null) {
|
||||
|
||||
// We have to set synthetic to true in order to render the field name as-is.
return new FieldReference(new ExposedField(name, true));
|
||||
}
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Invalid reference '%s'!", name));
|
||||
}
|
||||
|
||||
protected abstract ExposedFields getFields();
|
||||
}
|
||||
|
||||
@@ -36,4 +36,11 @@ public interface Field {
|
||||
* @return must not be {@literal null}.
|
||||
*/
|
||||
String getTarget();
|
||||
|
||||
/**
|
||||
* Returns whether the Field is aliased, which means it has a name set different from the target.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean isAliased();
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -32,13 +32,13 @@ import org.springframework.util.StringUtils;
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class Fields implements Iterable<Field> {
|
||||
public final class Fields implements Iterable<Field> {
|
||||
|
||||
private static final String AMBIGUOUS_EXCEPTION = "Found two fields both using '%s' as name: %s and %s! Please "
|
||||
+ "customize your field definitions to get to unique field names!";
|
||||
|
||||
public static String UNDERSCORE_ID = "_id";
|
||||
public static String UNDERSCORE_ID_REF = "$_id";
|
||||
public static final String UNDERSCORE_ID = "_id";
|
||||
public static final String UNDERSCORE_ID_REF = "$_id";
|
||||
|
||||
private final List<Field> fields;
|
||||
|
||||
@@ -197,17 +197,30 @@ public class Fields implements Iterable<Field> {
|
||||
|
||||
public AggregationField(String name, String target) {
|
||||
|
||||
Assert.hasText(name, "AggregationField name must not be null or empty!");
|
||||
String nameToSet = cleanUp(name);
|
||||
String targetToSet = cleanUp(target);
|
||||
|
||||
Assert.hasText(nameToSet, "AggregationField name must not be null or empty!");
|
||||
|
||||
if (target == null && name.contains(".")) {
|
||||
this.name = name.substring(name.indexOf(".") + 1);
|
||||
this.target = name;
|
||||
this.name = nameToSet.substring(nameToSet.indexOf('.') + 1);
|
||||
this.target = nameToSet;
|
||||
} else {
|
||||
this.name = name;
|
||||
this.target = target;
|
||||
this.name = nameToSet;
|
||||
this.target = targetToSet;
|
||||
}
|
||||
}
|
||||
|
||||
private static final String cleanUp(String source) {
|
||||
|
||||
if (source == null) {
|
||||
return source;
|
||||
}
|
||||
|
||||
int dollarIndex = source.lastIndexOf('$');
|
||||
return dollarIndex == -1 ? source : source.substring(dollarIndex + 1);
|
||||
}
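The cleanUp step strips everything up to and including the last "$", so a field definition may be given with or without the reference prefix. A small illustration; the names are placeholders:

    // Both definitions end up with the same target field name.
    Field withPrefix = Fields.field("total", "$amount");
    Field withoutPrefix = Fields.field("total", "amount");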
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#getKey()
|
||||
@@ -224,6 +237,15 @@ public class Fields implements Iterable<Field> {
|
||||
return StringUtils.hasText(this.target) ? this.target : this.name;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#isAliased()
|
||||
*/
|
||||
@Override
|
||||
public boolean isAliased() {
|
||||
return !getName().equals(getTarget());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
|
||||
@@ -0,0 +1,32 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
/**
|
||||
* {@link AggregationOperation} that exposes new {@link ExposedFields} that can be used for later aggregation pipeline
|
||||
* {@code AggregationOperation}s.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public interface FieldsExposingAggregationOperation extends AggregationOperation {
|
||||
|
||||
/**
|
||||
* Returns the fields exposed by the {@link AggregationOperation}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
ExposedFields getFields();
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -38,9 +38,13 @@ import com.mongodb.DBObject;
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class GroupOperation extends ExposedFieldsAggregationOperationContext implements AggregationOperation {
|
||||
public class GroupOperation implements FieldsExposingAggregationOperation {
|
||||
|
||||
/**
|
||||
* Holds the non-synthetic fields which are the fields of the group-id structure.
|
||||
*/
|
||||
private final ExposedFields idFields;
|
||||
|
||||
private final ExposedFields nonSynthecticFields;
|
||||
private final List<Operation> operations;
|
||||
|
||||
/**
|
||||
@@ -50,7 +54,7 @@ public class GroupOperation extends ExposedFieldsAggregationOperationContext imp
|
||||
*/
|
||||
public GroupOperation(Fields fields) {
|
||||
|
||||
this.nonSynthecticFields = ExposedFields.nonSynthetic(fields);
|
||||
this.idFields = ExposedFields.nonSynthetic(fields);
|
||||
this.operations = new ArrayList<Operation>();
|
||||
}
|
||||
|
||||
@@ -74,7 +78,7 @@ public class GroupOperation extends ExposedFieldsAggregationOperationContext imp
|
||||
Assert.notNull(groupOperation, "GroupOperation must not be null!");
|
||||
Assert.notNull(nextOperations, "NextOperations must not be null!");
|
||||
|
||||
this.nonSynthecticFields = groupOperation.nonSynthecticFields;
|
||||
this.idFields = groupOperation.idFields;
|
||||
this.operations = new ArrayList<Operation>(nextOperations.size() + 1);
|
||||
this.operations.addAll(groupOperation.operations);
|
||||
this.operations.addAll(nextOperations);
|
||||
@@ -95,7 +99,7 @@ public class GroupOperation extends ExposedFieldsAggregationOperationContext imp
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class GroupOperationBuilder {
|
||||
public static final class GroupOperationBuilder {
|
||||
|
||||
private final GroupOperation groupOperation;
|
||||
private final Operation operation;
|
||||
@@ -261,7 +265,7 @@ public class GroupOperation extends ExposedFieldsAggregationOperationContext imp
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
|
||||
ExposedFields fields = this.nonSynthecticFields.and(new ExposedField(Fields.UNDERSCORE_ID, true));
|
||||
ExposedFields fields = this.idFields.and(new ExposedField(Fields.UNDERSCORE_ID, true));
|
||||
|
||||
for (Operation operation : operations) {
|
||||
fields = fields.and(operation.asField());
|
||||
@@ -279,16 +283,20 @@ public class GroupOperation extends ExposedFieldsAggregationOperationContext imp
|
||||
|
||||
BasicDBObject operationObject = new BasicDBObject();
|
||||
|
||||
if (nonSynthecticFields.exposesSingleFieldOnly()) {
|
||||
if (idFields.exposesNoNonSyntheticFields()) {
|
||||
|
||||
FieldReference reference = context.getReference(nonSynthecticFields.iterator().next());
|
||||
operationObject.put(Fields.UNDERSCORE_ID, null);
|
||||
|
||||
} else if (idFields.exposesSingleNonSyntheticFieldOnly()) {
|
||||
|
||||
FieldReference reference = context.getReference(idFields.iterator().next());
|
||||
operationObject.put(Fields.UNDERSCORE_ID, reference.toString());
|
||||
|
||||
} else {
|
||||
|
||||
BasicDBObject inner = new BasicDBObject();
|
||||
|
||||
for (ExposedField field : nonSynthecticFields) {
|
||||
for (ExposedField field : idFields) {
|
||||
FieldReference reference = context.getReference(field);
|
||||
inner.put(field.getName(), reference.toString());
|
||||
}
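The id rendering now distinguishes three cases: no id fields render a null _id, a single id field renders a plain reference, and multiple id fields render a composite document. A hedged example of the last case; the field names are illustrative:

    GroupOperation group = Aggregation.group("customerId", "state").sum("amount").as("total");
    // Renders roughly as:
    // { "$group" : { "_id" : { "customerId" : "$customerId", "state" : "$state" },
    //                "total" : { "$sum" : "$amount" } } }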
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -21,7 +21,6 @@ import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.FieldProjection;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -41,9 +40,11 @@ import com.mongodb.DBObject;
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class ProjectionOperation extends ExposedFieldsAggregationOperationContext implements AggregationOperation {
|
||||
public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
|
||||
private static final List<Projection> NONE = Collections.emptyList();
|
||||
private static final String EXCLUSION_ERROR = "Exclusion of field %s not allowed. Projections by the mongodb "
|
||||
+ "aggregation framework only support the exclusion of the %s field!";
|
||||
|
||||
private final List<Projection> projections;
|
||||
|
||||
@@ -60,7 +61,7 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
* @param fields must not be {@literal null}.
|
||||
*/
|
||||
public ProjectionOperation(Fields fields) {
|
||||
this(NONE, ProjectionOperationBuilder.FieldProjection.from(fields, true));
|
||||
this(NONE, ProjectionOperationBuilder.FieldProjection.from(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -114,26 +115,36 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
return new ProjectionOperationBuilder(name, this, null);
|
||||
}
|
||||
|
||||
public ExpressionProjectionOperationBuilder andExpression(String expression, Object... params) {
|
||||
return new ExpressionProjectionOperationBuilder(expression, this, params);
|
||||
}
|
||||
|
||||
/**
|
||||
* Excludes the given fields from the projection.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @param fieldNames must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation andExclude(String... fields) {
|
||||
List<FieldProjection> excludeProjections = FieldProjection.from(Fields.fields(fields), false);
|
||||
public ProjectionOperation andExclude(String... fieldNames) {
|
||||
|
||||
for (String fieldName : fieldNames) {
|
||||
Assert.isTrue(Fields.UNDERSCORE_ID.equals(fieldName),
|
||||
String.format(EXCLUSION_ERROR, fieldName, Fields.UNDERSCORE_ID));
|
||||
}
|
||||
|
||||
List<FieldProjection> excludeProjections = FieldProjection.from(Fields.fields(fieldNames), false);
|
||||
return new ProjectionOperation(this.projections, excludeProjections);
|
||||
}
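After this change only the _id field may be excluded from a projection, matching what the aggregation framework supports; any other exclusion fails fast. A short sketch with placeholder field names:

    ProjectionOperation projection = Aggregation.project("quantity", "price").andExclude("_id");
    // Aggregation.project("quantity", "price").andExclude("price") now throws IllegalArgumentException.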
/**
|
||||
* Includes the given fields into the projection.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @param fieldNames must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation andInclude(String... fields) {
|
||||
public ProjectionOperation andInclude(String... fieldNames) {
|
||||
|
||||
List<FieldProjection> projections = FieldProjection.from(Fields.fields(fields), true);
|
||||
List<FieldProjection> projections = FieldProjection.from(Fields.fields(fieldNames), true);
|
||||
return new ProjectionOperation(this.projections, projections);
|
||||
}
|
||||
|
||||
@@ -147,12 +158,12 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
return new ProjectionOperation(this.projections, FieldProjection.from(fields, true));
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ExposedFieldsAggregationOperationContext#getFields()
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
|
||||
*/
|
||||
@Override
|
||||
protected ExposedFields getFields() {
|
||||
public ExposedFields getFields() {
|
||||
|
||||
ExposedFields fields = null;
|
||||
|
||||
@@ -180,12 +191,133 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
return new BasicDBObject("$project", fieldObject);
|
||||
}
|
||||
|
||||
/**
|
||||
* Base class for {@link ProjectionOperationBuilder}s.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static abstract class AbstractProjectionOperationBuilder implements AggregationOperation {
|
||||
|
||||
protected final Object value;
|
||||
protected final ProjectionOperation operation;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AbstractProjectionOperationBuilder} for the given value and {@link ProjectionOperation}.
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @param operation must not be {@literal null}.
|
||||
*/
|
||||
public AbstractProjectionOperationBuilder(Object value, ProjectionOperation operation) {
|
||||
|
||||
Assert.notNull(value, "value must not be null or empty!");
|
||||
Assert.notNull(operation, "ProjectionOperation must not be null!");
|
||||
|
||||
this.value = value;
|
||||
this.operation = operation;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return this.operation.toDBObject(context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link ProjectionOperation} that is finally to be applied, using the given alias.
*
|
||||
* @param alias will never be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public abstract ProjectionOperation as(String alias);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public static class ExpressionProjectionOperationBuilder extends AbstractProjectionOperationBuilder {
|
||||
|
||||
private final Object[] params;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExpressionProjectionOperationBuilder} for the given value, {@link ProjectionOperation} and
|
||||
* parameters.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @param operation must not be {@literal null}.
|
||||
* @param parameters
|
||||
*/
|
||||
public ExpressionProjectionOperationBuilder(Object value, ProjectionOperation operation, Object[] parameters) {
|
||||
|
||||
super(value, operation);
|
||||
this.params = parameters.clone();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.AbstractProjectionOperationBuilder#as(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public ProjectionOperation as(String alias) {
|
||||
|
||||
Field expressionField = Fields.field(alias, alias);
|
||||
return this.operation.and(new ExpressionProjection(expressionField, this.value.toString(), params));
|
||||
}
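andExpression projects the result of a SpEL expression, with the given parameters exposed as [0] ... [n]. A hedged example; the netPrice field and the 1.19 factor are placeholders:

    ProjectionOperation projection = Aggregation.project("netPrice")
            .andExpression("netPrice * [0]", 1.19).as("grossPrice");
    // Renders roughly as:
    // { "$project" : { "netPrice" : 1, "grossPrice" : { "$multiply" : [ "$netPrice", 1.19 ] } } }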
/**
|
||||
* A {@link Projection} based on a SpEL expression.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
static class ExpressionProjection extends Projection {
|
||||
|
||||
private static final SpelExpressionTransformer TRANSFORMER = new SpelExpressionTransformer();
|
||||
|
||||
private final String expression;
|
||||
private final Object[] params;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExpressionProjection} for the given field, SpEL expression and parameters.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @param expression must not be {@literal null} or empty.
|
||||
* @param parameters must not be {@literal null}.
|
||||
*/
|
||||
public ExpressionProjection(Field field, String expression, Object[] parameters) {
|
||||
|
||||
super(field);
|
||||
|
||||
Assert.hasText(expression, "Expression must not be null!");
|
||||
Assert.notNull(parameters, "Parameters must not be null!");
|
||||
|
||||
this.expression = expression;
|
||||
this.params = parameters.clone();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject(getExposedField().getName(), TRANSFORMER.transform(expression, context, params));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for {@link ProjectionOperation}s on a field.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public static class ProjectionOperationBuilder implements AggregationOperation {
|
||||
public static class ProjectionOperationBuilder extends AbstractProjectionOperationBuilder {
|
||||
|
||||
private static final String NUMBER_NOT_NULL = "Number must not be null!";
|
||||
private static final String FIELD_REFERENCE_NOT_NULL = "Field reference must not be null!";
|
||||
|
||||
private final String name;
|
||||
private final ProjectionOperation operation;
|
||||
@@ -200,9 +332,7 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
* @param previousProjection the previous operation projection, may be {@literal null}.
|
||||
*/
|
||||
public ProjectionOperationBuilder(String name, ProjectionOperation operation, OperationProjection previousProjection) {
|
||||
|
||||
Assert.hasText(name, "Field name must not be null or empty!");
|
||||
Assert.notNull(operation, "ProjectionOperation must not be null!");
|
||||
super(name, operation);
|
||||
|
||||
this.name = name;
|
||||
this.operation = operation;
|
||||
@@ -237,10 +367,11 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
* @param string
|
||||
* @return
|
||||
*/
|
||||
@Override
|
||||
public ProjectionOperation as(String alias) {
|
||||
|
||||
if (previousProjection != null) {
|
||||
return this.operation.andReplaceLastOneWith(previousProjection.withAlias(alias));
|
||||
if (this.previousProjection != null) {
|
||||
return this.operation.andReplaceLastOneWith(this.previousProjection.withAlias(alias));
|
||||
} else {
|
||||
return this.operation.and(new FieldProjection(Fields.field(alias, name), null));
|
||||
}
|
||||
@@ -254,10 +385,22 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
*/
|
||||
public ProjectionOperationBuilder plus(Number number) {
|
||||
|
||||
Assert.notNull(number, "Number must not be null!");
|
||||
Assert.notNull(number, NUMBER_NOT_NULL);
|
||||
return project("add", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $add} expression that adds the value of the given field to the previously mentioned field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder plus(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("add", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $subtract} expression that subtracts the given number from the previously mentioned field.
*
|
||||
@@ -270,6 +413,19 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
return project("subtract", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $subtract} expression that subtracts the value of the given field from the previously mentioned
* field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder minus(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, FIELD_REFERENCE_NOT_NULL);
|
||||
return project("subtract", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $multiply} expression that multiplies the given number with the previously mentioned field.
|
||||
*
|
||||
@@ -278,10 +434,23 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
*/
|
||||
public ProjectionOperationBuilder multiply(Number number) {
|
||||
|
||||
Assert.notNull(number, "Number must not be null!");
|
||||
Assert.notNull(number, NUMBER_NOT_NULL);
|
||||
return project("multiply", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $multiply} expression that multiplies the value of the given field with the previously
|
||||
* mentioned field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder multiply(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, FIELD_REFERENCE_NOT_NULL);
|
||||
return project("multiply", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $divide} expression that divides the previously mentioned field by the given number.
|
||||
*
|
||||
@@ -290,11 +459,24 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
*/
|
||||
public ProjectionOperationBuilder divide(Number number) {
|
||||
|
||||
Assert.notNull(number, "Number must not be null!");
|
||||
Assert.notNull(number, FIELD_REFERENCE_NOT_NULL);
|
||||
Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero!");
|
||||
return project("divide", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $divide} expression that divides the value of the given field by the previously mentioned
|
||||
* field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder divide(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, FIELD_REFERENCE_NOT_NULL);
|
||||
return project("divide", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $mod} expression that divides the previously mentioned field by the given number and returns
|
||||
* the remainder.
|
||||
@@ -304,12 +486,26 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
*/
|
||||
public ProjectionOperationBuilder mod(Number number) {
|
||||
|
||||
Assert.notNull(number, "Number must not be null!");
|
||||
Assert.notNull(number, NUMBER_NOT_NULL);
|
||||
Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero!");
|
||||
return project("mod", number);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
/**
|
||||
* Generates an {@code $mod} expression that divides the value of the given field by the previously mentioned field
|
||||
* and returns the remainder.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder mod(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, FIELD_REFERENCE_NOT_NULL);
|
||||
return project("mod", Fields.field(fieldReference));
|
||||
}
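The new String-based overloads let two fields be combined arithmetically instead of only combining a field with a constant. A sketch with placeholder field names:

    ProjectionOperation projection = Aggregation.project()
            .and("a").plus("b").as("sum")
            .and("a").divide("b").as("ratio");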
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
@@ -362,6 +558,7 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
* A {@link FieldProjection} to map a result of a previous {@link AggregationOperation} to a new field.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
static class FieldProjection extends Projection {
|
||||
|
||||
@@ -386,20 +583,31 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to easily create {@link FieldProjection}s for the given {@link Fields}. Fields are projected as
|
||||
* references with their given name. A field {@code foo} will be projected as: {@code foo : 1 } .
|
||||
*
|
||||
* @param fields the {@link Fields} to in- or exclude, must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static List<? extends Projection> from(Fields fields) {
|
||||
return from(fields, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to easily create {@link FieldProjection}s for the given {@link Fields}.
|
||||
*
|
||||
* @param fields the {@link Fields} to in- or exclude, must not be {@literal null}.
|
||||
* @param include whether to include or exclude the fields.
|
||||
* @param value to use for the given field.
|
||||
* @return
|
||||
*/
|
||||
public static List<FieldProjection> from(Fields fields, boolean include) {
|
||||
public static List<FieldProjection> from(Fields fields, Object value) {
|
||||
|
||||
Assert.notNull(fields, "Fields must not be null!");
|
||||
List<FieldProjection> projections = new ArrayList<FieldProjection>();
|
||||
|
||||
for (Field field : fields) {
|
||||
projections.add(new FieldProjection(field, include ? null : 0));
|
||||
projections.add(new FieldProjection(field, value));
|
||||
}
|
||||
|
||||
return projections;
|
||||
@@ -411,13 +619,24 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject(field.getName(), renderFieldValue(context));
|
||||
}
|
||||
|
||||
if (value != null) {
|
||||
return new BasicDBObject(field.getName(), value);
|
||||
private Object renderFieldValue(AggregationOperationContext context) {
|
||||
|
||||
// implicit reference or explicit include?
|
||||
if (value == null || Boolean.TRUE.equals(value)) {
|
||||
|
||||
// check whether referenced field exists in the context
|
||||
return context.getReference(field).getReferenceValue();
|
||||
|
||||
} else if (Boolean.FALSE.equals(value)) {
|
||||
|
||||
// render field as excluded
|
||||
return 0;
|
||||
}
|
||||
|
||||
FieldReference reference = context.getReference(field.getTarget());
|
||||
return new BasicDBObject(field.getName(), reference.toString());
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,513 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import static org.springframework.data.mongodb.util.DBObjectUtils.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.core.GenericTypeResolver;
|
||||
import org.springframework.data.mongodb.core.spel.ExpressionNode;
|
||||
import org.springframework.data.mongodb.core.spel.ExpressionTransformationContextSupport;
|
||||
import org.springframework.data.mongodb.core.spel.LiteralNode;
|
||||
import org.springframework.data.mongodb.core.spel.MethodReferenceNode;
|
||||
import org.springframework.data.mongodb.core.spel.OperatorNode;
|
||||
import org.springframework.expression.spel.ExpressionState;
|
||||
import org.springframework.expression.spel.SpelNode;
|
||||
import org.springframework.expression.spel.SpelParserConfiguration;
|
||||
import org.springframework.expression.spel.ast.CompoundExpression;
|
||||
import org.springframework.expression.spel.ast.Indexer;
|
||||
import org.springframework.expression.spel.ast.InlineList;
|
||||
import org.springframework.expression.spel.ast.PropertyOrFieldReference;
|
||||
import org.springframework.expression.spel.standard.SpelExpression;
|
||||
import org.springframework.expression.spel.standard.SpelExpressionParser;
|
||||
import org.springframework.expression.spel.support.StandardEvaluationContext;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.NumberUtils;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Renders the AST of a SpEL expression as a MongoDB Aggregation Framework projection expression.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
|
||||
// TODO: remove explicit usage of a configuration once SPR-11031 gets fixed
|
||||
private static final SpelParserConfiguration CONFIG = new SpelParserConfiguration(false, false);
|
||||
private static final SpelExpressionParser PARSER = new SpelExpressionParser(CONFIG);
|
||||
private final List<ExpressionNodeConversion<? extends ExpressionNode>> conversions;
|
||||
|
||||
/**
|
||||
* Creates a new {@link SpelExpressionTransformer}.
|
||||
*/
|
||||
public SpelExpressionTransformer() {
|
||||
|
||||
List<ExpressionNodeConversion<? extends ExpressionNode>> conversions = new ArrayList<ExpressionNodeConversion<? extends ExpressionNode>>();
|
||||
conversions.add(new OperatorNodeConversion(this));
|
||||
conversions.add(new LiteralNodeConversion(this));
|
||||
conversions.add(new IndexerNodeConversion(this));
|
||||
conversions.add(new InlineListNodeConversion(this));
|
||||
conversions.add(new PropertyOrFieldReferenceNodeConversion(this));
|
||||
conversions.add(new CompoundExpressionNodeConversion(this));
|
||||
conversions.add(new MethodReferenceNodeConversion(this));
|
||||
|
||||
this.conversions = Collections.unmodifiableList(conversions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms the given SpEL expression to a corresponding MongoDB expression against the given
|
||||
* {@link AggregationOperationContext} {@code context}.
|
||||
* <p>
|
||||
* Exposes the given {@code params} as <code>[0] ... [n]</code>.
*
|
||||
* @param expression must not be {@literal null}
|
||||
* @param context must not be {@literal null}
|
||||
* @param params must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public Object transform(String expression, AggregationOperationContext context, Object... params) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
Assert.notNull(context, "AggregationOperationContext must not be null!");
|
||||
Assert.notNull(params, "Parameters must not be null!");
|
||||
|
||||
SpelExpression spelExpression = (SpelExpression) PARSER.parseExpression(expression);
|
||||
ExpressionState state = new ExpressionState(new StandardEvaluationContext(params), CONFIG);
|
||||
ExpressionNode node = ExpressionNode.from(spelExpression.getAST(), state);
|
||||
|
||||
return transform(new AggregationExpressionTransformationContext<ExpressionNode>(node, null, null, context));
|
||||
}
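A hedged illustration of the transformation, assuming rootContext is an AggregationOperationContext that exposes fields a and b; operator precedence follows the SpEL AST:

    SpelExpressionTransformer transformer = new SpelExpressionTransformer();
    Object expression = transformer.transform("a + b * [0]", rootContext, 2);
    // Yields roughly: { "$add" : [ "$a", { "$multiply" : [ "$b", 2 ] } ] }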
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.spel.ExpressionTransformer#transform(org.springframework.data.mongodb.core.spel.ExpressionTransformationContextSupport)
|
||||
*/
|
||||
public Object transform(AggregationExpressionTransformationContext<ExpressionNode> context) {
|
||||
return lookupConversionFor(context.getCurrentNode()).convert(context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an appropriate {@link ExpressionNodeConversion} for the given {@code node}. Throws an
|
||||
* {@link IllegalArgumentException} if no conversion could be found.
|
||||
*
|
||||
* @param node
|
||||
* @return the appropriate {@link ExpressionNodeConversion} for the given {@link ExpressionNode}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
private ExpressionNodeConversion<ExpressionNode> lookupConversionFor(ExpressionNode node) {
|
||||
|
||||
for (ExpressionNodeConversion<? extends ExpressionNode> candidate : conversions) {
|
||||
if (candidate.supports(node)) {
|
||||
return (ExpressionNodeConversion<ExpressionNode>) candidate;
|
||||
}
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException("Unsupported Element: " + node + " Type: " + node.getClass()
|
||||
+ " You probably have a syntax error in your SpEL expression!");
|
||||
}
|
||||
|
||||
/**
|
||||
* Abstract base class for {@link SpelNode} to (Db)-object conversions.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static abstract class ExpressionNodeConversion<T extends ExpressionNode> implements
|
||||
AggregationExpressionTransformer {
|
||||
|
||||
private final AggregationExpressionTransformer transformer;
|
||||
private final Class<? extends ExpressionNode> nodeType;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExpressionNodeConversion}.
|
||||
*
|
||||
* @param transformer must not be {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public ExpressionNodeConversion(AggregationExpressionTransformer transformer) {
|
||||
|
||||
Assert.notNull(transformer, "Transformer must not be null!");
|
||||
|
||||
this.nodeType = (Class<? extends ExpressionNode>) GenericTypeResolver.resolveTypeArgument(this.getClass(),
|
||||
ExpressionNodeConversion.class);
|
||||
this.transformer = transformer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the current conversion supports the given {@link ExpressionNode}. By default we will match the
|
||||
* node type against the generic type the subclass binds the type parameter to.
*
|
||||
* @param node will never be {@literal null}.
|
||||
* @return true if {@literal this} conversion can be applied to the given {@code node}.
|
||||
*/
|
||||
protected boolean supports(ExpressionNode node) {
|
||||
return nodeType.equals(node.getClass());
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers the transformation for the given {@link ExpressionNode} and the given current context.
|
||||
*
|
||||
* @param node must not be {@literal null}.
|
||||
* @param context must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
protected Object transform(ExpressionNode node, AggregationExpressionTransformationContext<?> context) {
|
||||
|
||||
Assert.notNull(node, "ExpressionNode must not be null!");
|
||||
Assert.notNull(context, "AggregationExpressionTransformationContext must not be null!");
|
||||
|
||||
return transform(node, context.getParentNode(), null, context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers the transformation with the given new {@link ExpressionNode}, new parent node, the current operation and
|
||||
* the previous context.
|
||||
*
|
||||
* @param node must not be {@literal null}.
|
||||
* @param parent
|
||||
* @param operation
|
||||
* @param context must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
protected Object transform(ExpressionNode node, ExpressionNode parent, DBObject operation,
|
||||
AggregationExpressionTransformationContext<?> context) {
|
||||
|
||||
Assert.notNull(node, "ExpressionNode must not be null!");
|
||||
Assert.notNull(context, "AggregationExpressionTransformationContext must not be null!");
|
||||
|
||||
return transform(new AggregationExpressionTransformationContext<ExpressionNode>(node, parent, operation,
|
||||
context.getAggregationContext()));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#transform(org.springframework.data.mongodb.core.aggregation.AggregationExpressionTransformer.AggregationExpressionTransformationContext)
|
||||
*/
|
||||
@Override
|
||||
public Object transform(AggregationExpressionTransformationContext<ExpressionNode> context) {
|
||||
return transformer.transform(context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs the actual conversion from {@link SpelNode} to the corresponding representation for MongoDB.
|
||||
*
|
||||
* @param context
|
||||
* @return
|
||||
*/
|
||||
protected abstract Object convert(AggregationExpressionTransformationContext<T> context);
|
||||
}
|
||||
|
||||
/**
 * A {@link ExpressionNodeConversion} that converts arithmetic operations.
 *
 * @author Thomas Darimont
 */
private static class OperatorNodeConversion extends ExpressionNodeConversion<OperatorNode> {

	public OperatorNodeConversion(AggregationExpressionTransformer transformer) {
		super(transformer);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
	 */
	@Override
	protected Object convert(AggregationExpressionTransformationContext<OperatorNode> context) {

		OperatorNode currentNode = context.getCurrentNode();

		DBObject operationObject = createOperationObjectAndAddToPreviousArgumentsIfNecessary(context, currentNode);
		Object leftResult = transform(currentNode.getLeft(), currentNode, operationObject, context);

		if (currentNode.isUnaryMinus()) {
			return convertUnaryMinusOp(context, leftResult);
		}

		// we deliberately ignore the RHS result
		transform(currentNode.getRight(), currentNode, operationObject, context);

		return operationObject;
	}

	private DBObject createOperationObjectAndAddToPreviousArgumentsIfNecessary(
			AggregationExpressionTransformationContext<OperatorNode> context, OperatorNode currentNode) {

		DBObject nextDbObject = new BasicDBObject(currentNode.getMongoOperator(), new BasicDBList());

		if (!context.hasPreviousOperation()) {
			return nextDbObject;
		}

		if (context.parentIsSameOperation()) {

			// same operator applied in a row e.g. 1 + 2 + 3 carry on with the operation and render as $add: [1, 2, 3]
			nextDbObject = context.getPreviousOperationObject();
		} else if (!currentNode.isUnaryOperator()) {

			// different operator -> add context object for next level to list of arguments of previous expression
			context.addToPreviousOperation(nextDbObject);
		}

		return nextDbObject;
	}

	private Object convertUnaryMinusOp(ExpressionTransformationContextSupport<OperatorNode> context, Object leftResult) {

		Object result = leftResult instanceof Number ? leftResult
				: new BasicDBObject("$multiply", dbList(-1, leftResult));

		if (leftResult != null && context.hasPreviousOperation()) {
			context.addToPreviousOperation(result);
		}

		return result;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#supports(java.lang.Class)
	 */
	@Override
	protected boolean supports(ExpressionNode node) {
		return node.isMathematicalOperation();
	}
}
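
A hedged usage sketch for context (the `Order` collection and the `netPrice`/`surcharge` fields are illustrative, not taken from the change above): an arithmetic SpEL expression handed to the projection builder would be rendered by this conversion as a single operator document, e.g. `{ $project: { total: { $add: [ "$netPrice", "$surcharge" ] } } }`. Repeated occurrences of the same operator (`a + b + c`) are collapsed into one argument list, as the comment in `createOperationObjectAndAddToPreviousArgumentsIfNecessary` notes.

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;

import com.mongodb.DBObject;

class TotalPriceSample {

	// Sketch only: assumes a collection named "order" with numeric
	// "netPrice" and "surcharge" fields.
	AggregationResults<DBObject> totalPerOrder(MongoTemplate template) {

		Aggregation aggregation = newAggregation( //
				project("netPrice", "surcharge") //
						.andExpression("netPrice + surcharge").as("total"));

		return template.aggregate(aggregation, "order", DBObject.class);
	}
}
```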
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts indexed expressions.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class IndexerNodeConversion extends ExpressionNodeConversion<ExpressionNode> {
|
||||
|
||||
public IndexerNodeConversion(AggregationExpressionTransformer transformer) {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@Override
|
||||
protected Object convert(AggregationExpressionTransformationContext<ExpressionNode> context) {
|
||||
return context.addToPreviousOrReturn(context.getCurrentNode().getValue());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#supports(org.springframework.data.mongodb.core.spel.ExpressionNode)
|
||||
*/
|
||||
@Override
|
||||
protected boolean supports(ExpressionNode node) {
|
||||
return node.isOfType(Indexer.class);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts in-line list expressions.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class InlineListNodeConversion extends ExpressionNodeConversion<ExpressionNode> {
|
||||
|
||||
public InlineListNodeConversion(AggregationExpressionTransformer transformer) {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@Override
|
||||
protected Object convert(AggregationExpressionTransformationContext<ExpressionNode> context) {
|
||||
|
||||
ExpressionNode currentNode = context.getCurrentNode();
|
||||
|
||||
if (!currentNode.hasChildren()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// just take the first item
|
||||
return transform(currentNode.getChild(0), currentNode, null, context);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#supports(org.springframework.data.mongodb.core.spel.ExpressionNode)
|
||||
*/
|
||||
@Override
|
||||
protected boolean supports(ExpressionNode node) {
|
||||
return node.isOfType(InlineList.class);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts property or field reference expressions.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class PropertyOrFieldReferenceNodeConversion extends ExpressionNodeConversion<ExpressionNode> {
|
||||
|
||||
public PropertyOrFieldReferenceNodeConversion(AggregationExpressionTransformer transformer) {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#convert(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionTransformationContext)
|
||||
*/
|
||||
@Override
|
||||
protected Object convert(AggregationExpressionTransformationContext<ExpressionNode> context) {
|
||||
|
||||
String fieldReference = context.getFieldReference().toString();
|
||||
return context.addToPreviousOrReturn(fieldReference);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#supports(org.springframework.data.mongodb.core.spel.ExpressionNode)
|
||||
*/
|
||||
@Override
|
||||
protected boolean supports(ExpressionNode node) {
|
||||
return node.isOfType(PropertyOrFieldReference.class);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * A {@link ExpressionNodeConversion} that converts literal expressions.
 *
 * @author Thomas Darimont
 * @author Oliver Gierke
 */
private static class LiteralNodeConversion extends ExpressionNodeConversion<LiteralNode> {

	public LiteralNodeConversion(AggregationExpressionTransformer transformer) {
		super(transformer);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
	 */
	@Override
	@SuppressWarnings("unchecked")
	protected Object convert(AggregationExpressionTransformationContext<LiteralNode> context) {

		LiteralNode node = context.getCurrentNode();
		Object value = node.getValue();

		if (context.hasPreviousOperation()) {

			if (node.isUnaryMinus(context.getParentNode())) {
				// unary minus operator
				return NumberUtils.convertNumberToTargetClass(((Number) value).doubleValue() * -1,
						(Class<Number>) value.getClass()); // retain type, e.g. int to -int
			}

			return context.addToPreviousOperation(value);
		}

		return value;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#supports(org.springframework.expression.spel.SpelNode)
	 */
	@Override
	protected boolean supports(ExpressionNode node) {
		return node.isLiteral();
	}
}
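
The unary-minus handling above, together with `OperatorNodeConversion.convertUnaryMinusOp`, distinguishes negated literals from negated field references. A minimal sketch of the two expected shapes, built by hand with the driver API (the field name `price` is made up):

```java
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

class UnaryMinusRenderingSample {

	// "-price" has no literal operand, so it is rendered as { $multiply: [ -1, "$price" ] }
	DBObject negatedFieldReference() {

		BasicDBList args = new BasicDBList();
		args.add(-1);
		args.add("$price");

		return new BasicDBObject("$multiply", args);
	}

	// "-2" is a plain literal, so the sign is folded into the number while retaining its type
	Object negatedLiteral() {
		return -2;
	}
}
```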
|
||||
|
||||
/**
 * A {@link ExpressionNodeConversion} that converts method reference expressions.
 *
 * @author Thomas Darimont
 * @author Oliver Gierke
 */
private static class MethodReferenceNodeConversion extends ExpressionNodeConversion<MethodReferenceNode> {

	public MethodReferenceNodeConversion(AggregationExpressionTransformer transformer) {
		super(transformer);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
	 */
	@Override
	protected Object convert(AggregationExpressionTransformationContext<MethodReferenceNode> context) {

		MethodReferenceNode node = context.getCurrentNode();
		List<Object> args = new ArrayList<Object>();

		for (ExpressionNode childNode : node) {
			args.add(transform(childNode, context));
		}

		return context.addToPreviousOrReturn(new BasicDBObject(node.getMethodName(), dbList(args.toArray())));
	}
}
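
A hedged example of what this conversion produces, assuming `concat` is among the method names that `MethodReferenceNode` maps to an aggregation operator (the field names are illustrative): the expression below would be rendered as `{ $concat: [ "$lastname", "$firstname" ] }`.

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.aggregation.Aggregation;

class MethodReferenceSample {

	// Sketch only: a method-style SpEL call is turned into the corresponding operator document.
	Aggregation fullNameProjection() {
		return newAggregation(project().andExpression("concat(lastname, firstname)").as("fullName"));
	}
}
```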
|
||||
|
||||
/**
|
||||
* A {@link ExpressionNodeConversion} that converts method compound expressions.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class CompoundExpressionNodeConversion extends ExpressionNodeConversion<ExpressionNode> {
|
||||
|
||||
public CompoundExpressionNodeConversion(AggregationExpressionTransformer transformer) {
|
||||
super(transformer);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext)
|
||||
*/
|
||||
@Override
|
||||
protected Object convert(AggregationExpressionTransformationContext<ExpressionNode> context) {
|
||||
|
||||
ExpressionNode currentNode = context.getCurrentNode();
|
||||
|
||||
if (currentNode.hasfirstChildNotOfType(Indexer.class)) {
|
||||
// we have a property path expression like: foo.bar -> render as reference
|
||||
return context.addToPreviousOrReturn(context.getFieldReference().toString());
|
||||
}
|
||||
|
||||
return context.addToPreviousOrReturn(currentNode.getValue());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#supports(org.springframework.data.mongodb.core.spel.ExpressionNode)
|
||||
*/
|
||||
@Override
|
||||
protected boolean supports(ExpressionNode node) {
|
||||
return node.isOfType(CompoundExpression.class);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -71,9 +71,9 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
|
||||
return mapper.getMappedObject(dbObject, mappingContext.getPersistentEntity(type));
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.ExposedFields.AvailableField)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.Field)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
@@ -88,15 +88,16 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(String name) {
|
||||
PropertyPath path = PropertyPath.from(name, type);
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(path);
|
||||
|
||||
return getReferenceFor(field(path.getLeafProperty().getSegment(),
|
||||
propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)));
|
||||
return getReferenceFor(field(name));
|
||||
}
|
||||
|
||||
private FieldReference getReferenceFor(Field field) {
|
||||
return new FieldReference(new ExposedField(field, true));
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(
|
||||
field.getTarget(), type);
|
||||
Field mappedField = field(propertyPath.getLeafProperty().getName(),
|
||||
propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE));
|
||||
|
||||
return new FieldReference(new ExposedField(mappedField, true));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,7 +29,7 @@ import com.mongodb.DBObject;
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class UnwindOperation extends ExposedFieldsAggregationOperationContext implements AggregationOperation {
|
||||
public class UnwindOperation implements AggregationOperation {
|
||||
|
||||
private final ExposedField field;
|
||||
|
||||
@@ -44,15 +44,6 @@ public class UnwindOperation extends ExposedFieldsAggregationOperationContext im
|
||||
this.field = new ExposedField(field, true);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ExposedFieldsAggregationOperationContext#getFields()
|
||||
*/
|
||||
@Override
|
||||
protected ExposedFields getFields() {
|
||||
return ExposedFields.from(field);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,12 +17,14 @@ package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -55,6 +57,7 @@ import org.springframework.util.Assert;
|
||||
* .
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class CustomConversions {
|
||||
|
||||
@@ -66,7 +69,7 @@ public class CustomConversions {
|
||||
private final Set<ConvertiblePair> writingPairs;
|
||||
private final Set<Class<?>> customSimpleTypes;
|
||||
private final SimpleTypeHolder simpleTypeHolder;
|
||||
private final Map<Class<?>, HashMap<Class<?>, CacheValue>> cache;
|
||||
private final ConcurrentMap<ConvertiblePair, CacheValue> customReadTargetTypes;
|
||||
|
||||
private final List<Object> converters;
|
||||
|
||||
@@ -86,27 +89,33 @@ public class CustomConversions {

		Assert.notNull(converters);

		this.readingPairs = new HashSet<ConvertiblePair>();
		this.writingPairs = new HashSet<ConvertiblePair>();
		this.readingPairs = new LinkedHashSet<ConvertiblePair>();
		this.writingPairs = new LinkedHashSet<ConvertiblePair>();
		this.customSimpleTypes = new HashSet<Class<?>>();
		this.cache = new HashMap<Class<?>, HashMap<Class<?>, CacheValue>>();
		this.customReadTargetTypes = new ConcurrentHashMap<GenericConverter.ConvertiblePair, CacheValue>();

		this.converters = new ArrayList<Object>();
		this.converters.add(CustomToStringConverter.INSTANCE);
		this.converters.add(BigDecimalToStringConverter.INSTANCE);
		this.converters.add(StringToBigDecimalConverter.INSTANCE);
		this.converters.add(BigIntegerToStringConverter.INSTANCE);
		this.converters.add(StringToBigIntegerConverter.INSTANCE);
		this.converters.add(URLToStringConverter.INSTANCE);
		this.converters.add(StringToURLConverter.INSTANCE);
		this.converters.add(DBObjectToStringConverter.INSTANCE);
		this.converters.addAll(JodaTimeConverters.getConvertersToRegister());
		this.converters.addAll(converters);
		List<Object> toRegister = new ArrayList<Object>();

		for (Object c : this.converters) {
		toRegister.addAll(converters);
		toRegister.add(CustomToStringConverter.INSTANCE);
		toRegister.add(BigDecimalToStringConverter.INSTANCE);
		toRegister.add(StringToBigDecimalConverter.INSTANCE);
		toRegister.add(BigIntegerToStringConverter.INSTANCE);
		toRegister.add(StringToBigIntegerConverter.INSTANCE);
		toRegister.add(URLToStringConverter.INSTANCE);
		toRegister.add(StringToURLConverter.INSTANCE);
		toRegister.add(DBObjectToStringConverter.INSTANCE);
		toRegister.addAll(JodaTimeConverters.getConvertersToRegister());

		// Add user provided converters to make sure they can override the defaults

		for (Object c : toRegister) {
			registerConversion(c);
		}

		Collections.reverse(toRegister);

		this.converters = Collections.unmodifiableList(toRegister);
		this.simpleTypeHolder = new SimpleTypeHolder(customSimpleTypes, MongoSimpleTypes.HOLDER);
	}
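
The reworked registration order above is what lets user-supplied converters take precedence over the built-in defaults. A sketch of what that enables (the converter below is illustrative and not part of this change): a user converter for `BigDecimal` overrides the default `BigDecimalToStringConverter`.

```java
import java.math.BigDecimal;
import java.util.Arrays;

import org.springframework.core.convert.converter.Converter;
import org.springframework.data.convert.WritingConverter;
import org.springframework.data.mongodb.core.convert.CustomConversions;

class CustomConversionsSample {

	@WritingConverter
	enum BigDecimalToDoubleConverter implements Converter<BigDecimal, Double> {

		INSTANCE;

		@Override
		public Double convert(BigDecimal source) {
			return source.doubleValue();
		}
	}

	CustomConversions conversions() {
		// Registered after the defaults, so BigDecimal values are now written as doubles, not strings.
		return new CustomConversions(Arrays.asList(BigDecimalToDoubleConverter.INSTANCE));
	}
}
```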
|
||||
|
||||
@@ -194,25 +203,25 @@ public class CustomConversions {
|
||||
*
|
||||
* @param pair
|
||||
*/
|
||||
private void register(ConverterRegistration context) {
|
||||
private void register(ConverterRegistration converterRegistration) {
|
||||
|
||||
ConvertiblePair pair = context.getConvertiblePair();
|
||||
ConvertiblePair pair = converterRegistration.getConvertiblePair();
|
||||
|
||||
if (context.isReading()) {
|
||||
if (converterRegistration.isReading()) {
|
||||
|
||||
readingPairs.add(pair);
|
||||
|
||||
if (LOG.isWarnEnabled() && !context.isSimpleSourceType()) {
|
||||
if (LOG.isWarnEnabled() && !converterRegistration.isSimpleSourceType()) {
|
||||
LOG.warn(String.format(READ_CONVERTER_NOT_SIMPLE, pair.getSourceType(), pair.getTargetType()));
|
||||
}
|
||||
}
|
||||
|
||||
if (context.isWriting()) {
|
||||
if (converterRegistration.isWriting()) {
|
||||
|
||||
writingPairs.add(pair);
|
||||
customSimpleTypes.add(pair.getSourceType());
|
||||
|
||||
if (LOG.isWarnEnabled() && !context.isSimpleTargetType()) {
|
||||
if (LOG.isWarnEnabled() && !converterRegistration.isSimpleTargetType()) {
|
||||
LOG.warn(String.format(WRITE_CONVERTER_NOT_SIMPLE, pair.getSourceType(), pair.getTargetType()));
|
||||
}
|
||||
}
|
||||
@@ -222,11 +231,11 @@ public class CustomConversions {
|
||||
* Returns the target type to convert to in case we have a custom conversion registered to convert the given source
|
||||
* type into a Mongo native one.
|
||||
*
|
||||
* @param source must not be {@literal null}
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public Class<?> getCustomWriteTarget(Class<?> source) {
|
||||
return getCustomWriteTarget(source, null);
|
||||
public Class<?> getCustomWriteTarget(Class<?> sourceType) {
|
||||
return getCustomWriteTarget(sourceType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -234,71 +243,78 @@ public class CustomConversions {
|
||||
 * of the given expected type though. If {@code expectedTargetType} is {@literal null} we will simply return the
|
||||
* first target type matching or {@literal null} if no conversion can be found.
|
||||
*
|
||||
* @param source must not be {@literal null}
|
||||
* @param expectedTargetType
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @param requestedTargetType
|
||||
* @return
|
||||
*/
|
||||
public Class<?> getCustomWriteTarget(Class<?> source, Class<?> expectedTargetType) {
|
||||
Assert.notNull(source);
|
||||
return getCustomTarget(source, expectedTargetType, writingPairs);
|
||||
public Class<?> getCustomWriteTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
|
||||
return getCustomTarget(sourceType, requestedTargetType, writingPairs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether we have a custom conversion registered to write into a Mongo native type. The returned type might
|
||||
* be a subclass oth the given expected type though.
|
||||
* be a subclass of the given expected type though.
|
||||
*
|
||||
* @param source must not be {@literal null}
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public boolean hasCustomWriteTarget(Class<?> source) {
|
||||
return hasCustomWriteTarget(source, null);
|
||||
public boolean hasCustomWriteTarget(Class<?> sourceType) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
return hasCustomWriteTarget(sourceType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether we have a custom conversion registered to write an object of the given source type into an object
|
||||
* of the given Mongo native target type.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param expectedTargetType
|
||||
* @param sourceType must not be {@literal null}.
|
||||
* @param requestedTargetType
|
||||
* @return
|
||||
*/
|
||||
public boolean hasCustomWriteTarget(Class<?> source, Class<?> expectedTargetType) {
|
||||
return getCustomWriteTarget(source, expectedTargetType) != null;
|
||||
public boolean hasCustomWriteTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
return getCustomWriteTarget(sourceType, requestedTargetType) != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether we have a custom conversion registered to read the given source into the given target type.
|
||||
*
|
||||
* @param source must not be {@literal null}
|
||||
* @param expectedTargetType must not be {@literal null}
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @param requestedTargetType must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public boolean hasCustomReadTarget(Class<?> source, Class<?> expectedTargetType) {
|
||||
public boolean hasCustomReadTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
|
||||
Assert.notNull(source);
|
||||
Assert.notNull(expectedTargetType);
|
||||
Assert.notNull(sourceType);
|
||||
Assert.notNull(requestedTargetType);
|
||||
|
||||
return getCustomReadTarget(source, expectedTargetType) != null;
|
||||
return getCustomReadTarget(sourceType, requestedTargetType) != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Inspects the given {@link ConvertiblePair} for ones that have a source compatible type as source. Additionally
|
||||
* checks assignabilty of the target type if one is given.
|
||||
* checks assignability of the target type if one is given.
|
||||
*
|
||||
* @param source must not be {@literal null}
|
||||
* @param expectedTargetType
|
||||
* @param pairs must not be {@literal null}
|
||||
* @param sourceType must not be {@literal null}.
|
||||
* @param requestedTargetType can be {@literal null}.
|
||||
* @param pairs must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private static Class<?> getCustomTarget(Class<?> source, Class<?> expectedTargetType, Iterable<ConvertiblePair> pairs) {
|
||||
private static Class<?> getCustomTarget(Class<?> sourceType, Class<?> requestedTargetType,
|
||||
Iterable<ConvertiblePair> pairs) {
|
||||
|
||||
Assert.notNull(source);
|
||||
Assert.notNull(sourceType);
|
||||
Assert.notNull(pairs);
|
||||
|
||||
for (ConvertiblePair typePair : pairs) {
|
||||
if (typePair.getSourceType().isAssignableFrom(source)) {
|
||||
if (typePair.getSourceType().isAssignableFrom(sourceType)) {
|
||||
Class<?> targetType = typePair.getTargetType();
|
||||
if (expectedTargetType == null || targetType.isAssignableFrom(expectedTargetType)) {
|
||||
if (requestedTargetType == null || targetType.isAssignableFrom(requestedTargetType)) {
|
||||
return targetType;
|
||||
}
|
||||
}
|
||||
@@ -307,27 +323,33 @@ public class CustomConversions {
		return null;
	}

	private Class<?> getCustomReadTarget(Class<?> source, Class<?> expectedTargetType) {
	/**
	 * Returns the actual target type for the given {@code sourceType} and {@code requestedTargetType}. Note that the
	 * returned {@link Class} could be an assignable type to the given {@code requestedTargetType}.
	 *
	 * @param sourceType must not be {@literal null}.
	 * @param requestedTargetType can be {@literal null}.
	 * @return
	 */
	private Class<?> getCustomReadTarget(Class<?> sourceType, Class<?> requestedTargetType) {

		Class<?> type = expectedTargetType == null ? PlaceholderType.class : expectedTargetType;
		Assert.notNull(sourceType);

		Map<Class<?>, CacheValue> map;
		CacheValue toReturn;

		if ((map = cache.get(source)) == null || (toReturn = map.get(type)) == null) {

			Class<?> target = getCustomTarget(source, type, readingPairs);

			if (cache.get(source) == null) {
				cache.put(source, new HashMap<Class<?>, CacheValue>());
			}

			Map<Class<?>, CacheValue> value = cache.get(source);
			toReturn = target == null ? CacheValue.NULL : new CacheValue(target);
			value.put(type, toReturn);
		if (requestedTargetType == null) {
			return null;
		}

		return toReturn.clazz;
		ConvertiblePair lookupKey = new ConvertiblePair(sourceType, requestedTargetType);
		CacheValue readTargetTypeValue = customReadTargetTypes.get(lookupKey);

		if (readTargetTypeValue != null) {
			return readTargetTypeValue.getType();
		}

		readTargetTypeValue = CacheValue.of(getCustomTarget(sourceType, requestedTargetType, readingPairs));
		CacheValue cacheValue = customReadTargetTypes.putIfAbsent(lookupKey, readTargetTypeValue);

		return cacheValue != null ? cacheValue.getType() : readTargetTypeValue.getType();
	}
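
The rewritten lookup replaces the nested `HashMap` cache with a `ConcurrentMap` keyed by `ConvertiblePair`, caching misses through the `ABSENT` sentinel. A generic sketch of the same `putIfAbsent` idiom (the names and the toy computation are illustrative):

```java
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

class PutIfAbsentCacheSample {

	private final ConcurrentMap<String, String> cache = new ConcurrentHashMap<String, String>();

	String lookup(String key) {

		String cached = cache.get(key);
		if (cached != null) {
			return cached;
		}

		// Compute once, then publish; if another thread won the race, prefer its value
		// so all callers agree on a single cached result.
		String computed = expensiveComputation(key);
		String raced = cache.putIfAbsent(key, computed);

		return raced != null ? raced : computed;
	}

	private String expensiveComputation(String key) {
		return key.toUpperCase();
	}
}
```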
|
||||
|
||||
@WritingConverter
|
||||
@@ -336,8 +358,10 @@ public class CustomConversions {
|
||||
INSTANCE;
|
||||
|
||||
public Set<ConvertiblePair> getConvertibleTypes() {
|
||||
|
||||
ConvertiblePair localeToString = new ConvertiblePair(Locale.class, String.class);
|
||||
ConvertiblePair booleanToString = new ConvertiblePair(Character.class, String.class);
|
||||
|
||||
return new HashSet<ConvertiblePair>(Arrays.asList(localeToString, booleanToString));
|
||||
}
|
||||
|
||||
@@ -346,29 +370,29 @@ public class CustomConversions {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Placeholder type to allow registering not-found values in the converter cache.
|
||||
*
|
||||
* @author Patryk Wasik
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class PlaceholderType {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper to safely store {@literal null} values in the type cache.
|
||||
*
|
||||
* @author Patryk Wasik
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class CacheValue {
|
||||
|
||||
public static final CacheValue NULL = new CacheValue(null);
|
||||
private final Class<?> clazz;
|
||||
private static final CacheValue ABSENT = new CacheValue(null);
|
||||
|
||||
public CacheValue(Class<?> clazz) {
|
||||
this.clazz = clazz;
|
||||
private final Class<?> type;
|
||||
|
||||
public CacheValue(Class<?> type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public Class<?> getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
static CacheValue of(Class<?> type) {
|
||||
return type == null ? ABSENT : new CacheValue(type);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,123 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.convert;

import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;

import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.util.Assert;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
 * Wrapper value object for a {@link BasicDBObject} to be able to access raw values by {@link MongoPersistentProperty}
 * references. The accessors will transparently resolve nested document values that a {@link MongoPersistentProperty}
 * might refer to through a path expression in field names.
 *
 * @author Oliver Gierke
 */
class DBObjectAccessor {

	private final DBObject dbObject;

	/**
	 * Creates a new {@link DBObjectAccessor} for the given {@link DBObject}.
	 *
	 * @param dbObject must be a {@link BasicDBObject} effectively, must not be {@literal null}.
	 */
	public DBObjectAccessor(DBObject dbObject) {

		Assert.notNull(dbObject, "DBObject must not be null!");
		Assert.isInstanceOf(BasicDBObject.class, dbObject, "Given DBObject must be a BasicDBObject!");

		this.dbObject = dbObject;
	}

	/**
	 * Puts the given value into the backing {@link DBObject} based on the coordinates defined through the given
	 * {@link MongoPersistentProperty}. By default this will be the plain field name. But field names might also consist
	 * of path traversals so we might need to create intermediate {@link BasicDBObject}s.
	 *
	 * @param prop must not be {@literal null}.
	 * @param value
	 */
	public void put(MongoPersistentProperty prop, Object value) {

		Assert.notNull(prop, "MongoPersistentProperty must not be null!");
		String fieldName = prop.getFieldName();

		Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
		DBObject dbObject = this.dbObject;

		while (parts.hasNext()) {

			String part = parts.next();

			if (parts.hasNext()) {
				BasicDBObject nestedDbObject = new BasicDBObject();
				dbObject.put(part, nestedDbObject);
				dbObject = nestedDbObject;
			} else {
				dbObject.put(part, value);
			}
		}
	}

	/**
	 * Returns the value the given {@link MongoPersistentProperty} refers to. By default this will be a direct field but
	 * the method will also transparently resolve nested values the {@link MongoPersistentProperty} might refer to through
	 * a path expression in the field name metadata.
	 *
	 * @param property must not be {@literal null}.
	 * @return
	 */
	@SuppressWarnings("unchecked")
	public Object get(MongoPersistentProperty property) {

		String fieldName = property.getFieldName();
		Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
		Map<Object, Object> source = this.dbObject.toMap();
		Object result = null;

		while (source != null && parts.hasNext()) {

			result = source.get(parts.next());

			if (parts.hasNext()) {
				source = getAsMap(result);
			}
		}

		return result;
	}

	@SuppressWarnings("unchecked")
	private Map<Object, Object> getAsMap(Object source) {

		if (source instanceof BasicDBObject) {
			return ((DBObject) source).toMap();
		}

		if (source instanceof Map) {
			return (Map<Object, Object>) source;
		}

		return null;
	}
}
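
`DBObjectAccessor` is package-private, so application code never calls it directly; the sketch below only illustrates the document shape that `put(...)` produces for a property mapped to the hypothetical field name `"address.city"` (an intermediate document is created per path segment).

```java
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

class DBObjectAccessorShapeSample {

	// Hand-built equivalent of put(property, "London") for a property whose
	// field name is "address.city": the result is { "address" : { "city" : "London" } }.
	DBObject nestedWrite() {

		DBObject sink = new BasicDBObject();
		sink.put("address", new BasicDBObject("city", "London"));

		return sink;
	}
}
```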
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012 the original author or authors.
|
||||
* Copyright 2012-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -31,18 +31,30 @@ import com.mongodb.DBObject;
|
||||
*/
|
||||
class DBObjectPropertyAccessor extends MapAccessor {
|
||||
|
||||
static MapAccessor INSTANCE = new DBObjectPropertyAccessor();
|
||||
static final MapAccessor INSTANCE = new DBObjectPropertyAccessor();
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.context.expression.MapAccessor#getSpecificTargetClasses()
|
||||
*/
|
||||
@Override
|
||||
public Class<?>[] getSpecificTargetClasses() {
|
||||
return new Class[] { DBObject.class };
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.context.expression.MapAccessor#canRead(org.springframework.expression.EvaluationContext, java.lang.Object, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public boolean canRead(EvaluationContext context, Object target, String name) {
|
||||
return true;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.context.expression.MapAccessor#read(org.springframework.expression.EvaluationContext, java.lang.Object, java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public TypedValue read(EvaluationContext context, Object target, String name) {
|
||||
@@ -52,4 +64,4 @@ class DBObjectPropertyAccessor extends MapAccessor {
|
||||
Object value = source.get(name);
|
||||
return value == null ? TypedValue.NULL : new TypedValue(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,55 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
/**
|
||||
* Used to resolve associations annotated with {@link org.springframework.data.mongodb.core.mapping.DBRef}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.4
|
||||
*/
|
||||
public interface DbRefResolver {
|
||||
|
||||
/**
|
||||
* Resolves the given {@link DBRef} into an object of the given {@link MongoPersistentProperty}'s type. The method
|
||||
* might return a proxy object for the {@link DBRef} or resolve it immediately. In both cases the
|
||||
* {@link DbRefResolverCallback} will be used to obtain the actual backing object.
|
||||
*
|
||||
* @param property will never be {@literal null}.
|
||||
* @param dbref the {@link DBRef} to resolve.
|
||||
* @param callback will never be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback);
|
||||
|
||||
/**
|
||||
* Creates a {@link DBRef} instance for the given {@link org.springframework.data.mongodb.core.mapping.DBRef}
|
||||
* annotation, {@link MongoPersistentEntity} and id.
|
||||
*
|
||||
* @param annotation will never be {@literal null}.
|
||||
* @param entity will never be {@literal null}.
|
||||
* @param id will never be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
DBRef createDbRef(org.springframework.data.mongodb.core.mapping.DBRef annotation, MongoPersistentEntity<?> entity,
|
||||
Object id);
|
||||
}
|
||||
@@ -0,0 +1,35 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
|
||||
/**
|
||||
* Callback interface to be used in conjunction with {@link DbRefResolver}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public interface DbRefResolverCallback {
|
||||
|
||||
/**
|
||||
* Resolve the final object for the given {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param property will never be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
Object resolve(MongoPersistentProperty property);
|
||||
}
|
||||
@@ -0,0 +1,479 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.springframework.util.ReflectionUtils.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.io.Serializable;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
import org.aopalliance.intercept.MethodInterceptor;
|
||||
import org.aopalliance.intercept.MethodInvocation;
|
||||
import org.objenesis.Objenesis;
|
||||
import org.objenesis.ObjenesisStd;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.beans.BeanUtils;
|
||||
import org.springframework.cglib.proxy.Callback;
|
||||
import org.springframework.cglib.proxy.Enhancer;
|
||||
import org.springframework.cglib.proxy.Factory;
|
||||
import org.springframework.cglib.proxy.MethodProxy;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.LazyLoadingException;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
/**
|
||||
* A {@link DbRefResolver} that resolves {@link org.springframework.data.mongodb.core.mapping.DBRef}s by delegating to a
|
||||
 * {@link DbRefResolverCallback} that is able to generate lazy loading proxies.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.4
|
||||
*/
|
||||
public class DefaultDbRefResolver implements DbRefResolver {
|
||||
|
||||
private static final boolean OBJENESIS_PRESENT = ClassUtils.isPresent("org.objenesis.Objenesis", null);
|
||||
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDbFactory}.
|
||||
*
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
*/
|
||||
public DefaultDbRefResolver(MongoDbFactory mongoDbFactory) {
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!");
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.exceptionTranslator = mongoDbFactory.getExceptionTranslator();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.DbRefResolver#resolveDbRef(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty, org.springframework.data.mongodb.core.convert.DbRefResolverCallback)
|
||||
*/
|
||||
@Override
|
||||
public Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback) {
|
||||
|
||||
Assert.notNull(property, "Property must not be null!");
|
||||
Assert.notNull(callback, "Callback must not be null!");
|
||||
|
||||
if (isLazyDbRef(property)) {
|
||||
return createLazyLoadingProxy(property, dbref, callback);
|
||||
}
|
||||
|
||||
return callback.resolve(property);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.DbRefResolver#created(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty, org.springframework.data.mongodb.core.mapping.MongoPersistentEntity, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public DBRef createDbRef(org.springframework.data.mongodb.core.mapping.DBRef annotation,
|
||||
MongoPersistentEntity<?> entity, Object id) {
|
||||
|
||||
DB db = mongoDbFactory.getDb();
|
||||
db = annotation != null && StringUtils.hasText(annotation.db()) ? mongoDbFactory.getDb(annotation.db()) : db;
|
||||
|
||||
return new DBRef(db, entity.getCollection(), id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a proxy for the given {@link MongoPersistentProperty} using the given {@link DbRefResolverCallback} to
|
||||
* eventually resolve the value of the property.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @param dbref can be {@literal null}.
|
||||
* @param callback must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private Object createLazyLoadingProxy(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback) {
|
||||
|
||||
ProxyFactory proxyFactory = new ProxyFactory();
|
||||
Class<?> propertyType = property.getType();
|
||||
|
||||
for (Class<?> type : propertyType.getInterfaces()) {
|
||||
proxyFactory.addInterface(type);
|
||||
}
|
||||
|
||||
LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, dbref, exceptionTranslator, callback);
|
||||
|
||||
proxyFactory.addInterface(LazyLoadingProxy.class);
|
||||
|
||||
if (propertyType.isInterface()) {
|
||||
proxyFactory.addInterface(propertyType);
|
||||
proxyFactory.addAdvice(interceptor);
|
||||
return proxyFactory.getProxy();
|
||||
}
|
||||
|
||||
proxyFactory.setProxyTargetClass(true);
|
||||
proxyFactory.setTargetClass(propertyType);
|
||||
|
||||
if (!OBJENESIS_PRESENT) {
|
||||
proxyFactory.addAdvice(interceptor);
|
||||
return proxyFactory.getProxy();
|
||||
}
|
||||
|
||||
return ObjenesisProxyEnhancer.enhanceAndGet(proxyFactory, propertyType, interceptor);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the property shall be resolved lazily.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private boolean isLazyDbRef(MongoPersistentProperty property) {
|
||||
return property.getDBRef() != null && property.getDBRef().lazy();
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link MethodInterceptor} that is used within a lazy loading proxy. The property resolving is delegated to a
|
||||
* {@link DbRefResolverCallback}. The resolving process is triggered by a method invocation on the proxy and is
|
||||
* guaranteed to be performed only once.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
static class LazyLoadingInterceptor implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor,
|
||||
Serializable {
|
||||
|
||||
private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD;
|
||||
|
||||
private final DbRefResolverCallback callback;
|
||||
private final MongoPersistentProperty property;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
private volatile boolean resolved;
|
||||
private Object result;
|
||||
private DBRef dbref;
|
||||
|
||||
static {
|
||||
try {
|
||||
INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("initialize");
|
||||
TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef");
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link LazyLoadingInterceptor} for the given {@link MongoPersistentProperty},
|
||||
* {@link PersistenceExceptionTranslator} and {@link DbRefResolverCallback}.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @param dbref can be {@literal null}.
|
||||
* @param callback must not be {@literal null}.
|
||||
*/
|
||||
public LazyLoadingInterceptor(MongoPersistentProperty property, DBRef dbref,
|
||||
PersistenceExceptionTranslator exceptionTranslator, DbRefResolverCallback callback) {
|
||||
|
||||
Assert.notNull(property, "Property must not be null!");
|
||||
Assert.notNull(exceptionTranslator, "Exception translator must not be null!");
|
||||
Assert.notNull(callback, "Callback must not be null!");
|
||||
|
||||
this.dbref = dbref;
|
||||
this.callback = callback;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
this.property = property;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.aopalliance.intercept.MethodInterceptor#invoke(org.aopalliance.intercept.MethodInvocation)
|
||||
*/
|
||||
@Override
|
||||
public Object invoke(MethodInvocation invocation) throws Throwable {
|
||||
return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.cglib.proxy.MethodInterceptor#intercept(java.lang.Object, java.lang.reflect.Method, java.lang.Object[], org.springframework.cglib.proxy.MethodProxy)
|
||||
*/
|
||||
@Override
|
||||
public Object intercept(Object obj, Method method, Object[] args, MethodProxy proxy) throws Throwable {
|
||||
|
||||
if (INITIALIZE_METHOD.equals(method)) {
|
||||
return ensureResolved();
|
||||
}
|
||||
|
||||
if (TO_DBREF_METHOD.equals(method)) {
|
||||
return this.dbref;
|
||||
}
|
||||
|
||||
if (isObjectMethod(method) && Object.class.equals(method.getDeclaringClass())) {
|
||||
|
||||
if (ReflectionUtils.isToStringMethod(method)) {
|
||||
return proxyToString(proxy);
|
||||
}
|
||||
|
||||
if (ReflectionUtils.isEqualsMethod(method)) {
|
||||
return proxyEquals(proxy, args[0]);
|
||||
}
|
||||
|
||||
if (ReflectionUtils.isHashCodeMethod(method)) {
|
||||
return proxyHashCode(proxy);
|
||||
}
|
||||
}
|
||||
|
||||
Object target = ensureResolved();
|
||||
|
||||
if (target == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return method.invoke(target, args);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a to string representation for the given {@code proxy}.
|
||||
*
|
||||
* @param proxy
|
||||
* @return
|
||||
*/
|
||||
private String proxyToString(Object proxy) {
|
||||
|
||||
StringBuilder description = new StringBuilder();
|
||||
if (dbref != null) {
|
||||
description.append(dbref.getRef());
|
||||
description.append(":");
|
||||
description.append(dbref.getId());
|
||||
} else {
|
||||
description.append(System.identityHashCode(proxy));
|
||||
}
|
||||
description.append("$").append(LazyLoadingProxy.class.getSimpleName());
|
||||
|
||||
return description.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the hashcode for the given {@code proxy}.
|
||||
*
|
||||
* @param proxy
|
||||
* @return
|
||||
*/
|
||||
private int proxyHashCode(Object proxy) {
|
||||
return proxyToString(proxy).hashCode();
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs an equality check for the given {@code proxy}.
|
||||
*
|
||||
* @param proxy
|
||||
* @param that
|
||||
* @return
|
||||
*/
|
||||
private boolean proxyEquals(Object proxy, Object that) {
|
||||
|
||||
if (!(that instanceof LazyLoadingProxy)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (that == proxy) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return proxyToString(proxy).equals(that.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Will trigger the resolution if the proxy is not resolved already or return a previously resolved result.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private Object ensureResolved() {
|
||||
|
||||
if (!resolved) {
|
||||
this.result = resolve();
|
||||
this.resolved = true;
|
||||
}
|
||||
|
||||
return this.result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback method for serialization.
|
||||
*
|
||||
* @param out
|
||||
* @throws IOException
|
||||
*/
|
||||
private void writeObject(ObjectOutputStream out) throws IOException {
|
||||
|
||||
ensureResolved();
|
||||
out.writeObject(this.result);
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback method for deserialization.
|
||||
*
|
||||
* @param in
|
||||
* @throws IOException
|
||||
*/
|
||||
private void readObject(ObjectInputStream in) throws IOException {
|
||||
|
||||
try {
|
||||
this.resolved = true;
|
||||
this.result = in.readObject();
|
||||
} catch (ClassNotFoundException e) {
|
||||
throw new LazyLoadingException("Could not deserialize result", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves the proxy into its backing object.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private synchronized Object resolve() {
|
||||
|
||||
if (!resolved) {
|
||||
|
||||
try {
|
||||
|
||||
return callback.resolve(property);
|
||||
|
||||
} catch (RuntimeException ex) {
|
||||
|
||||
DataAccessException translatedException = this.exceptionTranslator.translateExceptionIfPossible(ex);
|
||||
throw new LazyLoadingException("Unable to lazily resolve DBRef!", translatedException);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Static class to accommodate optional dependency on Objenesis.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @since 1.4
|
||||
*/
|
||||
private static class ObjenesisProxyEnhancer {
|
||||
|
||||
private static final boolean IS_SPRING_4_OR_BETTER = ClassUtils.isPresent(
|
||||
"org.springframework.core.DefaultParameterNameDiscoverer", null);
|
||||
|
||||
private static final InstanceCreatorStrategy INSTANCE_CREATOR;
|
||||
|
||||
static {
|
||||
|
||||
if (IS_SPRING_4_OR_BETTER) {
|
||||
INSTANCE_CREATOR = new Spring4ObjenesisInstanceCreatorStrategy();
|
||||
} else {
|
||||
INSTANCE_CREATOR = new DefaultObjenesisInstanceCreatorStrategy();
|
||||
}
|
||||
}
|
||||
|
||||
public static Object enhanceAndGet(ProxyFactory proxyFactory, Class<?> type,
|
||||
org.springframework.cglib.proxy.MethodInterceptor interceptor) {
|
||||
|
||||
Enhancer enhancer = new Enhancer();
|
||||
enhancer.setSuperclass(type);
|
||||
enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class);
|
||||
enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class });
|
||||
|
||||
Factory factory = (Factory) INSTANCE_CREATOR.newInstance(enhancer.createClass());
|
||||
factory.setCallbacks(new Callback[] { interceptor });
|
||||
return factory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Strategy for constructing new instances of a given {@link Class}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
interface InstanceCreatorStrategy {
|
||||
Object newInstance(Class<?> clazz);
|
||||
}
|
||||
|
||||
/**
|
||||
* An {@link InstanceCreatorStrategy} that uses Objenesis from the classpath.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class DefaultObjenesisInstanceCreatorStrategy implements InstanceCreatorStrategy {
|
||||
|
||||
private static final Objenesis OBJENESIS = new ObjenesisStd(true);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.ObjenesisProxyEnhancer.InstanceCreatorStrategy#newInstance(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Object newInstance(Class<?> clazz) {
|
||||
return OBJENESIS.newInstance(clazz);
|
||||
}
|
||||
}
|
||||

		/**
		 * An {@link InstanceCreatorStrategy} that uses a repackaged version of Objenesis from Spring 4.
		 *
		 * @author Thomas Darimont
		 */
		private static class Spring4ObjenesisInstanceCreatorStrategy implements InstanceCreatorStrategy {

			private static final String SPRING4_OBJENESIS_CLASS_NAME = "org.springframework.objenesis.ObjenesisStd";
			private static final Object OBJENESIS;
			private static final Method NEW_INSTANCE_METHOD;

			static {

				try {
					Class<?> objenesisClass = ClassUtils.forName(SPRING4_OBJENESIS_CLASS_NAME,
							ObjenesisProxyEnhancer.class.getClassLoader());

					OBJENESIS = BeanUtils.instantiateClass(objenesisClass.getConstructor(boolean.class), true);
					NEW_INSTANCE_METHOD = objenesisClass.getMethod("newInstance", Class.class);

				} catch (Exception e) {
					throw new RuntimeException("Could not set up Objenesis infrastructure with Spring 4", e);
				}
			}

			/*
			 * (non-Javadoc)
			 * @see org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.ObjenesisProxyEnhancer.InstanceCreatorStrategy#newInstance(java.lang.Class)
			 */
			@Override
			public Object newInstance(Class<?> clazz) {

				try {
					return NEW_INSTANCE_METHOD.invoke(OBJENESIS, clazz);
				} catch (Exception e) {
					throw new RuntimeException("Could not create instance for " + clazz, e);
				}
			}
		}
	}
}
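For readers unfamiliar with Objenesis: the two strategies above exist because the proxied superclass may not expose a usable no-argument constructor, so the enhancer needs to allocate an instance without running any constructor. A minimal standalone sketch of that capability; the `Account` and `ObjenesisSketch` types are illustrative and not part of this changeset:

```java
import org.objenesis.Objenesis;
import org.objenesis.ObjenesisStd;

class ObjenesisSketch {

	// Hypothetical type without a no-argument constructor.
	static class Account {

		final String owner;

		Account(String owner) {
			this.owner = owner;
		}
	}

	public static void main(String[] args) {

		// 'true' enables caching of the per-class instantiator, as in the default strategy above.
		Objenesis objenesis = new ObjenesisStd(true);

		// Creates an Account without invoking any constructor -- exactly what the proxy
		// enhancer needs, since the generated CGLib subclass cannot call one either.
		Account instance = objenesis.newInstance(Account.class);
		System.out.println(instance.owner); // prints "null": no constructor ran
	}
}
```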
@@ -0,0 +1,45 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.convert;

import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor;

import com.mongodb.DBRef;

/**
 * Allows direct interaction with the underlying {@link LazyLoadingInterceptor}.
 *
 * @author Thomas Darimont
 * @since 1.5
 */
public interface LazyLoadingProxy {

	/**
	 * Initializes the proxy and returns the wrapped value.
	 *
	 * @return
	 * @since 1.5
	 */
	Object initialize();

	/**
	 * Returns the {@link DBRef} represented by this {@link LazyLoadingProxy}, may be null.
	 *
	 * @return
	 * @since 1.5
	 */
	DBRef toDBRef();
}
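A hedged usage sketch of the new interface: assuming a hypothetical `Person` entity with a lazily resolved reference (the `lazy` attribute on `@DBRef` is part of the lazy-loading feature this changeset builds on), client code could detect the proxy and interact with it like this. The domain types and field names below are illustrative only:

```java
import org.springframework.data.mongodb.core.convert.LazyLoadingProxy;
import org.springframework.data.mongodb.core.mapping.DBRef;

class Address {
	String city;
}

class Person {

	@DBRef(lazy = true) // resolved on first access through the CGLib proxy built above
	Address address;
}

class LazyLoadingProxySketch {

	void inspect(Person person) {

		Object value = person.address;

		if (value instanceof LazyLoadingProxy) {

			LazyLoadingProxy proxy = (LazyLoadingProxy) value;

			// Obtain the raw reference without triggering a database round trip.
			com.mongodb.DBRef ref = proxy.toDBRef();

			// Explicitly resolve the referenced document once it is actually needed.
			Address resolved = (Address) proxy.initialize();
		}
	}
}
```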
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 by the original author(s).
|
||||
* Copyright 2011-2014 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -19,6 +19,8 @@ import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumMap;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
@@ -57,11 +59,9 @@ import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.expression.spel.standard.SpelExpressionParser;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
@@ -73,15 +73,16 @@ import com.mongodb.DBRef;
|
||||
* @author Jon Brisbin
|
||||
* @author Patrik Wasik
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
|
||||
|
||||
protected static final Logger log = LoggerFactory.getLogger(MappingMongoConverter.class);
|
||||
protected static final Logger LOGGER = LoggerFactory.getLogger(MappingMongoConverter.class);
|
||||
|
||||
protected final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
protected final SpelExpressionParser spelExpressionParser = new SpelExpressionParser();
|
||||
protected final MongoDbFactory mongoDbFactory;
|
||||
protected final QueryMapper idMapper;
|
||||
protected final DbRefResolver dbRefResolver;
|
||||
protected ApplicationContext applicationContext;
|
||||
protected boolean useFieldAccessOnly = true;
|
||||
protected MongoTypeMapper typeMapper;
|
||||
@@ -90,21 +91,21 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
private SpELContext spELContext;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link MongoDbFactory} and {@link MappingContext}.
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link DbRefResolver} and {@link MappingContext}.
|
||||
*
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public MappingMongoConverter(MongoDbFactory mongoDbFactory,
|
||||
public MappingMongoConverter(DbRefResolver dbRefResolver,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
super(ConversionServiceFactory.createDefaultConversionService());
|
||||
|
||||
Assert.notNull(mongoDbFactory);
|
||||
Assert.notNull(mappingContext);
|
||||
Assert.notNull(dbRefResolver, "DbRefResolver must not be null!");
|
||||
Assert.notNull(mappingContext, "MappingContext must not be null!");
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.dbRefResolver = dbRefResolver;
|
||||
this.mappingContext = mappingContext;
|
||||
this.typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext);
|
||||
this.idMapper = new QueryMapper(this);
|
||||
@@ -112,6 +113,19 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
this.spELContext = new SpELContext(DBObjectPropertyAccessor.INSTANCE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link MongoDbFactory} and {@link MappingContext}.
|
||||
*
|
||||
* @deprecated use the constructor taking a {@link DbRefResolver} instead.
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
*/
|
||||
@Deprecated
|
||||
public MappingMongoConverter(MongoDbFactory mongoDbFactory,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
this(new DefaultDbRefResolver(mongoDbFactory), mappingContext);
|
||||
}
|
||||
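A minimal wiring sketch for the constructor change shown above, assuming the usual MongoDbFactory/MongoMappingContext setup; the surrounding class and method names are illustrative, only the two constructor calls reflect the diff:

```java
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

class ConverterWiringSketch {

	MappingMongoConverter createConverter(MongoDbFactory mongoDbFactory) throws Exception {

		MongoMappingContext mappingContext = new MongoMappingContext();

		// Preferred construction after this change: hand in a DbRefResolver ...
		DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory);
		MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
		converter.afterPropertiesSet();

		// ... while the MongoDbFactory-based constructor stays available but deprecated and
		// simply delegates to new DefaultDbRefResolver(mongoDbFactory), as shown in the diff.
		return converter;
	}
}
```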
|
||||
/**
|
||||
* Configures the {@link MongoTypeMapper} to be used to add type information to {@link DBObject}s created by the
|
||||
* converter and how to lookup type information from {@link DBObject}s when reading them. Uses a
|
||||
@@ -234,7 +248,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
parent);
|
||||
}
|
||||
|
||||
private <S extends Object> S read(final MongoPersistentEntity<S> entity, final DBObject dbo, Object parent) {
|
||||
private <S extends Object> S read(final MongoPersistentEntity<S> entity, final DBObject dbo, final Object parent) {
|
||||
|
||||
final DefaultSpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(dbo, spELContext);
|
||||
|
||||
@@ -261,11 +275,20 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
// Handle associations
|
||||
entity.doWithAssociations(new AssociationHandler<MongoPersistentProperty>() {
|
||||
public void doWithAssociation(Association<MongoPersistentProperty> association) {
|
||||
MongoPersistentProperty inverseProp = association.getInverse();
|
||||
Object obj = getValueInternal(inverseProp, dbo, evaluator, result);
|
||||
|
||||
wrapper.setProperty(inverseProp, obj);
|
||||
MongoPersistentProperty property = association.getInverse();
|
||||
|
||||
Object value = dbo.get(property.getName());
|
||||
DBRef dbref = value instanceof DBRef ? (DBRef) value : null;
|
||||
Object obj = dbRefResolver.resolveDbRef(property, dbref, new DbRefResolverCallback() {
|
||||
|
||||
@Override
|
||||
public Object resolve(MongoPersistentProperty property) {
|
||||
return getValueInternal(property, dbo, evaluator, parent);
|
||||
}
|
||||
});
|
||||
|
||||
wrapper.setProperty(property, obj);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -285,7 +308,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Assert.isTrue(annotation != null, "The referenced property has to be mapped with @DBRef!");
|
||||
}
|
||||
|
||||
return createDBRef(object, annotation);
|
||||
// @see DATAMONGO-913
|
||||
if (object instanceof LazyLoadingProxy) {
|
||||
return ((LazyLoadingProxy) object).toDBRef();
|
||||
}
|
||||
|
||||
return createDBRef(object, referingProperty);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -382,10 +410,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Object propertyObj = wrapper.getProperty(prop, prop.getType(), fieldAccessOnly);
|
||||
|
||||
if (null != propertyObj) {
|
||||
|
||||
if (!conversions.isSimpleType(propertyObj.getClass())) {
|
||||
writePropertyInternal(propertyObj, dbo, prop);
|
||||
} else {
|
||||
writeSimpleInternal(propertyObj, dbo, prop.getFieldName());
|
||||
writeSimpleInternal(propertyObj, dbo, prop);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -410,46 +439,68 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return;
|
||||
}
|
||||
|
||||
String name = prop.getFieldName();
|
||||
DBObjectAccessor accessor = new DBObjectAccessor(dbo);
|
||||
|
||||
TypeInformation<?> valueType = ClassTypeInformation.from(obj.getClass());
|
||||
TypeInformation<?> type = prop.getTypeInformation();
|
||||
|
||||
if (valueType.isCollectionLike()) {
|
||||
DBObject collectionInternal = createCollection(asCollection(obj), prop);
|
||||
dbo.put(name, collectionInternal);
|
||||
accessor.put(prop, collectionInternal);
|
||||
return;
|
||||
}
|
||||
|
||||
if (valueType.isMap()) {
|
||||
DBObject mapDbObj = createMap((Map<Object, Object>) obj, prop);
|
||||
dbo.put(name, mapDbObj);
|
||||
accessor.put(prop, mapDbObj);
|
||||
return;
|
||||
}
|
||||
|
||||
if (prop.isDbReference()) {
|
||||
DBRef dbRefObj = createDBRef(obj, prop.getDBRef());
|
||||
|
||||
DBRef dbRefObj = null;
|
||||
|
||||
/*
|
||||
* If we already have a LazyLoadingProxy, we use its cached DBRef value instead of
|
||||
* unnecessarily initializing it only to convert it to a DBRef a few instructions later.
|
||||
*/
|
||||
if (obj instanceof LazyLoadingProxy) {
|
||||
dbRefObj = ((LazyLoadingProxy) obj).toDBRef();
|
||||
}
|
||||
|
||||
dbRefObj = dbRefObj != null ? dbRefObj : createDBRef(obj, prop);
|
||||
|
||||
if (null != dbRefObj) {
|
||||
dbo.put(name, dbRefObj);
|
||||
accessor.put(prop, dbRefObj);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* If we have a LazyLoadingProxy we make sure it is initialized first.
|
||||
*/
|
||||
if (obj instanceof LazyLoadingProxy) {
|
||||
obj = ((LazyLoadingProxy) obj).initialize();
|
||||
}
|
||||
|
||||
// Lookup potential custom target type
|
||||
Class<?> basicTargetType = conversions.getCustomWriteTarget(obj.getClass(), null);
|
||||
|
||||
if (basicTargetType != null) {
|
||||
dbo.put(name, conversionService.convert(obj, basicTargetType));
|
||||
accessor.put(prop, conversionService.convert(obj, basicTargetType));
|
||||
return;
|
||||
}
|
||||
|
||||
BasicDBObject propDbObj = new BasicDBObject();
|
||||
Object existingValue = accessor.get(prop);
|
||||
BasicDBObject propDbObj = existingValue instanceof BasicDBObject ? (BasicDBObject) existingValue
|
||||
: new BasicDBObject();
|
||||
addCustomTypeKeyIfNecessary(type, obj, propDbObj);
|
||||
|
||||
MongoPersistentEntity<?> entity = isSubtype(prop.getType(), obj.getClass()) ? mappingContext
|
||||
.getPersistentEntity(obj.getClass()) : mappingContext.getPersistentEntity(type);
|
||||
|
||||
writeInternal(obj, propDbObj, entity);
|
||||
dbo.put(name, propDbObj);
|
||||
accessor.put(prop, propDbObj);
|
||||
}
|
||||
|
||||
private boolean isSubtype(Class<?> left, Class<?> right) {
|
||||
@@ -494,7 +545,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
continue;
|
||||
}
|
||||
|
||||
DBRef dbRef = createDBRef(element, property.getDBRef());
|
||||
DBRef dbRef = createDBRef(element, property);
|
||||
dbList.add(dbRef);
|
||||
}
|
||||
|
||||
@@ -527,7 +578,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
if (conversions.isSimpleType(key.getClass())) {
|
||||
|
||||
String simpleKey = potentiallyEscapeMapKey(key.toString());
|
||||
dbObject.put(simpleKey, value != null ? createDBRef(value, property.getDBRef()) : null);
|
||||
dbObject.put(simpleKey, value != null ? createDBRef(value, property) : null);
|
||||
|
||||
} else {
|
||||
throw new MappingException("Cannot use a complex object as a key value.");
|
||||
@@ -647,7 +698,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
*/
|
||||
protected void addCustomTypeKeyIfNecessary(TypeInformation<?> type, Object value, DBObject dbObject) {
|
||||
|
||||
TypeInformation<?> actualType = type != null ? type.getActualType() : type;
|
||||
TypeInformation<?> actualType = type != null ? type.getActualType() : null;
|
||||
Class<?> reference = actualType == null ? Object.class : actualType.getType();
|
||||
|
||||
boolean notTheSameClass = !value.getClass().equals(reference);
|
||||
@@ -667,6 +718,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
dbObject.put(key, getPotentiallyConvertedSimpleWrite(value));
|
||||
}
|
||||
|
||||
private void writeSimpleInternal(Object value, DBObject dbObject, MongoPersistentProperty property) {
|
||||
DBObjectAccessor accessor = new DBObjectAccessor(dbObject);
|
||||
accessor.put(property, getPotentiallyConvertedSimpleWrite(value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether we have a custom conversion registered for the given value into an arbitrary simple Mongo type.
|
||||
* Returns the converted value if so. If not, we perform special enum handling or simply return the value as is.
|
||||
@@ -715,7 +771,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return target.isAssignableFrom(value.getClass()) ? value : conversionService.convert(value, target);
|
||||
}
|
||||
|
||||
protected DBRef createDBRef(Object target, org.springframework.data.mongodb.core.mapping.DBRef dbref) {
|
||||
protected DBRef createDBRef(Object target, MongoPersistentProperty property) {
|
||||
|
||||
Assert.notNull(target);
|
||||
|
||||
@@ -724,6 +780,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> targetEntity = mappingContext.getPersistentEntity(target.getClass());
|
||||
targetEntity = targetEntity == null ? targetEntity = mappingContext.getPersistentEntity(property) : targetEntity;
|
||||
|
||||
if (null == targetEntity) {
|
||||
throw new MappingException("No mapping metadata found for " + target.getClass());
|
||||
@@ -735,17 +792,21 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
throw new MappingException("No id property found on class " + targetEntity.getType());
|
||||
}
|
||||
|
||||
BeanWrapper<MongoPersistentEntity<Object>, Object> wrapper = BeanWrapper.create(target, conversionService);
|
||||
Object id = wrapper.getProperty(idProperty, Object.class, useFieldAccessOnly);
|
||||
Object id = null;
|
||||
|
||||
if (target.getClass().equals(idProperty.getType())) {
|
||||
id = target;
|
||||
} else {
|
||||
BeanWrapper<MongoPersistentEntity<Object>, Object> wrapper = BeanWrapper.create(target, conversionService);
|
||||
id = wrapper.getProperty(idProperty, Object.class, useFieldAccessOnly);
|
||||
}
|
||||
|
||||
if (null == id) {
|
||||
throw new MappingException("Cannot create a reference to an object with a NULL id.");
|
||||
}
|
||||
|
||||
DB db = mongoDbFactory.getDb();
|
||||
db = dbref != null && StringUtils.hasText(dbref.db()) ? mongoDbFactory.getDb(dbref.db()) : db;
|
||||
|
||||
return new DBRef(db, targetEntity.getCollection(), idMapper.convertId(id));
|
||||
return dbRefResolver.createDbRef(property == null ? null : property.getDBRef(), targetEntity,
|
||||
idMapper.convertId(id));
|
||||
}
|
||||
|
||||
protected Object getValueInternal(MongoPersistentProperty prop, DBObject dbo, SpELExpressionEvaluator eval,
|
||||
@@ -762,7 +823,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
* @param sourceValue must not be {@literal null}.
|
||||
* @return the converted {@link Collection} or array, will never be {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
@SuppressWarnings({ "unchecked", "null" })
|
||||
private Object readCollectionOrArray(TypeInformation<?> targetType, BasicDBList sourceValue, Object parent) {
|
||||
|
||||
Assert.notNull(targetType);
|
||||
@@ -775,17 +836,26 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
collectionType = Collection.class.isAssignableFrom(collectionType) ? collectionType : List.class;
|
||||
|
||||
Collection<Object> items = targetType.getType().isArray() ? new ArrayList<Object>() : CollectionFactory
|
||||
.createCollection(collectionType, sourceValue.size());
|
||||
TypeInformation<?> componentType = targetType.getComponentType();
|
||||
Class<?> rawComponentType = componentType == null ? null : componentType.getType();
|
||||
|
||||
Collection<Object> items;
|
||||
|
||||
if (targetType.getType().isArray()) {
|
||||
items = new ArrayList<Object>();
|
||||
} else if (EnumSet.class.isAssignableFrom(collectionType)) {
|
||||
Assert.notNull(rawComponentType, "Component type must not be null for enum sets!");
|
||||
items = EnumSet.noneOf(rawComponentType.asSubclass(Enum.class));
|
||||
} else {
|
||||
items = CollectionFactory.createCollection(collectionType, sourceValue.size());
|
||||
}
|
||||
|
||||
for (int i = 0; i < sourceValue.size(); i++) {
|
||||
|
||||
Object dbObjItem = sourceValue.get(i);
|
||||
|
||||
if (dbObjItem instanceof DBRef) {
|
||||
items.add(DBRef.class.equals(rawComponentType) ? dbObjItem : read(componentType, ((DBRef) dbObjItem).fetch(),
|
||||
items.add(DBRef.class.equals(rawComponentType) ? dbObjItem : read(componentType, readRef((DBRef) dbObjItem),
|
||||
parent));
|
||||
} else if (dbObjItem instanceof DBObject) {
|
||||
items.add(read(componentType, (DBObject) dbObjItem, parent));
|
||||
@@ -804,36 +874,48 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
* @param dbObject
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
@SuppressWarnings({ "unchecked", "null", "rawtypes" })
|
||||
protected Map<Object, Object> readMap(TypeInformation<?> type, DBObject dbObject, Object parent) {
|
||||
|
||||
Assert.notNull(dbObject);
|
||||
|
||||
Class<?> mapType = typeMapper.readType(dbObject, type).getType();
|
||||
Map<Object, Object> map = CollectionFactory.createMap(mapType, dbObject.keySet().size());
|
||||
|
||||
TypeInformation<?> keyType = type.getComponentType();
|
||||
Class<?> rawKeyType = keyType == null ? null : keyType.getType();
|
||||
|
||||
TypeInformation<?> valueType = type.getMapValueType();
|
||||
Class<?> rawValueType = valueType == null ? null : valueType.getType();
|
||||
|
||||
Map<Object, Object> map;
|
||||
|
||||
if (EnumMap.class.isAssignableFrom(mapType)) {
|
||||
Assert.notNull(keyType, "Key type must not be null for enum maps!");
|
||||
map = new EnumMap(rawKeyType.asSubclass(Enum.class));
|
||||
} else {
|
||||
map = CollectionFactory.createMap(mapType, dbObject.keySet().size());
|
||||
}
|
||||
|
||||
Map<String, Object> sourceMap = dbObject.toMap();
|
||||
|
||||
for (Entry<String, Object> entry : sourceMap.entrySet()) {
|
||||
|
||||
if (typeMapper.isTypeKey(entry.getKey())) {
|
||||
continue;
|
||||
}
|
||||
|
||||
Object key = potentiallyUnescapeMapKey(entry.getKey());
|
||||
|
||||
TypeInformation<?> keyTypeInformation = type.getComponentType();
|
||||
if (keyTypeInformation != null) {
|
||||
Class<?> keyType = keyTypeInformation.getType();
|
||||
key = conversionService.convert(key, keyType);
|
||||
if (rawKeyType != null) {
|
||||
key = conversionService.convert(key, rawKeyType);
|
||||
}
|
||||
|
||||
Object value = entry.getValue();
|
||||
TypeInformation<?> valueType = type.getMapValueType();
|
||||
Class<?> rawValueType = valueType == null ? null : valueType.getType();
|
||||
|
||||
if (value instanceof DBObject) {
|
||||
map.put(key, read(valueType, (DBObject) value, parent));
|
||||
} else if (value instanceof DBRef) {
|
||||
map.put(key, DBRef.class.equals(rawValueType) ? value : read(valueType, ((DBRef) value).fetch()));
|
||||
map.put(key, DBRef.class.equals(rawValueType) ? value : read(valueType, readRef((DBRef) value)));
|
||||
} else {
|
||||
Class<?> valueClass = valueType == null ? null : valueType.getType();
|
||||
map.put(key, getPotentiallyConvertedSimpleRead(value, valueClass));
|
||||
@@ -879,15 +961,17 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return getPotentiallyConvertedSimpleWrite(obj);
|
||||
}
|
||||
|
||||
TypeInformation<?> typeHint = typeInformation == null ? ClassTypeInformation.OBJECT : typeInformation;
|
||||
|
||||
if (obj instanceof BasicDBList) {
|
||||
return maybeConvertList((BasicDBList) obj);
|
||||
return maybeConvertList((BasicDBList) obj, typeHint);
|
||||
}
|
||||
|
||||
if (obj instanceof DBObject) {
|
||||
DBObject newValueDbo = new BasicDBObject();
|
||||
for (String vk : ((DBObject) obj).keySet()) {
|
||||
Object o = ((DBObject) obj).get(vk);
|
||||
newValueDbo.put(vk, convertToMongoType(o));
|
||||
newValueDbo.put(vk, convertToMongoType(o, typeHint));
|
||||
}
|
||||
return newValueDbo;
|
||||
}
|
||||
@@ -895,17 +979,17 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
if (obj instanceof Map) {
|
||||
DBObject result = new BasicDBObject();
|
||||
for (Map.Entry<Object, Object> entry : ((Map<Object, Object>) obj).entrySet()) {
|
||||
result.put(entry.getKey().toString(), convertToMongoType(entry.getValue()));
|
||||
result.put(entry.getKey().toString(), convertToMongoType(entry.getValue(), typeHint));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
if (obj.getClass().isArray()) {
|
||||
return maybeConvertList(Arrays.asList((Object[]) obj));
|
||||
return maybeConvertList(Arrays.asList((Object[]) obj), typeHint);
|
||||
}
|
||||
|
||||
if (obj instanceof Collection) {
|
||||
return maybeConvertList((Collection<?>) obj);
|
||||
return maybeConvertList((Collection<?>) obj, typeHint);
|
||||
}
|
||||
|
||||
DBObject newDbo = new BasicDBObject();
|
||||
@@ -918,11 +1002,13 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return !obj.getClass().equals(typeInformation.getType()) ? newDbo : removeTypeInfoRecursively(newDbo);
|
||||
}
|
||||
|
||||
public BasicDBList maybeConvertList(Iterable<?> source) {
|
||||
public BasicDBList maybeConvertList(Iterable<?> source, TypeInformation<?> typeInformation) {
|
||||
|
||||
BasicDBList newDbl = new BasicDBList();
|
||||
for (Object element : source) {
|
||||
newDbl.add(convertToMongoType(element));
|
||||
newDbl.add(convertToMongoType(element, typeInformation));
|
||||
}
|
||||
|
||||
return newDbl;
|
||||
}
|
||||
|
||||
@@ -965,7 +1051,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
private class MongoDbPropertyValueProvider implements PropertyValueProvider<MongoPersistentProperty> {
|
||||
|
||||
private final DBObject source;
|
||||
private final DBObjectAccessor source;
|
||||
private final SpELExpressionEvaluator evaluator;
|
||||
private final Object parent;
|
||||
|
||||
@@ -978,7 +1064,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Assert.notNull(source);
|
||||
Assert.notNull(evaluator);
|
||||
|
||||
this.source = source;
|
||||
this.source = new DBObjectAccessor(source);
|
||||
this.evaluator = evaluator;
|
||||
this.parent = parent;
|
||||
}
|
||||
@@ -990,7 +1076,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
public <T> T getPropertyValue(MongoPersistentProperty property) {
|
||||
|
||||
String expression = property.getSpelExpression();
|
||||
Object value = expression != null ? evaluator.evaluate(expression) : source.get(property.getFieldName());
|
||||
Object value = expression != null ? evaluator.evaluate(expression) : source.get(property);
|
||||
|
||||
if (value == null) {
|
||||
return null;
|
||||
@@ -1043,7 +1129,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
if (conversions.hasCustomReadTarget(value.getClass(), rawType)) {
|
||||
return (T) conversionService.convert(value, rawType);
|
||||
} else if (value instanceof DBRef) {
|
||||
return (T) (rawType.equals(DBRef.class) ? value : read(type, ((DBRef) value).fetch(), parent));
|
||||
return (T) (rawType.equals(DBRef.class) ? value : read(type, readRef((DBRef) value), parent));
|
||||
} else if (value instanceof BasicDBList) {
|
||||
return (T) readCollectionOrArray(type, (BasicDBList) value, parent);
|
||||
} else if (value instanceof DBObject) {
|
||||
@@ -1052,4 +1138,14 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return (T) getPotentiallyConvertedSimpleRead(value, rawType);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs the fetch operation for the given {@link DBRef}.
|
||||
*
|
||||
* @param ref
|
||||
* @return
|
||||
*/
|
||||
DBObject readRef(DBRef ref) {
|
||||
return ref.fetch();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,19 +17,26 @@ package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionException;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.PersistentPropertyPath;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -44,11 +51,12 @@ import com.mongodb.DBRef;
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class QueryMapper {
|
||||
|
||||
private static final List<String> DEFAULT_ID_NAMES = Arrays.asList("id", "_id");
|
||||
private static final String N_OR_PATTERN = "\\$.*or";
|
||||
|
||||
private final ConversionService conversionService;
|
||||
private final MongoConverter converter;
|
||||
@@ -79,7 +87,7 @@ public class QueryMapper {
|
||||
@SuppressWarnings("deprecation")
|
||||
public DBObject getMappedObject(DBObject query, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (Keyword.isKeyword(query)) {
|
||||
if (isNestedKeyword(query)) {
|
||||
return getMappedKeyword(new Keyword(query), entity);
|
||||
}
|
||||
|
||||
@@ -97,51 +105,77 @@ public class QueryMapper {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (Keyword.isKeyword(key)) {
|
||||
if (isKeyword(key)) {
|
||||
result.putAll(getMappedKeyword(new Keyword(query, key), entity));
|
||||
continue;
|
||||
}
|
||||
|
||||
Field field = entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext);
|
||||
Field field = createPropertyField(entity, key, mappingContext);
|
||||
Entry<String, Object> entry = getMappedObjectForField(field, query.get(key));
|
||||
|
||||
Object rawValue = query.get(key);
|
||||
String newKey = field.getMappedKey();
|
||||
|
||||
if (Keyword.isKeyword(rawValue) && !field.isIdField()) {
|
||||
Keyword keyword = new Keyword((DBObject) rawValue);
|
||||
result.put(newKey, getMappedKeyword(field, keyword));
|
||||
} else {
|
||||
result.put(newKey, getMappedValue(field, rawValue));
|
||||
}
|
||||
result.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the mapped object value for the given field out of the raw value, taking nested {@link Keyword}s into account.
|
||||
*
|
||||
* @param field
|
||||
* @param rawValue
|
||||
* @return
|
||||
*/
|
||||
protected Entry<String, Object> getMappedObjectForField(Field field, Object rawValue) {
|
||||
|
||||
String key = field.getMappedKey();
|
||||
Object value;
|
||||
|
||||
if (isNestedKeyword(rawValue) && !field.isIdField()) {
|
||||
Keyword keyword = new Keyword((DBObject) rawValue);
|
||||
value = getMappedKeyword(field, keyword);
|
||||
} else {
|
||||
value = getMappedValue(field, rawValue);
|
||||
}
|
||||
|
||||
return createMapEntry(key, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param entity
|
||||
* @param key
|
||||
* @param mappingContext
|
||||
* @return
|
||||
*/
|
||||
protected Field createPropertyField(MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
return entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the given {@link DBObject} representing a keyword by mapping the keyword's value.
|
||||
*
|
||||
* @param query the {@link DBObject} representing a keyword (e.g. {@code $ne : … } )
|
||||
* @param keyword the {@link DBObject} representing a keyword (e.g. {@code $ne : … } )
|
||||
* @param entity
|
||||
* @return
|
||||
*/
|
||||
private DBObject getMappedKeyword(Keyword query, MongoPersistentEntity<?> entity) {
|
||||
protected DBObject getMappedKeyword(Keyword keyword, MongoPersistentEntity<?> entity) {
|
||||
|
||||
// $or/$nor
|
||||
if (query.key.matches(N_OR_PATTERN) || query.value instanceof Iterable) {
|
||||
if (keyword.isOrOrNor() || keyword.hasIterableValue()) {
|
||||
|
||||
Iterable<?> conditions = (Iterable<?>) query.value;
|
||||
Iterable<?> conditions = keyword.getValue();
|
||||
BasicDBList newConditions = new BasicDBList();
|
||||
|
||||
for (Object condition : conditions) {
|
||||
newConditions.add(condition instanceof DBObject ? getMappedObject((DBObject) condition, entity)
|
||||
newConditions.add(isDBObject(condition) ? getMappedObject((DBObject) condition, entity)
|
||||
: convertSimpleOrDBObject(condition, entity));
|
||||
}
|
||||
|
||||
return new BasicDBObject(query.key, newConditions);
|
||||
return new BasicDBObject(keyword.getKey(), newConditions);
|
||||
}
|
||||
|
||||
return new BasicDBObject(query.key, convertSimpleOrDBObject(query.value, entity));
|
||||
return new BasicDBObject(keyword.getKey(), convertSimpleOrDBObject(keyword.getValue(), entity));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -151,13 +185,15 @@ public class QueryMapper {
|
||||
* @param keyword
|
||||
* @return
|
||||
*/
|
||||
private DBObject getMappedKeyword(Field property, Keyword keyword) {
|
||||
protected DBObject getMappedKeyword(Field property, Keyword keyword) {
|
||||
|
||||
boolean needsAssociationConversion = property.isAssociation() && !keyword.isExists();
|
||||
Object value = needsAssociationConversion ? convertAssociation(keyword.value, property.getProperty())
|
||||
: getMappedValue(property.with(keyword.key), keyword.value);
|
||||
Object value = keyword.getValue();
|
||||
|
||||
return new BasicDBObject(keyword.key, value);
|
||||
Object convertedValue = needsAssociationConversion ? convertAssociation(value, property) : getMappedValue(
|
||||
property.with(keyword.getKey()), value);
|
||||
|
||||
return new BasicDBObject(keyword.key, convertedValue);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -169,43 +205,78 @@ public class QueryMapper {
|
||||
* @param newKey the key the value will be bound to eventually
|
||||
* @return
|
||||
*/
|
||||
private Object getMappedValue(Field documentField, Object value) {
|
||||
protected Object getMappedValue(Field documentField, Object value) {
|
||||
|
||||
if (documentField.isIdField()) {
|
||||
|
||||
if (value instanceof DBObject) {
|
||||
if (isDBObject(value)) {
|
||||
DBObject valueDbo = (DBObject) value;
|
||||
DBObject resultDbo = new BasicDBObject(valueDbo.toMap());
|
||||
|
||||
if (valueDbo.containsField("$in") || valueDbo.containsField("$nin")) {
|
||||
String inKey = valueDbo.containsField("$in") ? "$in" : "$nin";
|
||||
List<Object> ids = new ArrayList<Object>();
|
||||
for (Object id : (Iterable<?>) valueDbo.get(inKey)) {
|
||||
ids.add(convertId(id));
|
||||
}
|
||||
valueDbo.put(inKey, ids.toArray(new Object[ids.size()]));
|
||||
resultDbo.put(inKey, ids.toArray(new Object[ids.size()]));
|
||||
} else if (valueDbo.containsField("$ne")) {
|
||||
valueDbo.put("$ne", convertId(valueDbo.get("$ne")));
|
||||
resultDbo.put("$ne", convertId(valueDbo.get("$ne")));
|
||||
} else {
|
||||
return getMappedObject((DBObject) value, null);
|
||||
return getMappedObject(resultDbo, null);
|
||||
}
|
||||
|
||||
return valueDbo;
|
||||
return resultDbo;
|
||||
|
||||
} else {
|
||||
return convertId(value);
|
||||
}
|
||||
}
|
||||
|
||||
if (Keyword.isKeyword(value)) {
|
||||
if (isNestedKeyword(value)) {
|
||||
return getMappedKeyword(new Keyword((DBObject) value), null);
|
||||
}
|
||||
|
||||
if (documentField.isAssociation()) {
|
||||
return convertAssociation(value, documentField.getProperty());
|
||||
if (isAssociationConversionNecessary(documentField, value)) {
|
||||
return convertAssociation(value, documentField);
|
||||
}
|
||||
|
||||
return convertSimpleOrDBObject(value, documentField.getPropertyEntity());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link Field} represents an association reference that together with the given value
|
||||
* requires conversion to a {@link org.springframework.data.mongodb.core.mapping.DBRef} object. We check whether the
|
||||
* type of the given value is compatible with the type of the given document field in order to deal with potential
|
||||
* query field exclusions, since MongoDB uses the {@code int} {@literal 0} as an indicator for an excluded field.
|
||||
*
|
||||
* @param documentField must not be {@literal null}.
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
protected boolean isAssociationConversionNecessary(Field documentField, Object value) {
|
||||
|
||||
Assert.notNull(documentField, "Document field must not be null!");
|
||||
|
||||
if (value == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!documentField.isAssociation()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Class<? extends Object> type = value.getClass();
|
||||
MongoPersistentProperty property = documentField.getProperty();
|
||||
|
||||
if (property.getActualType().isAssignableFrom(type)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = documentField.getPropertyEntity();
|
||||
return entity.hasIdProperty() && entity.getIdProperty().getActualType().isAssignableFrom(type);
|
||||
}
|
||||
|
||||
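A sketch of the scenario the check above guards against, assuming a hypothetical Order document with a @DBRef-mapped customer property; the Query API calls are standard, the domain names are not from this changeset:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.query.Query;

class FieldExclusionSketch {

	Query excludeCustomer(Object orderId) {

		// Excluding a @DBRef-mapped property renders { "customer" : 0 } in the fields document.
		Query query = new Query(where("_id").is(orderId));
		query.fields().exclude("customer");

		// When that fields document is mapped, isAssociationConversionNecessary(field, 0)
		// returns false, so the Integer 0 passes through instead of being misread as a
		// Customer id and converted into a DBRef.
		return query;
	}
}
```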
/**
|
||||
* Retriggers mapping if the given source is a {@link DBObject} or simply invokes the converter for simple values otherwise.
|
||||
*
|
||||
@@ -213,13 +284,13 @@ public class QueryMapper {
|
||||
* @param entity
|
||||
* @return
|
||||
*/
|
||||
private Object convertSimpleOrDBObject(Object source, MongoPersistentEntity<?> entity) {
|
||||
protected Object convertSimpleOrDBObject(Object source, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (source instanceof BasicDBList) {
|
||||
return delegateConvertToMongoType(source, entity);
|
||||
}
|
||||
|
||||
if (source instanceof DBObject) {
|
||||
if (isDBObject(source)) {
|
||||
return getMappedObject((DBObject) source, entity);
|
||||
}
|
||||
|
||||
@@ -235,7 +306,11 @@ public class QueryMapper {
|
||||
* @return the converted mongo type or null if source is null
|
||||
*/
|
||||
protected Object delegateConvertToMongoType(Object source, MongoPersistentEntity<?> entity) {
|
||||
return converter.convertToMongoType(source);
|
||||
return converter.convertToMongoType(source, entity == null ? null : entity.getTypeInformation());
|
||||
}
|
||||
|
||||
protected Object convertAssociation(Object source, Field field) {
|
||||
return convertAssociation(source, field.getProperty());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -245,16 +320,16 @@ public class QueryMapper {
|
||||
* @param property
|
||||
* @return
|
||||
*/
|
||||
private Object convertAssociation(Object source, MongoPersistentProperty property) {
|
||||
protected Object convertAssociation(Object source, MongoPersistentProperty property) {
|
||||
|
||||
if (property == null || !property.isAssociation()) {
|
||||
if (property == null || source == null || source instanceof DBRef || source instanceof DBObject) {
|
||||
return source;
|
||||
}
|
||||
|
||||
if (source instanceof Iterable) {
|
||||
BasicDBList result = new BasicDBList();
|
||||
for (Object element : (Iterable<?>) source) {
|
||||
result.add(element instanceof DBRef ? element : converter.toDBRef(element, property));
|
||||
result.add(createDbRefFor(element, property));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
@@ -263,13 +338,55 @@ public class QueryMapper {
|
||||
BasicDBObject result = new BasicDBObject();
|
||||
DBObject dbObject = (DBObject) source;
|
||||
for (String key : dbObject.keySet()) {
|
||||
Object o = dbObject.get(key);
|
||||
result.put(key, o instanceof DBRef ? o : converter.toDBRef(o, property));
|
||||
result.put(key, createDbRefFor(dbObject.get(key), property));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
return source == null || source instanceof DBRef ? source : converter.toDBRef(source, property);
|
||||
return createDbRefFor(source, property);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether the given value is a {@link DBObject}.
|
||||
*
|
||||
* @param value can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
protected final boolean isDBObject(Object value) {
|
||||
return value instanceof DBObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Entry} for the given {@link Field} with the given value.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @param value can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
protected final Entry<String, Object> createMapEntry(Field field, Object value) {
|
||||
return createMapEntry(field.getMappedKey(), value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Entry} with the given key and value.
|
||||
*
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @param value can be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
private Entry<String, Object> createMapEntry(String key, Object value) {
|
||||
|
||||
Assert.hasText(key, "Key must not be null or empty!");
|
||||
return Collections.singletonMap(key, value).entrySet().iterator().next();
|
||||
}
|
||||
|
||||
private DBRef createDbRefFor(Object source, MongoPersistentProperty property) {
|
||||
|
||||
if (source instanceof DBRef) {
|
||||
return (DBRef) source;
|
||||
}
|
||||
|
||||
return converter.toDBRef(source, property);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -289,15 +406,50 @@ public class QueryMapper {
|
||||
return delegateConvertToMongoType(id, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link Object} is a keyword, i.e. if it's a {@link DBObject} with a keyword key.
|
||||
*
|
||||
* @param candidate
|
||||
* @return
|
||||
*/
|
||||
protected boolean isNestedKeyword(Object candidate) {
|
||||
|
||||
if (!(candidate instanceof BasicDBObject)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BasicDBObject dbObject = (BasicDBObject) candidate;
|
||||
Set<String> keys = dbObject.keySet();
|
||||
|
||||
if (keys.size() != 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return isKeyword(keys.iterator().next().toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link String} is a MongoDB keyword. The default implementation will check against the
|
||||
* set of registered keywords returned by {@link #getKeywords()}.
|
||||
*
|
||||
* @param candidate
|
||||
* @return
|
||||
*/
|
||||
protected boolean isKeyword(String candidate) {
|
||||
return candidate.startsWith("$");
|
||||
}
|
||||
|
||||
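To make the keyword handling above concrete, a hedged sketch of a query containing both a top-level keyword and a nested one; the `converter` and `personEntity` collaborators are assumed to be configured elsewhere:

```java
import java.util.Arrays;

import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

class KeywordMappingSketch {

	DBObject mapOrQuery(MongoConverter converter, MongoPersistentEntity<?> personEntity) {

		QueryMapper mapper = new QueryMapper(converter);

		DBObject query = new BasicDBObject("$or", Arrays.asList( //
				new BasicDBObject("name", "Carter"), // plain field criterion
				new BasicDBObject("age", new BasicDBObject("$gt", 10)))); // nested keyword

		// isNestedKeyword(query) is true ("$or" is the only key), so the whole object is
		// routed through getMappedKeyword(..) before the individual criteria are mapped.
		return mapper.getMappedObject(query, personEntity);
	}
}
```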
/**
|
||||
* Value object to capture a query keyword representation.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class Keyword {
|
||||
static class Keyword {
|
||||
|
||||
String key;
|
||||
Object value;
|
||||
private static final String N_OR_PATTERN = "\\$.*or";
|
||||
|
||||
private final String key;
|
||||
private final Object value;
|
||||
|
||||
public Keyword(DBObject source, String key) {
|
||||
this.key = key;
|
||||
@@ -322,25 +474,21 @@ public class QueryMapper {
|
||||
return "$exists".equalsIgnoreCase(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given value actually represents a keyword. If this returns {@literal true} it's safe to call
|
||||
* the constructor.
|
||||
*
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
public static boolean isKeyword(Object value) {
|
||||
public boolean isOrOrNor() {
|
||||
return key.matches(N_OR_PATTERN);
|
||||
}
|
||||
|
||||
if (value instanceof String) {
|
||||
return ((String) value).startsWith("$");
|
||||
}
|
||||
public boolean hasIterableValue() {
|
||||
return value instanceof Iterable;
|
||||
}
|
||||
|
||||
if (!(value instanceof DBObject)) {
|
||||
return false;
|
||||
}
|
||||
public String getKey() {
|
||||
return key;
|
||||
}
|
||||
|
||||
DBObject dbObject = (DBObject) value;
|
||||
return dbObject.keySet().size() == 1 && dbObject.keySet().iterator().next().startsWith("$");
|
||||
@SuppressWarnings("unchecked")
|
||||
public <T> T getValue() {
|
||||
return (T) value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -349,7 +497,7 @@ public class QueryMapper {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class Field {
|
||||
protected static class Field {
|
||||
|
||||
private static final String ID_KEY = "_id";
|
||||
|
||||
@@ -386,7 +534,9 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the underlying {@link MongoPersistentProperty} backing the field.
|
||||
* Returns the underlying {@link MongoPersistentProperty} backing the field. For path traversals this will be the
|
||||
* property that represents the value to handle. This means it'll be the leaf property for plain paths or the
|
||||
* association property in case we refer to an association somewhere in the path.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@@ -420,18 +570,36 @@ public class QueryMapper {
|
||||
public String getMappedKey() {
|
||||
return isIdField() ? ID_KEY : name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the field references an association in case it refers to a nested field.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean containsAssociation() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public Association<MongoPersistentProperty> getAssociation() {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extension of {@link Field} to be backed with mapping metadata.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class MetadataBackedField extends Field {
|
||||
protected static class MetadataBackedField extends Field {
|
||||
|
||||
private static final String INVALID_ASSOCIATION_REFERENCE = "Invalid path reference %s! Associations can only be pointed to directly or via their id property!";
|
||||
|
||||
private final MongoPersistentEntity<?> entity;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final MongoPersistentProperty property;
|
||||
private final PersistentPropertyPath<MongoPersistentProperty> path;
|
||||
private final Association<MongoPersistentProperty> association;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MetadataBackedField} with the given name, {@link MongoPersistentEntity} and
|
||||
@@ -443,6 +611,21 @@ public class QueryMapper {
|
||||
*/
|
||||
public MetadataBackedField(String name, MongoPersistentEntity<?> entity,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context) {
|
||||
this(name, entity, context, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MetadataBackedField} with the given name, {@link MongoPersistentEntity} and
|
||||
* {@link MappingContext} with the given {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @param entity must not be {@literal null}.
|
||||
* @param context must not be {@literal null}.
|
||||
* @param property may be {@literal null}.
|
||||
*/
|
||||
public MetadataBackedField(String name, MongoPersistentEntity<?> entity,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context,
|
||||
MongoPersistentProperty property) {
|
||||
|
||||
super(name);
|
||||
|
||||
@@ -451,8 +634,9 @@ public class QueryMapper {
|
||||
this.entity = entity;
|
||||
this.mappingContext = context;
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> path = getPath(name);
|
||||
this.property = path == null ? null : path.getLeafProperty();
|
||||
this.path = getPath(name);
|
||||
this.property = path == null ? property : path.getLeafProperty();
|
||||
this.association = findAssociation();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -461,7 +645,7 @@ public class QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
public MetadataBackedField with(String name) {
|
||||
return new MetadataBackedField(name, entity, mappingContext);
|
||||
return new MetadataBackedField(name, entity, mappingContext, property);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -486,7 +670,7 @@ public class QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
public MongoPersistentProperty getProperty() {
|
||||
return property;
|
||||
return association == null ? property : association.getInverse();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -505,9 +689,34 @@ public class QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
public boolean isAssociation() {
|
||||
return association != null;
|
||||
}
|
||||
|
||||
MongoPersistentProperty property = getProperty();
|
||||
return property == null ? false : property.isAssociation();
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getAssociation()
|
||||
*/
|
||||
@Override
|
||||
public Association<MongoPersistentProperty> getAssociation() {
|
||||
return association;
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the association property in the {@link PersistentPropertyPath}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private final Association<MongoPersistentProperty> findAssociation() {
|
||||
|
||||
if (this.path != null) {
|
||||
for (MongoPersistentProperty p : this.path) {
|
||||
if (p.isAssociation()) {
|
||||
return p.getAssociation();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -516,19 +725,57 @@ public class QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
public String getMappedKey() {
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> path = getPath(name);
|
||||
return path == null ? name : path.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE);
|
||||
return path == null ? name : path.toDotPath(getPropertyConverter());
|
||||
}
|
||||
|
||||
private PersistentPropertyPath<MongoPersistentProperty> getPath(String name) {
|
||||
protected PersistentPropertyPath<MongoPersistentProperty> getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link PersistentPropertyPath} for the given <code>pathExpression</code>.
|
||||
*
|
||||
* @param pathExpression
|
||||
* @return
|
||||
*/
|
||||
private PersistentPropertyPath<MongoPersistentProperty> getPath(String pathExpression) {
|
||||
|
||||
try {
|
||||
PropertyPath path = PropertyPath.from(name, entity.getTypeInformation());
|
||||
return mappingContext.getPersistentPropertyPath(path);
|
||||
|
||||
PropertyPath path = PropertyPath.from(pathExpression, entity.getTypeInformation());
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(path);
|
||||
|
||||
Iterator<MongoPersistentProperty> iterator = propertyPath.iterator();
|
||||
boolean associationDetected = false;
|
||||
|
||||
while (iterator.hasNext()) {
|
||||
|
||||
MongoPersistentProperty property = iterator.next();
|
||||
|
||||
if (property.isAssociation()) {
|
||||
associationDetected = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (associationDetected && !property.isIdProperty()) {
|
||||
throw new MappingException(String.format(INVALID_ASSOCIATION_REFERENCE, pathExpression));
|
||||
}
|
||||
}
|
||||
|
||||
return propertyPath;
|
||||
} catch (PropertyReferenceException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the {@link Converter} to be used to create the mapped key. The default implementation will use
|
||||
* {@link PropertyToFieldNameConverter}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return PropertyToFieldNameConverter.INSTANCE;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,16 +15,35 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update.Modifier;
|
||||
import org.springframework.data.mongodb.core.query.Update.Modifiers;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* A subclass of {@link QueryMapper} that retains type information on the mongo types.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class UpdateMapper extends QueryMapper {
|
||||
|
||||
private final MongoWriter<?> converter;
|
||||
private final MongoConverter converter;
|
||||
|
||||
/**
|
||||
* Creates a new {@link UpdateMapper} using the given {@link MongoConverter}.
|
||||
@@ -49,4 +68,208 @@ public class UpdateMapper extends QueryMapper {
|
||||
return entity == null ? super.delegateConvertToMongoType(source, null) : converter.convertToMongoType(source,
|
||||
entity.getTypeInformation());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper#getMappedObjectForField(org.springframework.data.mongodb.core.convert.QueryMapper.Field, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Entry<String, Object> getMappedObjectForField(Field field, Object rawValue) {
|
||||
|
||||
if (isDBObject(rawValue)) {
|
||||
return createMapEntry(field, convertSimpleOrDBObject(rawValue, field.getPropertyEntity()));
|
||||
}
|
||||
|
||||
if (isQuery(rawValue)) {
|
||||
return createMapEntry(field,
|
||||
super.getMappedObject(((Query) rawValue).getQueryObject(), field.getPropertyEntity()));
|
||||
}
|
||||
|
||||
if (isUpdateModifier(rawValue)) {
|
||||
return getMappedUpdateModifier(field, rawValue);
|
||||
}
|
||||
|
||||
return super.getMappedObjectForField(field, getMappedValue(field, rawValue));
|
||||
}
|
||||
|
||||
private Entry<String, Object> getMappedUpdateModifier(Field field, Object rawValue) {
|
||||
Object value = null;
|
||||
|
||||
if (rawValue instanceof Modifier) {
|
||||
|
||||
value = getMappedValue((Modifier) rawValue);
|
||||
|
||||
} else if (rawValue instanceof Modifiers) {
|
||||
|
||||
DBObject modificationOperations = new BasicDBObject();
|
||||
|
||||
for (Modifier modifier : ((Modifiers) rawValue).getModifiers()) {
|
||||
modificationOperations.putAll(getMappedValue(modifier).toMap());
|
||||
}
|
||||
|
||||
value = modificationOperations;
|
||||
} else {
|
||||
throw new IllegalArgumentException(String.format("Unable to map value of type '%s'!", rawValue.getClass()));
|
||||
}
|
||||
|
||||
return createMapEntry(field, value);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper#isAssociationConversionNecessary(org.springframework.data.mongodb.core.convert.QueryMapper.Field, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected boolean isAssociationConversionNecessary(Field documentField, Object value) {
|
||||
return super.isAssociationConversionNecessary(documentField, value) || documentField.containsAssociation();
|
||||
}
|
||||
|
||||
private boolean isUpdateModifier(Object value) {
|
||||
return value instanceof Modifier || value instanceof Modifiers;
|
||||
}
|
||||
|
||||
private boolean isQuery(Object value) {
|
||||
return value instanceof Query;
|
||||
}
|
||||
|
||||
private DBObject getMappedValue(Modifier modifier) {
|
||||
|
||||
Object value = converter.convertToMongoType(modifier.getValue(), ClassTypeInformation.OBJECT);
|
||||
return new BasicDBObject(modifier.getKey(), value);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper#createPropertyField(org.springframework.data.mongodb.core.mapping.MongoPersistentEntity, java.lang.String, org.springframework.data.mapping.context.MappingContext)
|
||||
*/
|
||||
@Override
|
||||
protected Field createPropertyField(MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
return entity == null ? super.createPropertyField(entity, key, mappingContext) : //
|
||||
new MetadataBackedUpdateField(entity, key, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link MetadataBackedField} that handles {@literal $} paths inside a field key. We clean up an update key
|
||||
* containing a {@literal $} before handing it to the super class to make sure property lookups and transformations
|
||||
continue to work as expected. We provide a custom property converter to re-apply the cleaned-up {@literal $}s
|
||||
* when constructing the mapped key.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class MetadataBackedUpdateField extends MetadataBackedField {
|
||||
|
||||
private final String key;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MetadataBackedField} with the given {@link MongoPersistentEntity}, key and
|
||||
* {@link MappingContext}. We clean up the key before handing it up to the super class to make sure it continues to
|
||||
* work as expected.
|
||||
*
|
||||
* @param entity must not be {@literal null}.
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
*/
|
||||
public MetadataBackedUpdateField(MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
super(key.replaceAll("\\.\\$", ""), entity, mappingContext);
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getMappedKey()
|
||||
*/
|
||||
@Override
|
||||
public String getMappedKey() {
|
||||
return this.getPath() == null ? key : super.getMappedKey();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getPropertyConverter()
|
||||
*/
|
||||
@Override
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return isAssociation() ? new AssociationConverter(getAssociation()) : new UpdatePropertyConverter(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converter to skip all properties after an association property was rendered.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class AssociationConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final MongoPersistentProperty property;
|
||||
private boolean associationFound;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AssociationConverter} for the given {@link Association}.
|
||||
*
|
||||
* @param association must not be {@literal null}.
|
||||
*/
|
||||
public AssociationConverter(Association<MongoPersistentProperty> association) {
|
||||
|
||||
Assert.notNull(association, "Association must not be null!");
|
||||
this.property = association.getInverse();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty source) {
|
||||
|
||||
if (associationFound) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (property.equals(source)) {
|
||||
associationFound = true;
|
||||
}
|
||||
|
||||
return source.getFieldName();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Special {@link Converter} for {@link MongoPersistentProperty} instances that will concatenate the {@literal $}
|
||||
* contained in the source update key.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class UpdatePropertyConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final Iterator<String> iterator;
|
||||
|
||||
/**
|
||||
* Creates a new {@link UpdatePropertyConverter} with the given update key.
|
||||
*
|
||||
* @param updateKey must not be {@literal null} or empty.
|
||||
*/
|
||||
public UpdatePropertyConverter(String updateKey) {
|
||||
|
||||
Assert.hasText(updateKey, "Update key must not be null or empty!");
|
||||
|
||||
this.iterator = Arrays.asList(updateKey.split("\\.")).iterator();
|
||||
this.iterator.next();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty property) {
|
||||
|
||||
String mappedName = PropertyToFieldNameConverter.INSTANCE.convert(property);
|
||||
return iterator.hasNext() && iterator.next().equals("$") ? String.format("%s.$", mappedName) : mappedName;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
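The mapper above exists so that update keys using the positional operator, such as `items.$.name`, can still be resolved against entity metadata: the `.$` is stripped for property lookup and re-applied to the mapped field name. A minimal usage sketch, assuming a hypothetical `Order` document type with a `List<Item> items` property (names are illustrative, not part of the change set):

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Update;

class PositionalUpdateExample {

	// The UpdateMapper strips the ".$" to resolve the "items.name" property path and
	// re-applies it when building the mapped field name sent to MongoDB.
	void renameMatchedItem(MongoTemplate template) {
		template.updateFirst(
				query(where("items.name").is("old")),
				new Update().set("items.$.name", "new"),
				Order.class); // Order is a hypothetical document type
	}
}
```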
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -30,10 +30,8 @@ import org.springframework.util.Assert;
|
||||
*/
|
||||
public class Point {
|
||||
|
||||
@Field(order = 10)
|
||||
private final double x;
|
||||
@Field(order = 20)
|
||||
private final double y;
|
||||
@Field(order = 10) private final double x;
|
||||
@Field(order = 20) private final double y;
|
||||
|
||||
@PersistenceConstructor
|
||||
public Point(double x, double y) {
|
||||
@@ -69,9 +67,9 @@ public class Point {
|
||||
int result = 1;
|
||||
long temp;
|
||||
temp = Double.doubleToLongBits(x);
|
||||
result = prime * result + (int) (temp ^ (temp >>> 32));
|
||||
result = prime * result + (int) (temp ^ temp >>> 32);
|
||||
temp = Double.doubleToLongBits(y);
|
||||
result = prime * result + (int) (temp ^ (temp >>> 32));
|
||||
result = prime * result + (int) (temp ^ temp >>> 32);
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -98,6 +96,6 @@ public class Point {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("Point [latitude=%f, longitude=%f]", x, y);
|
||||
return String.format("Point [x=%f, y=%f]", x, y);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -51,10 +51,24 @@ public @interface CompoundIndex {
|
||||
@Deprecated
|
||||
IndexDirection direction() default IndexDirection.ASCENDING;
|
||||
|
||||
/**
|
||||
* @see http://docs.mongodb.org/manual/core/index-unique/
|
||||
* @return
|
||||
*/
|
||||
boolean unique() default false;
|
||||
|
||||
/**
|
||||
* If set to true index will skip over any document that is missing the indexed field.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/core/index-sparse/
|
||||
* @return
|
||||
*/
|
||||
boolean sparse() default false;
|
||||
|
||||
/**
|
||||
* @see http://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping
|
||||
* @return
|
||||
*/
|
||||
boolean dropDups() default false;
|
||||
|
||||
/**
|
||||
|
||||
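The `unique`, `sparse` and `dropDups` attributes added to `@CompoundIndex` above map directly onto the corresponding MongoDB index options. A hedged sketch of how they would be used on a domain type (the `Person` class and index definition are illustrative only):

```java
import org.springframework.data.mongodb.core.index.CompoundIndex;
import org.springframework.data.mongodb.core.index.CompoundIndexes;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
@CompoundIndexes({
		// unique + sparse compound index over lastName (ascending) and age (descending)
		@CompoundIndex(name = "lastname_age_idx", def = "{'lastName': 1, 'age': -1}", unique = true, sparse = true)
})
class Person {

	private String lastName;
	private int age;
}
```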
@@ -0,0 +1,41 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
/**
* Geospatial index type.
*
* @author Laurent Canet
* @author Oliver Gierke
* @since 1.4
*/
public enum GeoSpatialIndexType {

/**
* Simple 2-dimensional index for legacy-format points.
*/
GEO_2D,

/**
* 2D index for GeoJSON-formatted data over a sphere. Only available in MongoDB 2.4 and later.
*/
GEO_2DSPHERE,

/**
* A haystack index for grouping results over small areas.
*/
GEO_HAYSTACK
}
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -13,7 +13,6 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
@@ -24,7 +23,8 @@ import java.lang.annotation.Target;
|
||||
/**
|
||||
* Mark a field to be indexed using MongoDB's geospatial indexing feature.
|
||||
*
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* @author Jon Brisbin
|
||||
* @author Laurent Canet
|
||||
*/
|
||||
@Target(ElementType.FIELD)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@@ -65,4 +65,27 @@ public @interface GeoSpatialIndexed {
|
||||
*/
|
||||
int bits() default 26;
|
||||
|
||||
/**
|
||||
* The type of the geospatial index. Default is {@link GeoSpatialIndexType#GEO_2D}
|
||||
*
|
||||
* @since 1.4
|
||||
* @return
|
||||
*/
|
||||
GeoSpatialIndexType type() default GeoSpatialIndexType.GEO_2D;
|
||||
|
||||
/**
|
||||
* The bucket size for {@link GeoSpatialIndexType#GEO_HAYSTACK} indexes, in coordinate units.
|
||||
*
|
||||
* @since 1.4
|
||||
* @return
|
||||
*/
|
||||
double bucketSize() default 1.0;
|
||||
|
||||
/**
|
||||
* The name of the additional field to use for {@link GeoSpatialIndexType#GEO_HAYSTACK} indexes
|
||||
*
|
||||
* @since 1.4
|
||||
* @return
|
||||
*/
|
||||
String additionalField() default "";
|
||||
}
|
||||
|
||||
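The new `type`, `bucketSize` and `additionalField` attributes above select which geospatial index variant gets created for an annotated property. A minimal sketch, assuming a hypothetical `Venue` document type:

```java
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexed;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Venue {

	// 2dsphere index for GeoJSON-style coordinates
	@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE)
	private double[] location;

	// haystack index bucketed by the additional "category" field
	@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_HAYSTACK, additionalField = "category", bucketSize = 1.0)
	private double[] position;

	private String category;
}
```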
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,6 +16,7 @@
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
@@ -25,14 +26,18 @@ import com.mongodb.DBObject;
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Laurent Canet
|
||||
*/
|
||||
public class GeospatialIndex implements IndexDefinition {
|
||||
|
||||
private final String field;
|
||||
private String name;
|
||||
private Integer min = null;
|
||||
private Integer max = null;
|
||||
private Integer bits = null;
|
||||
private Integer min;
|
||||
private Integer max;
|
||||
private Integer bits;
|
||||
private GeoSpatialIndexType type = GeoSpatialIndexType.GEO_2D;
|
||||
private Double bucketSize = 1.0;
|
||||
private String additionalField;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeospatialIndex} for the given field.
|
||||
@@ -40,52 +45,146 @@ public class GeospatialIndex implements IndexDefinition {
|
||||
* @param field must not be empty or {@literal null}.
|
||||
*/
|
||||
public GeospatialIndex(String field) {
|
||||
Assert.hasText(field);
|
||||
|
||||
Assert.hasText(field, "Field must have text!");
|
||||
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex named(String name) {
|
||||
|
||||
Assert.hasText(name, "Name must have text!");
|
||||
|
||||
this.name = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param min
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withMin(int min) {
|
||||
this.min = Integer.valueOf(min);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param max
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withMax(int max) {
|
||||
this.max = Integer.valueOf(max);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param bits
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withBits(int bits) {
|
||||
this.bits = Integer.valueOf(bits);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param type must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex typed(GeoSpatialIndexType type) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
|
||||
this.type = type;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param bucketSize
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withBucketSize(double bucketSize) {
|
||||
this.bucketSize = bucketSize;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param fieldName
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withAdditionalField(String fieldName) {
|
||||
this.additionalField = fieldName;
|
||||
return this;
|
||||
}
|
||||
|
||||
public DBObject getIndexKeys() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
dbo.put(field, "2d");
|
||||
|
||||
switch (type) {
|
||||
|
||||
case GEO_2D:
|
||||
dbo.put(field, "2d");
|
||||
break;
|
||||
|
||||
case GEO_2DSPHERE:
|
||||
dbo.put(field, "2dsphere");
|
||||
break;
|
||||
|
||||
case GEO_HAYSTACK:
|
||||
dbo.put(field, "geoHaystack");
|
||||
if (!StringUtils.hasText(additionalField)) {
|
||||
throw new IllegalArgumentException("When defining geoHaystack index, an additionnal field must be defined");
|
||||
}
|
||||
dbo.put(additionalField, 1);
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new IllegalArgumentException("Unsupported geospatial index " + type);
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
|
||||
|
||||
public DBObject getIndexOptions() {
|
||||
if (name == null && min == null && max == null) {
|
||||
|
||||
if (name == null && min == null && max == null && bucketSize == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
if (name != null) {
|
||||
dbo.put("name", name);
|
||||
}
|
||||
if (min != null) {
|
||||
dbo.put("min", min);
|
||||
}
|
||||
if (max != null) {
|
||||
dbo.put("max", max);
|
||||
}
|
||||
if (bits != null) {
|
||||
dbo.put("bits", bits);
|
||||
|
||||
switch (type) {
|
||||
|
||||
case GEO_2D:
|
||||
|
||||
if (min != null) {
|
||||
dbo.put("min", min);
|
||||
}
|
||||
if (max != null) {
|
||||
dbo.put("max", max);
|
||||
}
|
||||
if (bits != null) {
|
||||
dbo.put("bits", bits);
|
||||
}
|
||||
break;
|
||||
|
||||
case GEO_2DSPHERE:
|
||||
|
||||
break;
|
||||
|
||||
case GEO_HAYSTACK:
|
||||
|
||||
if (bucketSize != null) {
|
||||
dbo.put("bucketSize", bucketSize);
|
||||
}
|
||||
break;
|
||||
}
|
||||
return dbo;
|
||||
}
|
||||
|
||||
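`getIndexKeys()` and `getIndexOptions()` above emit the key and option documents appropriate for each index type. A hedged sketch of building such a definition programmatically and registering it (`Venue` is the hypothetical document type used earlier):

```java
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeospatialIndex;

class GeoIndexSetup {

	void createHaystackIndex(MongoTemplate template) {

		// keys:    { "position" : "geoHaystack", "category" : 1 }
		// options: { "name" : "venue_position", "bucketSize" : 2.0 }
		GeospatialIndex index = new GeospatialIndex("position")
				.typed(GeoSpatialIndexType.GEO_HAYSTACK)
				.withAdditionalField("category")
				.withBucketSize(2.0)
				.named("venue_position");

		template.indexOps(Venue.class).ensureIndex(index);
	}
}
```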
@@ -41,8 +41,7 @@ public class Index implements IndexDefinition {
|
||||
|
||||
private boolean sparse = false;
|
||||
|
||||
public Index() {
|
||||
}
|
||||
public Index() {}
|
||||
|
||||
public Index(String key, Direction direction) {
|
||||
fieldSpec.put(key, direction);
|
||||
@@ -83,16 +82,33 @@ public class Index implements IndexDefinition {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reject all documents that contain a duplicate value for the indexed field.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/core/index-unique/
|
||||
* @return
|
||||
*/
|
||||
public Index unique() {
|
||||
this.unique = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Skip over any document that is missing the indexed field.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/core/index-sparse/
|
||||
* @return
|
||||
*/
|
||||
public Index sparse() {
|
||||
this.sparse = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @see http://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping
|
||||
* @param duplicates
|
||||
* @return
|
||||
*/
|
||||
public Index unique(Duplicates duplicates) {
|
||||
if (duplicates == Duplicates.DROP) {
|
||||
this.dropDuplicates = true;
|
||||
|
||||
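The fluent `unique()` and `sparse()` methods documented above set the corresponding index options on a programmatically built `Index`. A brief sketch of the programmatic route (the `Customer` type is illustrative):

```java
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.Index;

class IndexSetup {

	void createEmailIndex(MongoTemplate template) {

		// unique but sparse: documents missing "email" are skipped instead of rejected
		Index emailIndex = new Index("email", Direction.ASC).unique().sparse();
		template.indexOps(Customer.class).ensureIndex(emailIndex);
	}
}
```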
@@ -32,16 +32,42 @@ import java.lang.annotation.Target;
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface Indexed {
|
||||
|
||||
/**
|
||||
* If set to true reject all documents that contain a duplicate value for the indexed field.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/core/index-unique/
|
||||
* @return
|
||||
*/
|
||||
boolean unique() default false;
|
||||
|
||||
IndexDirection direction() default IndexDirection.ASCENDING;
|
||||
|
||||
/**
|
||||
* If set to true index will skip over any document that is missing the indexed field.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/core/index-sparse/
|
||||
* @return
|
||||
*/
|
||||
boolean sparse() default false;
|
||||
|
||||
/**
|
||||
* @see http://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping
|
||||
* @return
|
||||
*/
|
||||
boolean dropDups() default false;
|
||||
|
||||
/**
|
||||
* Index name.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String name() default "";
|
||||
|
||||
/**
|
||||
* Collection name for the index to be created on.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String collection() default "";
|
||||
|
||||
/**
|
||||
|
||||
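The `@Indexed` attributes above mirror the same MongoDB options for the annotation-driven route, where `MongoPersistentEntityIndexCreator` creates the index when the entity is first mapped. A minimal sketch (the `Customer` type is illustrative):

```java
import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Customer {

	// created automatically when the entity is mapped for the first time
	@Indexed(name = "customer_email_idx", unique = true, sparse = true)
	private String email;
}
```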
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -45,11 +45,12 @@ import com.mongodb.util.JSON;
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Johno Crawford
|
||||
* @author Laurent Canet
|
||||
*/
|
||||
public class MongoPersistentEntityIndexCreator implements
|
||||
ApplicationListener<MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>> {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class);
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class);
|
||||
|
||||
private final Map<Class<?>, Boolean> classesSeen = new ConcurrentHashMap<Class<?>, Boolean>();
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
@@ -96,8 +97,8 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
protected void checkForIndexes(final MongoPersistentEntity<?> entity) {
|
||||
final Class<?> type = entity.getType();
|
||||
if (!classesSeen.containsKey(type)) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Analyzing class " + type + " for index information.");
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Analyzing class " + type + " for index information.");
|
||||
}
|
||||
|
||||
// Make sure indexes get created
|
||||
@@ -111,8 +112,8 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
ensureIndex(indexColl, index.name(), definition, index.unique(), index.dropDups(), index.sparse(),
|
||||
index.background(), index.expireAfterSeconds());
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Created compound index " + index);
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Created compound index " + index);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -132,8 +133,8 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
} else {
|
||||
if (!name.equals(field.getName()) && index.unique() && !index.sparse()) {
|
||||
// Names don't match, and sparse is not true. This situation will generate an error on the server.
|
||||
if (log.isWarnEnabled()) {
|
||||
log.warn("The index name " + name + " doesn't match this property name: " + field.getName()
|
||||
if (LOGGER.isWarnEnabled()) {
|
||||
LOGGER.warn("The index name " + name + " doesn't match this property name: " + field.getName()
|
||||
+ ". Setting sparse=true on this index will prevent errors when inserting documents.");
|
||||
}
|
||||
}
|
||||
@@ -146,8 +147,8 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
ensureIndex(collection, name, definition, index.unique(), index.dropDups(), index.sparse(),
|
||||
index.background(), index.expireAfterSeconds());
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Created property index " + index);
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Created property index " + index);
|
||||
}
|
||||
|
||||
} else if (field.isAnnotationPresent(GeoSpatialIndexed.class)) {
|
||||
@@ -157,13 +158,15 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
GeospatialIndex indexObject = new GeospatialIndex(persistentProperty.getFieldName());
|
||||
indexObject.withMin(index.min()).withMax(index.max());
|
||||
indexObject.named(StringUtils.hasText(index.name()) ? index.name() : field.getName());
|
||||
indexObject.typed(index.type()).withBucketSize(index.bucketSize())
|
||||
.withAdditionalField(index.additionalField());
|
||||
|
||||
String collection = StringUtils.hasText(index.collection()) ? index.collection() : entity.getCollection();
|
||||
mongoDbFactory.getDb().getCollection(collection)
|
||||
.ensureIndex(indexObject.getIndexKeys(), indexObject.getIndexOptions());
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug(String.format("Created %s for entity %s in collection %s! ", indexObject, entity.getType(),
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Created %s for entity %s in collection %s! ", indexObject, entity.getType(),
|
||||
collection));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 by the original author(s).
|
||||
* Copyright 2011-2013 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -27,7 +27,8 @@ import org.springframework.data.annotation.Reference;
|
||||
* An annotation that indicates the annotated field is to be stored using a {@link com.mongodb.DBRef}.
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @authot Oliver Gierke
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@Documented
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@@ -41,4 +42,11 @@ public @interface DBRef {
|
||||
* @return
|
||||
*/
|
||||
String db() default "";
|
||||
|
||||
/**
|
||||
* Controls whether the referenced entity should be loaded lazily. This defaults to {@literal false}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean lazy() default false;
|
||||
}
|
||||
|
||||
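The new `lazy` attribute above defers resolution of the referenced document until it is first accessed instead of fetching it when the owning document is loaded. A short, hedged sketch (`Book` and `Publisher` are illustrative types):

```java
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Book {

	// the Publisher proxy is resolved on first access instead of when the Book is loaded
	@DBRef(lazy = true)
	private Publisher publisher;
}
```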
@@ -126,13 +126,13 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
|
||||
public void onAfterDelete(DBObject dbo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterConvert({})", dbo);
|
||||
LOG.debug("onAfterDelete({})", dbo);
|
||||
}
|
||||
}
|
||||
|
||||
public void onBeforeDelete(DBObject dbo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterConvert({})", dbo);
|
||||
LOG.debug("onBeforeDelete({})", dbo);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
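The corrected log statements above belong to the delete callbacks of `AbstractMongoEventListener`. A minimal subclass sketch showing how such a callback is typically overridden:

```java
import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener;

import com.mongodb.DBObject;

class DeleteLoggingListener extends AbstractMongoEventListener<Object> {

	@Override
	public void onBeforeDelete(DBObject dbo) {
		// dbo is the query document describing what is about to be removed
		System.out.println("onBeforeDelete(" + dbo + ")");
	}
}
```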
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012 the original author or authors.
|
||||
* Copyright 2012-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -28,14 +28,14 @@ import org.springframework.util.Assert;
|
||||
*/
|
||||
public class AuditingEventListener implements ApplicationListener<BeforeConvertEvent<Object>> {
|
||||
|
||||
private final IsNewAwareAuditingHandler<Object> auditingHandler;
|
||||
private final IsNewAwareAuditingHandler auditingHandler;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AuditingEventListener} using the given {@link MappingContext} and {@link AuditingHandler}.
|
||||
*
|
||||
* @param auditingHandler must not be {@literal null}.
|
||||
*/
|
||||
public AuditingEventListener(IsNewAwareAuditingHandler<Object> auditingHandler) {
|
||||
public AuditingEventListener(IsNewAwareAuditingHandler auditingHandler) {
|
||||
|
||||
Assert.notNull(auditingHandler, "IsNewAwareAuditingHandler must not be null!");
|
||||
this.auditingHandler = auditingHandler;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2012 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -23,7 +23,7 @@ package org.springframework.data.mongodb.core.mapreduce;
|
||||
*/
|
||||
public class MapReduceCounts {
|
||||
|
||||
public static MapReduceCounts NONE = new MapReduceCounts(-1, -1, -1);
|
||||
public static final MapReduceCounts NONE = new MapReduceCounts(-1, -1, -1);
|
||||
|
||||
private final long inputCount;
|
||||
private final long emitCount;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -43,6 +43,7 @@ import com.mongodb.DBObject;
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class Criteria implements CriteriaDefinition {
|
||||
|
||||
@@ -52,11 +53,8 @@ public class Criteria implements CriteriaDefinition {
|
||||
private static final Object NOT_SET = new Object();
|
||||
|
||||
private String key;
|
||||
|
||||
private List<Criteria> criteriaChain;
|
||||
|
||||
private LinkedHashMap<String, Object> criteria = new LinkedHashMap<String, Object>();
|
||||
|
||||
private Object isValue = NOT_SET;
|
||||
|
||||
public Criteria() {
|
||||
@@ -101,13 +99,16 @@ public class Criteria implements CriteriaDefinition {
|
||||
* @return
|
||||
*/
|
||||
public Criteria is(Object o) {
|
||||
if (isValue != NOT_SET) {
|
||||
|
||||
if (!isValue.equals(NOT_SET)) {
|
||||
throw new InvalidMongoDbApiUsageException(
|
||||
"Multiple 'is' values declared. You need to use 'and' with multiple criteria");
|
||||
}
|
||||
|
||||
if (lastOperatorWasNot()) {
|
||||
throw new InvalidMongoDbApiUsageException("Invalid query: 'not' can't be used with 'is' - use 'ne' instead.");
|
||||
}
|
||||
|
||||
this.isValue = o;
|
||||
return this;
|
||||
}
|
||||
@@ -117,8 +118,9 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $ne operator
|
||||
* Creates a criterion using the {@literal $ne} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/ne/
|
||||
* @param o
|
||||
* @return
|
||||
*/
|
||||
@@ -128,8 +130,9 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $lt operator
|
||||
* Creates a criterion using the {@literal $lt} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/lt/
|
||||
* @param o
|
||||
* @return
|
||||
*/
|
||||
@@ -139,8 +142,9 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $lte operator
|
||||
* Creates a criterion using the {@literal $lte} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/lte/
|
||||
* @param o
|
||||
* @return
|
||||
*/
|
||||
@@ -150,8 +154,9 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $gt operator
|
||||
* Creates a criterion using the {@literal $gt} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/gt/
|
||||
* @param o
|
||||
* @return
|
||||
*/
|
||||
@@ -161,8 +166,9 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $gte operator
|
||||
* Creates a criterion using the {@literal $gte} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/gte/
|
||||
* @param o
|
||||
* @return
|
||||
*/
|
||||
@@ -172,8 +178,9 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $in operator
|
||||
* Creates a criterion using the {@literal $in} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/in/
|
||||
* @param o the values to match against
|
||||
* @return
|
||||
*/
|
||||
@@ -187,8 +194,9 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $in operator
|
||||
* Creates a criterion using the {@literal $in} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/in/
|
||||
* @param c the collection containing the values to match against
|
||||
* @return
|
||||
*/
|
||||
@@ -198,8 +206,9 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $nin operator
|
||||
* Creates a criterion using the {@literal $nin} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/nin/
|
||||
* @param o
|
||||
* @return
|
||||
*/
|
||||
@@ -207,14 +216,22 @@ public class Criteria implements CriteriaDefinition {
|
||||
return nin(Arrays.asList(o));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the {@literal $nin} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/nin/
|
||||
* @param o
|
||||
* @return
|
||||
*/
|
||||
public Criteria nin(Collection<?> o) {
|
||||
criteria.put("$nin", o);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $mod operator
|
||||
* Creates a criterion using the {@literal $mod} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/mod/
|
||||
* @param value
|
||||
* @param remainder
|
||||
* @return
|
||||
@@ -228,8 +245,9 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $all operator
|
||||
* Creates a criterion using the {@literal $all} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/all/
|
||||
* @param o
|
||||
* @return
|
||||
*/
|
||||
@@ -237,14 +255,22 @@ public class Criteria implements CriteriaDefinition {
|
||||
return all(Arrays.asList(o));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the {@literal $all} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/all/
|
||||
* @param o
|
||||
* @return
|
||||
*/
|
||||
public Criteria all(Collection<?> o) {
|
||||
criteria.put("$all", o);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $size operator
|
||||
* Creates a criterion using the {@literal $size} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/size/
|
||||
* @param s
|
||||
* @return
|
||||
*/
|
||||
@@ -254,8 +280,9 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $exists operator
|
||||
* Creates a criterion using the {@literal $exists} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/exists/
|
||||
* @param b
|
||||
* @return
|
||||
*/
|
||||
@@ -265,8 +292,9 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $type operator
|
||||
* Creates a criterion using the {@literal $type} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/type/
|
||||
* @param t
|
||||
* @return
|
||||
*/
|
||||
@@ -276,22 +304,31 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $not meta operator which affects the clause directly following
|
||||
* Creates a criterion using the {@literal $not} meta operator which affects the clause directly following
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/not/
|
||||
* @return
|
||||
*/
|
||||
public Criteria not() {
|
||||
return not(null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the {@literal $not} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/not/
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
private Criteria not(Object value) {
|
||||
criteria.put("$not", value);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using a $regex
|
||||
* Creates a criterion using a {@literal $regex} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/regex/
|
||||
* @param re
|
||||
* @return
|
||||
*/
|
||||
@@ -300,8 +337,10 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using a $regex and $options
|
||||
* Creates a criterion using a {@literal $regex} and {@literal $options} operator.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/regex/
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/regex/#op._S_options
|
||||
* @param re
|
||||
* @param options
|
||||
* @return
|
||||
@@ -334,8 +373,11 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a geospatial criterion using a $within $center operation. This is only available for Mongo 1.7 and higher.
|
||||
* Creates a geospatial criterion using a {@literal $within $centerSphere} operation. This is only available for Mongo
|
||||
* 1.7 and higher.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/geoWithin/
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/centerSphere/
|
||||
* @param circle must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
@@ -345,6 +387,13 @@ public class Criteria implements CriteriaDefinition {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a geospatial criterion using a {@literal $within} operation.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/geoWithin/
|
||||
* @param shape
|
||||
* @return
|
||||
*/
|
||||
public Criteria within(Shape shape) {
|
||||
|
||||
Assert.notNull(shape);
|
||||
@@ -353,8 +402,9 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a geospatial criterion using a $near operation
|
||||
* Creates a geospatial criterion using a {@literal $near} operation.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/near/
|
||||
* @param point must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
@@ -365,8 +415,10 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a geospatial criterion using a $nearSphere operation. This is only available for Mongo 1.7 and higher.
|
||||
* Creates a geospatial criterion using a {@literal $nearSphere} operation. This is only available for Mongo 1.7 and
|
||||
* higher.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/nearSphere/
|
||||
* @param point must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
@@ -377,8 +429,9 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a geospatical criterion using a $maxDistance operation, for use with $near
|
||||
* Creates a geospatial criterion using a {@literal $maxDistance} operation, for use with $near
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/maxDistance/
|
||||
* @param maxDistance
|
||||
* @return
|
||||
*/
|
||||
@@ -388,8 +441,9 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the $elemMatch operator
|
||||
* Creates a criterion using the {@literal $elemMatch} operator
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/elemMatch/
|
||||
* @param c
|
||||
* @return
|
||||
*/
|
||||
@@ -475,8 +529,10 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
protected DBObject getSingleCriteriaObject() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
boolean not = false;
|
||||
|
||||
for (String k : this.criteria.keySet()) {
|
||||
Object value = this.criteria.get(k);
|
||||
if (not) {
|
||||
@@ -494,12 +550,14 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
DBObject queryCriteria = new BasicDBObject();
|
||||
if (isValue != NOT_SET) {
|
||||
|
||||
if (!NOT_SET.equals(isValue)) {
|
||||
queryCriteria.put(this.key, this.isValue);
|
||||
queryCriteria.putAll(dbo);
|
||||
} else {
|
||||
queryCriteria.put(this.key, dbo);
|
||||
}
|
||||
|
||||
return queryCriteria;
|
||||
}
|
||||
|
||||
|
||||
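Each of the operator methods documented above adds one entry to the criteria document; chaining them on the same `Criteria` combines operators under a single key. A short sketch (field names are illustrative):

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class CriteriaExample {

	Query activeAdults() {

		// { "age" : { "$gte" : 18, "$lt" : 65 }, "status" : { "$in" : [ "ACTIVE", "PENDING" ] } }
		Criteria criteria = where("age").gte(18).lt(65).and("status").in("ACTIVE", "PENDING");
		return new Query(criteria);
	}
}
```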
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -31,8 +31,9 @@ import com.mongodb.DBObject;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class NearQuery {
|
||||
public final class NearQuery {
|
||||
|
||||
private final Point point;
|
||||
private Query query;
|
||||
@@ -143,10 +144,12 @@ public class NearQuery {
|
||||
/**
|
||||
* Configures the {@link Pageable} to use.
|
||||
*
|
||||
* @param pageable
|
||||
* @param pageable must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public NearQuery with(Pageable pageable) {
|
||||
|
||||
Assert.notNull(pageable, "Pageable must not be 'null'.");
|
||||
this.num = pageable.getOffset() + pageable.getPageSize();
|
||||
this.skip = pageable.getOffset();
|
||||
return this;
|
||||
@@ -311,13 +314,18 @@ public class NearQuery {
|
||||
/**
|
||||
* Adds an actual query to the {@link NearQuery} to restrict the objects considered for the actual near operation.
|
||||
*
|
||||
* @param query
|
||||
* @param query must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public NearQuery query(Query query) {
|
||||
|
||||
Assert.notNull(query, "Cannot apply 'null' query on NearQuery.");
|
||||
this.query = query;
|
||||
this.skip = query.getSkip();
|
||||
this.num = query.getLimit();
|
||||
|
||||
if (query.getLimit() != 0) {
|
||||
this.num = query.getLimit();
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
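`query(Query)` above merges an additional filter, plus its skip and (non-zero) limit, into the near search. A hedged usage sketch (the `Venue` type is illustrative; the geo types are referenced from the `org.springframework.data.mongodb.core.geo` package used in this release line):

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.geo.Distance;
import org.springframework.data.mongodb.core.geo.GeoResults;
import org.springframework.data.mongodb.core.geo.Metrics;
import org.springframework.data.mongodb.core.geo.Point;
import org.springframework.data.mongodb.core.query.NearQuery;

class NearQueryExample {

	GeoResults<Venue> restaurantsNearby(MongoTemplate template) {

		NearQuery nearQuery = NearQuery.near(new Point(-73.99, 40.73))
				.maxDistance(new Distance(10, Metrics.KILOMETERS))
				.query(query(where("category").is("restaurant")).limit(20));

		return template.geoNear(nearQuery, Venue.class);
	}
}
```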
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -23,6 +23,7 @@ import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.data.domain.Pageable;
|
||||
@@ -41,10 +42,10 @@ import com.mongodb.DBObject;
|
||||
*/
|
||||
public class Query {
|
||||
|
||||
private final static String RESTRICTED_TYPES_KEY = "_$RESTRICTED_TYPES";
|
||||
private static final String RESTRICTED_TYPES_KEY = "_$RESTRICTED_TYPES";
|
||||
|
||||
private final Set<Class<?>> restrictedTypes = new HashSet<Class<?>>();
|
||||
private LinkedHashMap<String, Criteria> criteria = new LinkedHashMap<String, Criteria>();
|
||||
private final Map<String, Criteria> criteria = new LinkedHashMap<String, Criteria>();
|
||||
private Field fieldSpec;
|
||||
private Sort sort;
|
||||
private int skip;
|
||||
@@ -98,11 +99,23 @@ public class Query {
|
||||
return this.fieldSpec;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set number of documents to skip before returning results.
|
||||
*
|
||||
* @param skip
|
||||
* @return
|
||||
*/
|
||||
public Query skip(int skip) {
|
||||
this.skip = skip;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Limit the number of returned documents to {@code limit}.
|
||||
*
|
||||
* @param limit
|
||||
* @return
|
||||
*/
|
||||
public Query limit(int limit) {
|
||||
this.limit = limit;
|
||||
return this;
|
||||
@@ -197,6 +210,7 @@ public class Query {
|
||||
public DBObject getQueryObject() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
|
||||
for (String k : criteria.keySet()) {
|
||||
CriteriaDefinition c = criteria.get(k);
|
||||
DBObject cl = c.getCriteriaObject();
|
||||
@@ -211,37 +225,45 @@ public class Query {
|
||||
}
|
||||
|
||||
public DBObject getFieldsObject() {
|
||||
if (this.fieldSpec == null) {
|
||||
return null;
|
||||
}
|
||||
return fieldSpec.getFieldsObject();
|
||||
return this.fieldSpec == null ? null : fieldSpec.getFieldsObject();
|
||||
}
|
||||
|
||||
public DBObject getSortObject() {
|
||||
|
||||
if (this.sort == null && this.sort == null) {
|
||||
if (this.sort == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
|
||||
if (this.sort != null) {
|
||||
for (org.springframework.data.domain.Sort.Order order : this.sort) {
|
||||
dbo.put(order.getProperty(), order.isAscending() ? 1 : -1);
|
||||
}
|
||||
for (org.springframework.data.domain.Sort.Order order : this.sort) {
|
||||
dbo.put(order.getProperty(), order.isAscending() ? 1 : -1);
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the number of documents to skip.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public int getSkip() {
|
||||
return this.skip;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the maximum number of documents to be return.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public int getLimit() {
|
||||
return this.limit;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return
|
||||
*/
|
||||
public String getHint() {
|
||||
return hint;
|
||||
}
|
||||
|
||||
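`skip(int)` and `limit(int)` above translate directly into the cursor options applied when the query is executed, so combining them gives simple page-style access. A brief sketch (the `Customer` type is illustrative):

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import java.util.List;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Query;

class PagedQueryExample {

	List<Customer> thirdPageOfActiveCustomers(MongoTemplate template) {

		// page 3 with a page size of 10 -> skip the first 20 matches
		Query query = query(where("status").is("ACTIVE")).skip(20).limit(10);
		return template.find(query, Customer.class);
	}
}
```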
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,11 +16,18 @@
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
@@ -31,6 +38,8 @@ import com.mongodb.DBObject;
|
||||
* @author Thomas Risberg
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Becca Gaspard
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class Update {
|
||||
|
||||
@@ -38,7 +47,9 @@ public class Update {
|
||||
LAST, FIRST
|
||||
}
|
||||
|
||||
private HashMap<String, Object> modifierOps = new LinkedHashMap<String, Object>();
|
||||
private Set<String> keysToUpdate = new HashSet<String>();
|
||||
private Map<String, Object> modifierOps = new LinkedHashMap<String, Object>();
|
||||
private Map<String, PushOperatorBuilder> pushCommandBuilders = new LinkedHashMap<String, PushOperatorBuilder>(1);
|
||||
|
||||
/**
|
||||
* Static factory method to create an Update using the provided key
|
||||
@@ -72,15 +83,22 @@ public class Update {
|
||||
continue;
|
||||
}
|
||||
|
||||
update.modifierOps.put(key, object.get(key));
|
||||
Object value = object.get(key);
|
||||
update.modifierOps.put(key, value);
|
||||
if (isKeyword(key) && value instanceof DBObject) {
|
||||
update.keysToUpdate.addAll(((DBObject) value).keySet());
|
||||
} else {
|
||||
update.keysToUpdate.add(key);
|
||||
}
|
||||
}
|
||||
|
||||
return update;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update using the $set update modifier
|
||||
* Update using the {@literal $set} update modifier
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/update/set/
|
||||
* @param key
|
||||
* @param value
|
||||
* @return
|
||||
@@ -91,8 +109,22 @@ public class Update {
|
||||
}
|
||||
|
||||
/**
|
||||
* Update using the $unset update modifier
|
||||
* Update using the {@literal $setOnInsert} update modifier
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/update/setOnInsert/
|
||||
* @param key
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
public Update setOnInsert(String key, Object value) {
|
||||
addMultiFieldOperation("$setOnInsert", key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update using the {@literal $unset} update modifier
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/update/unset/
|
||||
* @param key
|
||||
* @return
|
||||
*/
|
||||
@@ -102,8 +134,9 @@ public class Update {
|
||||
}
|
||||
|
||||
/**
|
||||
* Update using the $inc update modifier
|
||||
* Update using the {@literal $inc} update modifier
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/update/inc/
|
||||
* @param key
|
||||
* @param inc
|
||||
* @return
|
||||
@@ -114,8 +147,9 @@ public class Update {
|
||||
}
|
||||
|
||||
/**
|
||||
* Update using the $push update modifier
|
||||
* Update using the {@literal $push} update modifier
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/update/push/
|
||||
* @param key
|
||||
* @param value
|
||||
* @return
|
||||
@@ -126,26 +160,46 @@ public class Update {
|
||||
}
|
||||
|
||||
/**
|
||||
* Update using the $pushAll update modifier
|
||||
* Update using {@code $push} modifier. <br/>
|
||||
* Allows creation of {@code $push} command for single or multiple (using {@code $each}) values.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/update/push/
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/update/each/
|
||||
* @param key
|
||||
* @return {@link PushOperatorBuilder} for given key
|
||||
*/
|
||||
public PushOperatorBuilder push(String key) {
|
||||
|
||||
if (!pushCommandBuilders.containsKey(key)) {
|
||||
pushCommandBuilders.put(key, new PushOperatorBuilder(key));
|
||||
}
|
||||
return pushCommandBuilders.get(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update using the {@code $pushAll} update modifier. <br>
|
||||
* <b>Note</b>: In mongodb 2.4 the usage of {@code $pushAll} has been deprecated in favor of {@code $push $each}.
|
||||
* {@link #push(String)}) returns a builder that can be used to populate the {@code $each} object.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/update/pushAll/
|
||||
* @param key
|
||||
* @param values
|
||||
* @return
|
||||
*/
|
||||
public Update pushAll(String key, Object[] values) {
|
||||
|
||||
Object[] convertedValues = new Object[values.length];
|
||||
for (int i = 0; i < values.length; i++) {
|
||||
convertedValues[i] = values[i];
|
||||
}
|
||||
DBObject keyValue = new BasicDBObject();
|
||||
keyValue.put(key, convertedValues);
|
||||
modifierOps.put("$pushAll", keyValue);
|
||||
addMultiFieldOperation("$pushAll", key, convertedValues);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update using the $addToSet update modifier
|
||||
* Update using the {@literal $addToSet} update modifier
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/update/addToSet/
|
||||
* @param key
|
||||
* @param value
|
||||
* @return
|
||||
@@ -156,8 +210,9 @@ public class Update {
|
||||
}
|
||||
|
||||
/**
|
||||
* Update using the $pop update modifier
|
||||
* Update using the {@literal $pop} update modifier
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/update/pop/
|
||||
* @param key
|
||||
* @param pos
|
||||
* @return
|
||||
@@ -168,8 +223,9 @@ public class Update {
|
||||
}
|
||||
|
||||
/**
|
||||
* Update using the $pull update modifier
|
||||
* Update using the {@literal $pull} update modifier
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/update/pull/
|
||||
* @param key
|
||||
* @param value
|
||||
* @return
|
||||
@@ -180,26 +236,27 @@ public class Update {
|
||||
}
|
||||
|
||||
/**
|
||||
* Update using the $pullAll update modifier
|
||||
* Update using the {@literal $pullAll} update modifier
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/update/pullAll/
|
||||
* @param key
|
||||
* @param values
|
||||
* @return
|
||||
*/
|
||||
public Update pullAll(String key, Object[] values) {
|
||||
|
||||
Object[] convertedValues = new Object[values.length];
|
||||
for (int i = 0; i < values.length; i++) {
|
||||
convertedValues[i] = values[i];
|
||||
}
|
||||
DBObject keyValue = new BasicDBObject();
|
||||
keyValue.put(key, convertedValues);
|
||||
modifierOps.put("$pullAll", keyValue);
|
||||
addFieldOperation("$pullAll", key, convertedValues);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update using the $rename update modifier
|
||||
* Update using the {@literal $rename} update modifier
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/update/rename/
|
||||
* @param oldName
|
||||
* @param newName
|
||||
* @return
|
||||
@@ -217,8 +274,16 @@ public class Update {
|
||||
return dbo;
|
||||
}
|
||||
|
||||
protected void addFieldOperation(String operator, String key, Object value) {
|
||||
|
||||
Assert.hasText(key, "Key/Path for update must not be null or blank.");
|
||||
modifierOps.put(operator, new BasicDBObject(key, value));
|
||||
this.keysToUpdate.add(key);
|
||||
}
|
||||
|
||||
protected void addMultiFieldOperation(String operator, String key, Object value) {
|
||||
|
||||
Assert.hasText(key, "Key/Path for update must not be null or blank.");
|
||||
Object existingValue = this.modifierOps.get(operator);
|
||||
DBObject keyValueMap;
|
||||
|
||||
@@ -235,5 +300,146 @@ public class Update {
|
||||
}
|
||||
|
||||
keyValueMap.put(key, value);
|
||||
this.keysToUpdate.add(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if a given {@code key} will be touched on execution.
|
||||
*
|
||||
* @param key
|
||||
* @return
|
||||
*/
|
||||
public boolean modifies(String key) {
|
||||
return this.keysToUpdate.contains(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Inspects given {@code key} for '$'.
|
||||
*
|
||||
* @param key
|
||||
* @return
|
||||
*/
|
||||
private static boolean isKeyword(String key) {
|
||||
return StringUtils.startsWithIgnoreCase(key, "$");
|
||||
}
|
||||
|
||||
/**
|
||||
* Modifiers holds a distinct collection of {@link Modifier}
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class Modifiers {
|
||||
|
||||
private HashMap<String, Modifier> modifiers;
|
||||
|
||||
public Modifiers() {
|
||||
this.modifiers = new LinkedHashMap<String, Modifier>(1);
|
||||
}
|
||||
|
||||
public Collection<Modifier> getModifiers() {
|
||||
return Collections.unmodifiableCollection(this.modifiers.values());
|
||||
}
|
||||
|
||||
public void addModifier(Modifier modifier) {
|
||||
this.modifiers.put(modifier.getKey(), modifier);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Marker interface of nested commands.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static interface Modifier {
|
||||
|
||||
/**
|
||||
* @return the command to send eg. {@code $push}
|
||||
*/
|
||||
String getKey();
|
||||
|
||||
/**
|
||||
* @return value to be sent with command
|
||||
*/
|
||||
Object getValue();
|
||||
}
|
||||
|
||||
/**
|
||||
* Implementation of {@link Modifier} representing {@code $each}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private static class Each implements Modifier {
|
||||
|
||||
private Object[] values;
|
||||
|
||||
public Each(Object... values) {
|
||||
this.values = extractValues(values);
|
||||
}
|
||||
|
||||
private Object[] extractValues(Object[] values) {
|
||||
|
||||
if (values == null || values.length == 0) {
|
||||
return values;
|
||||
}
|
||||
|
||||
if (values.length == 1 && values[0] instanceof Collection) {
|
||||
return ((Collection<?>) values[0]).toArray();
|
||||
}
|
||||
|
||||
Object[] convertedValues = new Object[values.length];
|
||||
for (int i = 0; i < values.length; i++) {
|
||||
convertedValues[i] = values[i];
|
||||
}
|
||||
|
||||
return convertedValues;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getKey() {
|
||||
return "$each";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getValue() {
|
||||
return this.values;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for creating {@code $push} modifiers
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class PushOperatorBuilder {
|
||||
|
||||
private final String key;
|
||||
private final Modifiers modifiers;
|
||||
|
||||
PushOperatorBuilder(String key) {
|
||||
this.key = key;
|
||||
this.modifiers = new Modifiers();
|
||||
}
|
||||
|
||||
/**
|
||||
* Propagates {@code $each} to {@code $push}
|
||||
*
|
||||
* @param values
|
||||
* @return
|
||||
*/
|
||||
public Update each(Object... values) {
|
||||
|
||||
this.modifiers.addModifier(new Each(values));
|
||||
return Update.this.push(key, this.modifiers);
|
||||
}
|
||||
|
||||
/**
|
||||
* Propagates {@link #value(Object)} to {@code $push}
|
||||
*
|
||||
* @param values
|
||||
* @return
|
||||
*/
|
||||
public Update value(Object value) {
|
||||
return Update.this.push(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
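The `PushOperatorBuilder` introduced above exists because `$pushAll` is deprecated as of MongoDB 2.4; `push(key).each(...)` produces the equivalent `$push`/`$each` document, and the new `modifies(key)` method reports which keys an update touches. A short sketch:

```java
import org.springframework.data.mongodb.core.query.Update;

class UpdateExample {

	Update tagUpdate() {

		// renders { "$push" : { "tags" : { "$each" : [ "mongodb", "spring" ] } } }
		Update update = new Update().push("tags").each("mongodb", "spring");

		// modifies(...) reports whether a key is touched by this update
		boolean touchesTags = update.modifies("tags"); // true
		return update;
	}
}
```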
@@ -0,0 +1,217 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.spel;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.springframework.expression.spel.ExpressionState;
|
||||
import org.springframework.expression.spel.SpelNode;
|
||||
import org.springframework.expression.spel.ast.Literal;
|
||||
import org.springframework.expression.spel.ast.MethodReference;
|
||||
import org.springframework.expression.spel.ast.Operator;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* A value object for nodes in an expression. Allows iterating over potentially available child {@link ExpressionNode}s.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class ExpressionNode implements Iterable<ExpressionNode> {
|
||||
|
||||
private static final Iterator<ExpressionNode> EMPTY_ITERATOR = Collections.<ExpressionNode> emptySet().iterator();
|
||||
|
||||
private final SpelNode node;
|
||||
private final ExpressionState state;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExpressionNode} from the given {@link SpelNode} and {@link ExpressionState}.
|
||||
*
|
||||
* @param node must not be {@literal null}.
|
||||
* @param state must not be {@literal null}.
|
||||
*/
|
||||
protected ExpressionNode(SpelNode node, ExpressionState state) {
|
||||
|
||||
Assert.notNull(node, "SpelNode must not be null!");
|
||||
Assert.notNull(state, "ExpressionState must not be null!");
|
||||
|
||||
this.node = node;
|
||||
this.state = state;
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to create {@link ExpressionNode}'s according to the given {@link SpelNode} and
|
||||
* {@link ExpressionState}.
|
||||
*
|
||||
* @param node
|
||||
* @param state must not be {@literal null}.
|
||||
* @return an {@link ExpressionNode} for the given {@link SpelNode} or {@literal null} if {@literal null} was given
|
||||
* for the {@link SpelNode}.
|
||||
*/
|
||||
public static ExpressionNode from(SpelNode node, ExpressionState state) {
|
||||
|
||||
if (node == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (node instanceof Operator) {
|
||||
return new OperatorNode((Operator) node, state);
|
||||
}
|
||||
|
||||
if (node instanceof MethodReference) {
|
||||
return new MethodReferenceNode((MethodReference) node, state);
|
||||
}
|
||||
|
||||
if (node instanceof Literal) {
|
||||
return new LiteralNode((Literal) node, state);
|
||||
}
|
||||
|
||||
return new ExpressionNode(node, state);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the {@link ExpressionNode}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getName() {
|
||||
return node.toStringAST();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the current {@link ExpressionNode} is backed by the given type.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public boolean isOfType(Class<?> type) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
return type.isAssignableFrom(node.getClass());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link ExpressionNode} is representing the same backing node type as the current one.
|
||||
*
|
||||
* @param node
|
||||
* @return
|
||||
*/
|
||||
boolean isOfSameTypeAs(ExpressionNode node) {
|
||||
return node == null ? false : this.node.getClass().equals(node.node.getClass());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExpressionNode} is a mathematical operation.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean isMathematicalOperation() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExpressionNode} is a literal.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean isLiteral() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the value of the current node.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public Object getValue() {
|
||||
return node.getValue(state);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the current node has child nodes.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean hasChildren() {
|
||||
return node.getChildCount() != 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the child {@link ExpressionNode} with the given index.
|
||||
*
|
||||
* @param index must not be negative.
|
||||
* @return
|
||||
*/
|
||||
public ExpressionNode getChild(int index) {
|
||||
|
||||
Assert.isTrue(index >= 0);
|
||||
return from(node.getChild(index), state);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExpressionNode} has a first child node that is not of the given type.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public boolean hasfirstChildNotOfType(Class<?> type) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
return hasChildren() && !node.getChild(0).getClass().equals(type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExpressionNode} from the given {@link SpelNode}.
|
||||
*
|
||||
* @param node
|
||||
* @return
|
||||
*/
|
||||
protected ExpressionNode from(SpelNode node) {
|
||||
return from(node, state);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Iterable#iterator()
|
||||
*/
|
||||
@Override
|
||||
public Iterator<ExpressionNode> iterator() {
|
||||
|
||||
if (!hasChildren()) {
|
||||
return EMPTY_ITERATOR;
|
||||
}
|
||||
|
||||
return new Iterator<ExpressionNode>() {
|
||||
|
||||
int index = 0;
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return index < node.getChildCount();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExpressionNode next() {
|
||||
return from(node.getChild(index++));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
};
|
||||
}
|
||||
}
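
For orientation, here is a minimal sketch of how these node wrappers are meant to be used, assuming Spring's SpEL parser infrastructure is on the classpath; the expression, class and variable names are illustrative only, not part of the change set.

```java
import org.springframework.data.mongodb.core.spel.ExpressionNode;
import org.springframework.expression.spel.ExpressionState;
import org.springframework.expression.spel.standard.SpelExpression;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.expression.spel.support.StandardEvaluationContext;

class ExpressionNodeSketch {

	public static void main(String[] args) {

		// Parse a SpEL expression and wrap its AST root into an ExpressionNode.
		SpelExpression expression = (SpelExpression) new SpelExpressionParser().parseExpression("a + 1");
		ExpressionState state = new ExpressionState(new StandardEvaluationContext());
		ExpressionNode root = ExpressionNode.from(expression.getAST(), state);

		System.out.println(root.isMathematicalOperation()); // true - the root is an OperatorNode for '+'

		for (ExpressionNode child : root) { // iterates the operands 'a' and '1'
			System.out.println(child.getName());
		}
	}
}
```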
|
||||
@@ -0,0 +1,126 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.spel;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* The context for an {@link ExpressionNode} transformation.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class ExpressionTransformationContextSupport<T extends ExpressionNode> {
|
||||
|
||||
private final T currentNode;
|
||||
private final ExpressionNode parentNode;
|
||||
private final DBObject previousOperationObject;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExpressionTransformationContextSupport} for the given {@link ExpressionNode}s and an optional
|
||||
* previous operation.
|
||||
*
|
||||
* @param currentNode must not be {@literal null}.
|
||||
* @param parentNode
|
||||
* @param previousOperationObject
|
||||
*/
|
||||
public ExpressionTransformationContextSupport(T currentNode, ExpressionNode parentNode,
|
||||
DBObject previousOperationObject) {
|
||||
|
||||
Assert.notNull(currentNode, "currentNode must not be null!");
|
||||
|
||||
this.currentNode = currentNode;
|
||||
this.parentNode = parentNode;
|
||||
this.previousOperationObject = previousOperationObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current {@link ExpressionNode}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public T getCurrentNode() {
|
||||
return currentNode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the parent {@link ExpressionNode} or {@literal null} if none available.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public ExpressionNode getParentNode() {
|
||||
return parentNode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the previously accumulated operation object or {@literal null} if none is available. Rather than manually
* adding values to the object, prefer {@link #addToPreviousOrReturn(Object)}, which transparently adds to the
* previous operation if one is present.
|
||||
*
|
||||
* @see #hasPreviousOperation()
|
||||
* @see #addToPreviousOrReturn(Object)
|
||||
* @return
|
||||
*/
|
||||
public DBObject getPreviousOperationObject() {
|
||||
return previousOperationObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether a previous operation is present.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean hasPreviousOperation() {
|
||||
return getPreviousOperationObject() != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the parent node is of the same operation as the current node.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean parentIsSameOperation() {
|
||||
return parentNode == null ? false : currentNode.isOfSameTypeAs(parentNode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the given value to the previous operation and returns it.
|
||||
*
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
public DBObject addToPreviousOperation(Object value) {
|
||||
extractArgumentListFrom(previousOperationObject).add(value);
|
||||
return previousOperationObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the given value to the previous operation if one is present or returns the value to add as is.
|
||||
*
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
public Object addToPreviousOrReturn(Object value) {
|
||||
return hasPreviousOperation() ? addToPreviousOperation(value) : value;
|
||||
}
|
||||
|
||||
private BasicDBList extractArgumentListFrom(DBObject context) {
|
||||
return (BasicDBList) context.get(context.keySet().iterator().next());
|
||||
}
|
||||
}
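
A rough sketch of the accumulation behaviour described above, written as a fragment that reuses the imports and the `node` setup from the sketch after ExpressionNode; `DBObjectUtils.dbList(...)` is the test helper added further down in this diff.

```java
// With a previous { "$add" : [ "$a" , 1 ] } already accumulated, further values are appended to
// its argument list instead of starting a new operation object.
DBObject previous = new BasicDBObject("$add", DBObjectUtils.dbList("$a", 1));

ExpressionTransformationContextSupport<ExpressionNode> context =
		new ExpressionTransformationContextSupport<ExpressionNode>(node, null, previous);

context.hasPreviousOperation();   // true
context.addToPreviousOrReturn(2); // previous is now { "$add" : [ "$a" , 1 , 2 ] }
```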
|
||||
@@ -0,0 +1,33 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.spel;

/**
 * SPI interface to implement components that can transform an {@link ExpressionTransformationContextSupport} into an
 * object.
 *
 * @author Oliver Gierke
 */
public interface ExpressionTransformer<T extends ExpressionTransformationContextSupport<?>> {

	/**
	 * Transforms the given {@link ExpressionTransformationContextSupport} into an Object.
	 *
	 * @param context will never be {@literal null}.
	 * @return
	 */
	Object transform(T context);
}
|
||||
@@ -0,0 +1,72 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.spel;
|
||||
|
||||
import org.springframework.expression.spel.ExpressionState;
|
||||
import org.springframework.expression.spel.ast.FloatLiteral;
|
||||
import org.springframework.expression.spel.ast.IntLiteral;
|
||||
import org.springframework.expression.spel.ast.Literal;
|
||||
import org.springframework.expression.spel.ast.LongLiteral;
|
||||
import org.springframework.expression.spel.ast.NullLiteral;
|
||||
import org.springframework.expression.spel.ast.RealLiteral;
|
||||
import org.springframework.expression.spel.ast.StringLiteral;
|
||||
|
||||
/**
|
||||
* A node representing a literal in an expression.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class LiteralNode extends ExpressionNode {
|
||||
|
||||
private final Literal literal;
|
||||
|
||||
/**
|
||||
* Creates a new {@link LiteralNode} from the given {@link Literal} and {@link ExpressionState}.
|
||||
*
|
||||
* @param node must not be {@literal null}.
|
||||
* @param state must not be {@literal null}.
|
||||
*/
|
||||
LiteralNode(Literal node, ExpressionState state) {
|
||||
super(node, state);
|
||||
this.literal = node;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link ExpressionNode} is a unary minus.
|
||||
*
|
||||
* @param parent
|
||||
* @return
|
||||
*/
|
||||
public boolean isUnaryMinus(ExpressionNode parent) {
|
||||
|
||||
if (!(parent instanceof OperatorNode)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
OperatorNode operator = (OperatorNode) parent;
|
||||
return operator.isUnaryMinus() && operator.getRight() == null;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.spel.ExpressionNode#isLiteral()
|
||||
*/
|
||||
@Override
|
||||
public boolean isLiteral() {
|
||||
return literal instanceof FloatLiteral || literal instanceof RealLiteral || literal instanceof IntLiteral
|
||||
|| literal instanceof LongLiteral || literal instanceof StringLiteral || literal instanceof NullLiteral;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.spel;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.expression.spel.ExpressionState;
|
||||
import org.springframework.expression.spel.ast.MethodReference;
|
||||
|
||||
/**
|
||||
* An {@link ExpressionNode} representing a method reference.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class MethodReferenceNode extends ExpressionNode {
|
||||
|
||||
private static final Map<String, String> FUNCTIONS;
|
||||
|
||||
static {
|
||||
|
||||
Map<String, String> map = new HashMap<String, String>();
|
||||
|
||||
map.put("concat", "$concat"); // Concatenates two strings.
|
||||
map.put("strcasecmp", "$strcasecmp"); // Compares two strings and returns an integer that reflects the comparison.
|
||||
map.put("substr", "$substr"); // Takes a string and returns portion of that string.
|
||||
map.put("toLower", "$toLower"); // Converts a string to lowercase.
|
||||
map.put("toUpper", "$toUpper"); // Converts a string to uppercase.
|
||||
|
||||
map.put("dayOfYear", "$dayOfYear"); // Converts a date to a number between 1 and 366.
|
||||
map.put("dayOfMonth", "$dayOfMonth"); // Converts a date to a number between 1 and 31.
|
||||
map.put("dayOfWeek", "$dayOfWeek"); // Converts a date to a number between 1 and 7.
|
||||
map.put("year", "$year"); // Converts a date to the full year.
|
||||
map.put("month", "$month"); // Converts a date into a number between 1 and 12.
|
||||
map.put("week", "$week"); // Converts a date into a number between 0 and 53
|
||||
map.put("hour", "$hour"); // Converts a date into a number between 0 and 23.
|
||||
map.put("minute", "$minute"); // Converts a date into a number between 0 and 59.
|
||||
map.put("second", "$second"); // Converts a date into a number between 0 and 59. May be 60 to account for leap
|
||||
// seconds.
|
||||
map.put("millisecond", "$millisecond"); // Returns the millisecond portion of a date as an integer between 0 and
|
||||
|
||||
FUNCTIONS = Collections.unmodifiableMap(map);
|
||||
}
|
||||
|
||||
MethodReferenceNode(MethodReference reference, ExpressionState state) {
|
||||
super(reference, state);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the method.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getMethodName() {
|
||||
|
||||
String name = getName();
|
||||
String methodName = name.substring(0, name.indexOf('('));
|
||||
return FUNCTIONS.get(methodName);
|
||||
}
|
||||
}
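
A small sketch of the method-name translation, under the assumption that SpEL parses a bare call such as `toUpper(lastname)` into a MethodReference root node (imports as in the sketch after ExpressionNode; names are illustrative).

```java
SpelExpression expression = (SpelExpression) new SpelExpressionParser().parseExpression("toUpper(lastname)");
ExpressionNode node = ExpressionNode.from(expression.getAST(),
		new ExpressionState(new StandardEvaluationContext()));

if (node instanceof MethodReferenceNode) {
	System.out.println(((MethodReferenceNode) node).getMethodName()); // "$toUpper"
}
```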
|
||||
@@ -0,0 +1,120 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.spel;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.expression.spel.ExpressionState;
|
||||
import org.springframework.expression.spel.ast.OpDivide;
|
||||
import org.springframework.expression.spel.ast.OpMinus;
|
||||
import org.springframework.expression.spel.ast.OpModulus;
|
||||
import org.springframework.expression.spel.ast.OpMultiply;
|
||||
import org.springframework.expression.spel.ast.OpPlus;
|
||||
import org.springframework.expression.spel.ast.Operator;
|
||||
|
||||
/**
|
||||
* An {@link ExpressionNode} representing an operator.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class OperatorNode extends ExpressionNode {
|
||||
|
||||
private static final Map<String, String> OPERATORS;
|
||||
|
||||
static {
|
||||
|
||||
Map<String, String> map = new HashMap<String, String>(6);
|
||||
|
||||
map.put("+", "$add");
|
||||
map.put("-", "$subtract");
|
||||
map.put("*", "$multiply");
|
||||
map.put("/", "$divide");
|
||||
map.put("%", "$mod");
|
||||
|
||||
OPERATORS = Collections.unmodifiableMap(map);
|
||||
}
|
||||
|
||||
private final Operator operator;
|
||||
|
||||
/**
|
||||
* Creates a new {@link OperatorNode} from the given {@link Operator} and {@link ExpressionState}.
|
||||
*
|
||||
* @param node must not be {@literal null}.
|
||||
* @param state must not be {@literal null}.
|
||||
*/
|
||||
OperatorNode(Operator node, ExpressionState state) {
|
||||
super(node, state);
|
||||
this.operator = node;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.spel.ExpressionNode#isMathematicalOperation()
|
||||
*/
|
||||
@Override
|
||||
public boolean isMathematicalOperation() {
|
||||
return operator instanceof OpMinus || operator instanceof OpPlus || operator instanceof OpMultiply
|
||||
|| operator instanceof OpDivide || operator instanceof OpModulus;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the operator is unary.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean isUnaryOperator() {
|
||||
return operator.getRightOperand() == null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the Mongo expression of the operator.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getMongoOperator() {
|
||||
return OPERATORS.get(operator.getOperatorName());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the operator is a unary minus, e.g. -1.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean isUnaryMinus() {
|
||||
return isUnaryOperator() && operator instanceof OpMinus;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the left operand as {@link ExpressionNode}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public ExpressionNode getLeft() {
|
||||
return from(operator.getLeftOperand());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the right operand as {@link ExpressionNode}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public ExpressionNode getRight() {
|
||||
return from(operator.getRightOperand());
|
||||
}
|
||||
}
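
The operator translation in action, again as a fragment continuing the earlier sketch; the literal values are arbitrary.

```java
SpelExpression expression = (SpelExpression) new SpelExpressionParser().parseExpression("1 + 2");
OperatorNode plus = (OperatorNode) ExpressionNode.from(expression.getAST(),
		new ExpressionState(new StandardEvaluationContext()));

plus.getMongoOperator();    // "$add"
plus.getLeft().getValue();  // 1
plus.getRight().getValue(); // 2
plus.isUnaryMinus();        // false
```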
|
||||
@@ -0,0 +1,5 @@
/**
 * Support classes to transform SpEL expressions into MongoDB expressions.
 * @since 1.4
 */
package org.springframework.data.mongodb.core.spel;
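
For context, this package backs SpEL-based aggregation projections. A hedged usage sketch, assuming the `andExpression(…)` projection builder that ships with the same release; the field names are illustrative.

```java
import org.springframework.data.mongodb.core.aggregation.Aggregation;

// "netPrice * 1.19" is parsed with SpEL and rendered as { "$multiply" : [ "$netPrice" , 1.19 ] }
Aggregation aggregation = Aggregation.newAggregation(
		Aggregation.project("id").andExpression("netPrice * 1.19").as("grossPrice"));
```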
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -31,6 +31,7 @@ import com.mongodb.gridfs.GridFSFile;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public interface GridFsOperations extends ResourcePatternResolver {
|
||||
|
||||
@@ -126,7 +127,7 @@ public interface GridFsOperations extends ResourcePatternResolver {
|
||||
* Returns the {@link GridFsResource} with the given file name.
|
||||
*
|
||||
* @param filename
|
||||
* @return
|
||||
* @return the resource if it exists or {@literal null}.
|
||||
* @see ResourcePatternResolver#getResource(String)
|
||||
*/
|
||||
GridFsResource getResource(String filename);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -43,6 +43,7 @@ import com.mongodb.gridfs.GridFSInputFile;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver {
|
||||
|
||||
@@ -158,7 +159,15 @@ public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver
|
||||
* @see org.springframework.data.mongodb.gridfs.GridFsOperations#find(com.mongodb.DBObject)
|
||||
*/
|
||||
public List<GridFSDBFile> find(Query query) {
|
||||
return getGridFs().find(getMappedQuery(query));
|
||||
|
||||
if (query == null) {
|
||||
return getGridFs().find((DBObject) null);
|
||||
}
|
||||
|
||||
DBObject queryObject = getMappedQuery(query.getQueryObject());
|
||||
DBObject sortObject = getMappedQuery(query.getSortObject());
|
||||
|
||||
return getGridFs().find(queryObject, sortObject);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -190,7 +199,9 @@ public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver
|
||||
* @see org.springframework.core.io.ResourceLoader#getResource(java.lang.String)
|
||||
*/
|
||||
public GridFsResource getResource(String location) {
|
||||
return new GridFsResource(findOne(query(whereFilename().is(location))));
|
||||
|
||||
GridFSDBFile file = findOne(query(whereFilename().is(location)));
|
||||
return file != null ? new GridFsResource(file) : null;
|
||||
}
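
A short usage sketch of the changed behaviour, assuming an existing `GridFsTemplate` named `gridFs` (imports omitted); file names and the sort field are illustrative.

```java
// find(...) now tolerates a null query and honours the sort defined on the Query.
List<GridFSDBFile> files = gridFs.find(
		Query.query(GridFsCriteria.whereFilename().regex("report-.*")).with(new Sort(Sort.Direction.DESC, "uploadDate")));

// getResource(...) now returns null instead of a resource wrapping a missing file.
GridFsResource resource = gridFs.getResource("missing.txt");
if (resource == null) {
	// handle the absent file
}
```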
|
||||
|
||||
/*
|
||||
@@ -221,7 +232,11 @@ public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver
|
||||
}
|
||||
|
||||
private DBObject getMappedQuery(Query query) {
|
||||
return query == null ? null : queryMapper.getMappedObject(query.getQueryObject(), null);
|
||||
return query == null ? new Query().getQueryObject() : getMappedQuery(query.getQueryObject());
|
||||
}
|
||||
|
||||
private DBObject getMappedQuery(DBObject query) {
|
||||
return query == null ? null : queryMapper.getMappedObject(query, null);
|
||||
}
|
||||
|
||||
private GridFS getGridFs() {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2002-2013 the original author or authors.
|
||||
* Copyright 2012-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -21,15 +21,19 @@ import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import org.springframework.data.annotation.QueryAnnotation;
|
||||
|
||||
/**
|
||||
* Annotation to declare finder queries directly on repository methods. Both attributes allow using a placeholder
|
||||
* notation of {@code ?0}, {@code ?1} and so on.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target(ElementType.METHOD)
|
||||
@Documented
|
||||
@QueryAnnotation
|
||||
public @interface Query {
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -33,6 +33,7 @@ import javax.enterprise.inject.spi.ProcessBean;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.repository.cdi.CdiRepositoryBean;
|
||||
import org.springframework.data.repository.cdi.CdiRepositoryExtensionSupport;
|
||||
|
||||
/**
|
||||
@@ -76,26 +77,29 @@ public class MongoRepositoryExtension extends CdiRepositoryExtensionSupport {
|
||||
Set<Annotation> qualifiers = entry.getValue();
|
||||
|
||||
// Create the bean representing the repository.
|
||||
Bean<?> repositoryBean = createRepositoryBean(repositoryType, qualifiers, beanManager);
|
||||
CdiRepositoryBean<?> repositoryBean = createRepositoryBean(repositoryType, qualifiers, beanManager);
|
||||
|
||||
if (LOG.isInfoEnabled()) {
|
||||
LOG.info(String.format("Registering bean for %s with qualifiers %s.", repositoryType.getName(), qualifiers));
|
||||
}
|
||||
|
||||
// Register the bean to the container.
|
||||
registerBean(repositoryBean);
|
||||
afterBeanDiscovery.addBean(repositoryBean);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link Bean}.
|
||||
* Creates a {@link CdiRepositoryBean} for the repository of the given type.
|
||||
*
|
||||
* @param <T> The type of the repository.
|
||||
* @param repositoryType The class representing the repository.
|
||||
* @param beanManager The BeanManager instance.
|
||||
* @return The bean.
|
||||
* @param <T> the type of the repository.
|
||||
* @param repositoryType the class representing the repository.
|
||||
* @param qualifiers the qualifiers to be applied to the bean.
|
||||
* @param beanManager the BeanManager instance.
|
||||
* @return
|
||||
*/
|
||||
private <T> Bean<T> createRepositoryBean(Class<T> repositoryType, Set<Annotation> qualifiers, BeanManager beanManager) {
|
||||
private <T> CdiRepositoryBean<T> createRepositoryBean(Class<T> repositoryType, Set<Annotation> qualifiers,
|
||||
BeanManager beanManager) {
|
||||
|
||||
// Determine the MongoOperations bean which matches the qualifiers of the repository.
|
||||
Bean<MongoOperations> mongoOperations = this.mongoOperations.get(qualifiers);
|
||||
|
||||
@@ -35,6 +35,7 @@ import org.springframework.data.repository.query.QueryLookupStrategy.Key;
|
||||
* {@link #basePackages()} or {@link #basePackageClasses()} it will trigger scanning of the package of annotated class.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@Target(ElementType.TYPE)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@@ -80,7 +81,7 @@ public @interface EnableMongoRepositories {
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String repositoryImplementationPostfix() default "";
|
||||
String repositoryImplementationPostfix() default "Impl";
|
||||
|
||||
/**
|
||||
* Configures the location of where to find the Spring Data named queries properties file. Will default to
|
||||
@@ -119,4 +120,10 @@ public @interface EnableMongoRepositories {
|
||||
* @return
|
||||
*/
|
||||
boolean createIndexesForQueryMethods() default false;
|
||||
|
||||
/**
|
||||
* Configures whether nested repository-interfaces (e.g. defined as inner classes) should be discovered by the
|
||||
* repositories infrastructure.
|
||||
*/
|
||||
boolean considerNestedRepositories() default false;
|
||||
}
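
A hedged configuration sketch using the attributes above; the package name is illustrative.

```java
@Configuration
@EnableMongoRepositories(basePackages = "com.example.repositories", considerNestedRepositories = true)
class ApplicationConfig {
	// repository interfaces nested inside other classes in the configured package are now picked up,
	// and custom implementations are resolved with the "Impl" postfix by default
}
```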
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -79,22 +79,24 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(method, parameters);
|
||||
Query query = createQuery(new ConvertingParameterAccessor(operations.getConverter(), accessor));
|
||||
|
||||
Object result = null;
|
||||
|
||||
if (method.isGeoNearQuery() && method.isPageQuery()) {
|
||||
|
||||
MongoParameterAccessor countAccessor = new MongoParametersParameterAccessor(method, parameters);
|
||||
Query countQuery = createCountQuery(new ConvertingParameterAccessor(operations.getConverter(), countAccessor));
|
||||
|
||||
return new GeoNearExecution(accessor).execute(query, countQuery);
|
||||
result = new GeoNearExecution(accessor).execute(query, countQuery);
|
||||
} else if (method.isGeoNearQuery()) {
|
||||
return new GeoNearExecution(accessor).execute(query);
|
||||
} else if (method.isCollectionQuery()) {
|
||||
return new CollectionExecution(accessor.getPageable()).execute(query);
|
||||
result = new CollectionExecution(accessor.getPageable()).execute(query);
|
||||
} else if (method.isPageQuery()) {
|
||||
return new PagedExecution(accessor.getPageable()).execute(query);
|
||||
result = new PagedExecution(accessor.getPageable()).execute(query);
|
||||
} else {
|
||||
result = new SingleEntityExecution(isCountQuery()).execute(query);
|
||||
}
|
||||
|
||||
Object result = new SingleEntityExecution(isCountQuery()).execute(query);
|
||||
|
||||
if (result == null) {
|
||||
return result;
|
||||
}
|
||||
@@ -153,7 +155,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class CollectionExecution extends Execution {
|
||||
final class CollectionExecution extends Execution {
|
||||
|
||||
private final Pageable pageable;
|
||||
|
||||
@@ -176,7 +178,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class PagedExecution extends Execution {
|
||||
final class PagedExecution extends Execution {
|
||||
|
||||
private final Pageable pageable;
|
||||
|
||||
@@ -213,7 +215,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class SingleEntityExecution extends Execution {
|
||||
final class SingleEntityExecution extends Execution {
|
||||
|
||||
private final boolean countProjection;
|
||||
|
||||
@@ -239,7 +241,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class GeoNearExecution extends Execution {
|
||||
final class GeoNearExecution extends Execution {
|
||||
|
||||
private final MongoParameterAccessor accessor;
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
|
||||
@@ -35,6 +36,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor.PotentiallyConvertingIterator;
|
||||
import org.springframework.data.repository.query.parser.AbstractQueryCreator;
|
||||
import org.springframework.data.repository.query.parser.Part;
|
||||
import org.springframework.data.repository.query.parser.Part.IgnoreCaseType;
|
||||
import org.springframework.data.repository.query.parser.Part.Type;
|
||||
import org.springframework.data.repository.query.parser.PartTree;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -43,6 +45,7 @@ import org.springframework.util.Assert;
|
||||
* Custom query creator to create Mongo criterias.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
|
||||
@@ -99,7 +102,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> path = context.getPersistentPropertyPath(part.getProperty());
|
||||
MongoPersistentProperty property = path.getLeafProperty();
|
||||
Criteria criteria = from(part.getType(), property,
|
||||
Criteria criteria = from(part, property,
|
||||
where(path.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)),
|
||||
(PotentiallyConvertingIterator) iterator);
|
||||
|
||||
@@ -120,7 +123,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
PersistentPropertyPath<MongoPersistentProperty> path = context.getPersistentPropertyPath(part.getProperty());
|
||||
MongoPersistentProperty property = path.getLeafProperty();
|
||||
|
||||
return from(part.getType(), property,
|
||||
return from(part, property,
|
||||
base.and(path.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)),
|
||||
(PotentiallyConvertingIterator) iterator);
|
||||
}
|
||||
@@ -165,9 +168,11 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
* @param parameters
|
||||
* @return
|
||||
*/
|
||||
private Criteria from(Type type, MongoPersistentProperty property, Criteria criteria,
|
||||
private Criteria from(Part part, MongoPersistentProperty property, Criteria criteria,
|
||||
PotentiallyConvertingIterator parameters) {
|
||||
|
||||
Type type = part.getType();
|
||||
|
||||
switch (type) {
|
||||
case AFTER:
|
||||
case GREATER_THAN:
|
||||
@@ -193,8 +198,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
case STARTING_WITH:
|
||||
case ENDING_WITH:
|
||||
case CONTAINING:
|
||||
String value = parameters.next().toString();
|
||||
return criteria.regex(toLikeRegex(value, type));
|
||||
return addAppropriateLikeRegexTo(criteria, part, parameters.next().toString());
|
||||
case REGEX:
|
||||
return criteria.regex(parameters.next().toString());
|
||||
case EXISTS:
|
||||
@@ -220,19 +224,103 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
criteria.maxDistance(distance.getNormalizedValue());
|
||||
}
|
||||
return criteria;
|
||||
|
||||
case WITHIN:
|
||||
|
||||
Object parameter = parameters.next();
|
||||
return criteria.within((Shape) parameter);
|
||||
case SIMPLE_PROPERTY:
|
||||
return criteria.is(parameters.nextConverted(property));
|
||||
|
||||
return isSimpleComparisionPossible(part) ? criteria.is(parameters.nextConverted(property))
|
||||
: createLikeRegexCriteriaOrThrow(part, property, criteria, parameters, false);
|
||||
|
||||
case NEGATING_SIMPLE_PROPERTY:
|
||||
return criteria.ne(parameters.nextConverted(property));
|
||||
|
||||
return isSimpleComparisionPossible(part) ? criteria.ne(parameters.nextConverted(property))
|
||||
: createLikeRegexCriteriaOrThrow(part, property, criteria, parameters, true);
|
||||
default:
|
||||
throw new IllegalArgumentException("Unsupported keyword!");
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isSimpleComparisionPossible(Part part) {
|
||||
|
||||
switch (part.shouldIgnoreCase()) {
|
||||
case NEVER:
|
||||
return true;
|
||||
case WHEN_POSSIBLE:
|
||||
return part.getProperty().getType() != String.class;
|
||||
case ALWAYS:
|
||||
return false;
|
||||
default:
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates and extends the given criteria with a like-regex if necessary.
|
||||
*
|
||||
* @param part
|
||||
* @param property
|
||||
* @param criteria
|
||||
* @param parameters
|
||||
* @param shouldNegateExpression
|
||||
* @return the criteria extended with the like-regex.
|
||||
*/
|
||||
private Criteria createLikeRegexCriteriaOrThrow(Part part, MongoPersistentProperty property, Criteria criteria,
|
||||
PotentiallyConvertingIterator parameters, boolean shouldNegateExpression) {
|
||||
|
||||
switch (part.shouldIgnoreCase()) {
|
||||
|
||||
case ALWAYS:
|
||||
if (part.getProperty().getType() != String.class) {
|
||||
throw new IllegalArgumentException(String.format("part %s must be of type String but was %s",
|
||||
part.getProperty(), part.getType()));
|
||||
}
|
||||
// fall-through
|
||||
|
||||
case WHEN_POSSIBLE:
|
||||
if (shouldNegateExpression) {
|
||||
criteria = criteria.not();
|
||||
}
|
||||
return addAppropriateLikeRegexTo(criteria, part, parameters.nextConverted(property).toString());
|
||||
|
||||
case NEVER:
|
||||
// intentional no-op
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("part.shouldCaseIgnore must be one of %s, but was %s",
|
||||
Arrays.asList(IgnoreCaseType.ALWAYS, IgnoreCaseType.WHEN_POSSIBLE), part.shouldIgnoreCase()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an appropriate like-regex and appends it to the given criteria.
|
||||
*
|
||||
* @param criteria
|
||||
* @param part
|
||||
* @param value
|
||||
* @return the criteria extended with the regex.
|
||||
*/
|
||||
private Criteria addAppropriateLikeRegexTo(Criteria criteria, Part part, String value) {
|
||||
|
||||
return criteria.regex(toLikeRegex(value, part), toRegexOptions(part));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param part
|
||||
* @return the regex options or {@literal null}.
|
||||
*/
|
||||
private String toRegexOptions(Part part) {
|
||||
|
||||
String regexOptions = null;
|
||||
switch (part.shouldIgnoreCase()) {
|
||||
case WHEN_POSSIBLE:
|
||||
case ALWAYS:
|
||||
regexOptions = "i";
|
||||
case NEVER:
|
||||
}
|
||||
return regexOptions;
|
||||
}
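
What the ignore-case handling above enables, sketched against a hypothetical `Person` domain class and repository.

```java
public interface PersonRepository extends MongoRepository<Person, String> {

	// derives a case-insensitive, anchored regex criteria, roughly
	// { "lastname" : { "$regex" : "^heisenberg$", "$options" : "i" } }
	List<Person> findByLastnameIgnoreCase(String lastname);
}
```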
|
||||
|
||||
/**
|
||||
* Returns the next element from the given {@link Iterator} expecting it to be of a certain type.
|
||||
*
|
||||
@@ -265,7 +353,9 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
return new Object[] { next };
|
||||
}
|
||||
|
||||
private String toLikeRegex(String source, Type type) {
|
||||
private String toLikeRegex(String source, Part part) {
|
||||
|
||||
Type type = part.getType();
|
||||
|
||||
switch (type) {
|
||||
case STARTING_WITH:
|
||||
@@ -277,6 +367,9 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
case CONTAINING:
|
||||
source = "*" + source + "*";
|
||||
break;
|
||||
case SIMPLE_PROPERTY:
|
||||
case NEGATING_SIMPLE_PROPERTY:
|
||||
source = "^" + source + "$";
|
||||
default:
|
||||
}
|
||||
|
||||
|
||||
@@ -19,10 +19,14 @@ import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.repository.query.QueryMethod;
|
||||
import org.springframework.data.repository.query.RepositoryQuery;
|
||||
import org.springframework.data.repository.query.parser.PartTree;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.util.JSONParseException;
|
||||
|
||||
/**
|
||||
* {@link RepositoryQuery} implementation for Mongo.
|
||||
@@ -66,7 +70,24 @@ public class PartTreeMongoQuery extends AbstractMongoQuery {
|
||||
protected Query createQuery(ConvertingParameterAccessor accessor) {
|
||||
|
||||
MongoQueryCreator creator = new MongoQueryCreator(tree, accessor, context, isGeoNearQuery);
|
||||
return creator.createQuery();
|
||||
Query query = creator.createQuery();
|
||||
|
||||
String fieldSpec = this.getQueryMethod().getFieldSpecification();
|
||||
|
||||
if (!StringUtils.hasText(fieldSpec)) {
|
||||
return query;
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
BasicQuery result = new BasicQuery(query.getQueryObject().toString(), fieldSpec);
|
||||
result.setSortObject(query.getSortObject());
|
||||
return result;
|
||||
|
||||
} catch (JSONParseException o_O) {
|
||||
throw new IllegalStateException(String.format("Invalid query or field specification in %s!", getQueryMethod()), o_O);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -28,6 +28,7 @@ import com.mongodb.DBObject;
|
||||
import com.mysema.query.mongodb.MongodbSerializer;
|
||||
import com.mysema.query.types.Path;
|
||||
import com.mysema.query.types.PathMetadata;
|
||||
import com.mysema.query.types.PathType;
|
||||
|
||||
/**
|
||||
* Custom {@link MongodbSerializer} to take mapping information into account when building keys for constraints.
|
||||
@@ -61,6 +62,10 @@ class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
@Override
|
||||
protected String getKeyForPath(Path<?> expr, PathMetadata<?> metadata) {
|
||||
|
||||
if (!metadata.getPathType().equals(PathType.PROPERTY)) {
|
||||
return super.getKeyForPath(expr, metadata);
|
||||
}
|
||||
|
||||
Path<?> parent = metadata.getParent();
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(parent.getType());
|
||||
MongoPersistentProperty property = entity.getPersistentProperty(metadata.getName());
|
||||
|
||||
@@ -0,0 +1,33 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.util;

import com.mongodb.BasicDBList;

/**
 * @author Thomas Darimont
 */
public class DBObjectUtils {

	public static BasicDBList dbList(Object... items) {

		BasicDBList list = new BasicDBList();
		for (Object item : items) {
			list.add(item);
		}
		return list;
	}
}
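
Typical (test) usage of the helper above:

```java
BasicDBList args = DBObjectUtils.dbList("$a", 1, 2); // [ "$a" , 1 , 2 ]
```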
|
||||
@@ -1,20 +1,20 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
public interface MongoDocumentWriter {
|
||||
|
||||
}
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
/**
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
package org.springframework.data.mongodb.util;
|
||||
@@ -2,4 +2,5 @@ http\://www.springframework.org/schema/data/mongo/spring-mongo-1.0.xsd=org/sprin
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.1.xsd=org/springframework/data/mongodb/config/spring-mongo-1.1.xsd
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.2.xsd=org/springframework/data/mongodb/config/spring-mongo-1.2.xsd
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.3.xsd=org/springframework/data/mongodb/config/spring-mongo-1.3.xsd
http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-1.3.xsd
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.4.xsd=org/springframework/data/mongodb/config/spring-mongo-1.4.xsd
http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-1.4.xsd
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoFactoryBean"><![CDATA[
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB'.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
@@ -209,7 +209,7 @@ The base package in which to scan for entities annotated with @Document
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoTemplate">
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
@@ -248,7 +248,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoTemplate"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
@@ -259,7 +259,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoFactoryBean"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoFactoryBean"><![CDATA[
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB'.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
@@ -197,7 +197,7 @@ The base package in which to scan for entities annotated with @Document
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoTemplate">
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
@@ -246,7 +246,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoTemplate"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
@@ -257,7 +257,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoFactoryBean"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoFactoryBean"><![CDATA[
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB'.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
@@ -197,7 +197,7 @@ The base package in which to scan for entities annotated with @Document
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoTemplate">
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
@@ -261,7 +261,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoTemplate"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
@@ -272,7 +272,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoFactoryBean"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoFactoryBean"><![CDATA[
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB'.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
@@ -276,7 +276,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoTemplate"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
@@ -287,7 +287,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoFactoryBean"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
|
||||
@@ -0,0 +1,639 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<xsd:schema xmlns="http://www.springframework.org/schema/data/mongo"
|
||||
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:beans="http://www.springframework.org/schema/beans"
|
||||
xmlns:tool="http://www.springframework.org/schema/tool"
|
||||
xmlns:context="http://www.springframework.org/schema/context"
|
||||
xmlns:repository="http://www.springframework.org/schema/data/repository"
|
||||
targetNamespace="http://www.springframework.org/schema/data/mongo"
|
||||
elementFormDefault="qualified" attributeFormDefault="unqualified">
|
||||
|
||||
<xsd:import namespace="http://www.springframework.org/schema/beans" />
|
||||
<xsd:import namespace="http://www.springframework.org/schema/tool" />
|
||||
<xsd:import namespace="http://www.springframework.org/schema/context" />
|
||||
<xsd:import namespace="http://www.springframework.org/schema/data/repository"
|
||||
schemaLocation="http://www.springframework.org/schema/data/repository/spring-repository.xsd" />
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB'.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
<tool:exports type="com.mongodb.Mongo"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:element name="db-factory">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines a MongoDbFactory for connecting to a specific database
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongoDbFactory").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mongo-ref" type="mongoRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The reference to a Mongo instance. If not configured a default com.mongodb.Mongo instance will be created.
|
||||
]]>
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="dbname" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the database to connect to. Default is 'db'.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="authentication-dbname" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the authentication database to connect to. Default is 'db'.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="port" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The port to connect to MongoDB server. Default is 27017
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="host" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The host to connect to a MongoDB server. Default is localhost
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="username" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The username to use when connecting to a MongoDB server.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="password" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The password to use when connecting to a MongoDB server.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="uri" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The Mongo URI string.]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:attributeGroup name="mongo-repository-attributes">
|
||||
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" default="mongoTemplate">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="create-query-indexes" type="xsd:boolean" default="false">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
Enables creation of indexes for queries that get derived from the method name
|
||||
and thus reference domain class properties. Defaults to false.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:attributeGroup>
|
||||
|
||||
<xsd:element name="repositories">
|
||||
<xsd:complexType>
|
||||
<xsd:complexContent>
|
||||
<xsd:extension base="repository:repositories">
|
||||
<xsd:attributeGroup ref="mongo-repository-attributes"/>
|
||||
<xsd:attributeGroup ref="repository:repository-attributes"/>
|
||||
</xsd:extension>
|
||||
</xsd:complexContent>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
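<!-- A minimal usage sketch for the repositories element; the "mongo" prefix, the package name and the base-package attribute (contributed by the included repository attribute group) are assumptions:
     <mongo:repositories base-package="com.example.repositories" mongo-template-ref="mongoTemplate" create-query-indexes="true"/>
-->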
|
||||
|
||||
<xsd:element name="mapping-converter">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[Defines a MongoConverter for getting rich mapping functionality.]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:exports type="org.springframework.data.mongodb.core.convert.MappingMongoConverter" />
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:sequence>
|
||||
<xsd:element name="custom-converters" minOccurs="0">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Top-level element that contains one or more custom converters to be used for mapping
|
||||
domain objects to and from Mongo's DBObject]]>
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:sequence>
|
||||
<xsd:element name="converter" type="customConverterType" minOccurs="0" maxOccurs="unbounded"/>
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="base-package" type="xsd:string" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the MappingMongoConverter instance (by default "mappingConverter").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="base-package" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The base package in which to scan for entities annotated with @Document
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="db-factory-ref" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a DbFactory.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.MongoDbFactory" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="type-mapper-ref" type="typeMapperRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a MongoTypeMapper to be used by this MappingMongoConverter.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mapping-context-ref" type="mappingContextRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mapping.model.MappingContext">
|
||||
The reference to a MappingContext. Will default to 'mappingContext'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="disable-validation" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener">
|
||||
Disables JSR-303 validation on MongoDB documents before they are saved. By default it is set to false.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="xsd:boolean xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="abbreviate-field-names" use="optional" default="false">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy">
|
||||
Enables abbreviating the field names for domain class properties to the
|
||||
first character of their camel case names, e.g. fooBar -> fb.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="xsd:boolean xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
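<!-- A minimal usage sketch for the mapping-converter element with a nested custom-converters block; the "mongo" prefix and the package names are placeholder assumptions:
     <mongo:mapping-converter id="mappingConverter" base-package="com.example.domain" db-factory-ref="mongoDbFactory">
         <mongo:custom-converters base-package="com.example.converters"/>
     </mongo:mapping-converter>
-->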
|
||||
|
||||
<xsd:element name="jmx">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines JMX Model MBeans for monitoring a MongoDB server.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="mongo-ref" type="mongoRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the Mongo object that determines which server to monitor (by default "mongo").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
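<!-- A minimal usage sketch for the jmx element, assuming the conventional "mongo" namespace prefix and a Mongo bean named "mongo":
     <mongo:jmx mongo-ref="mongo"/>
-->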
|
||||
|
||||
<xsd:element name="auditing">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
<tool:exports type="org.springframework.data.mongodb.core.mapping.event.AuditingEventListener" />
|
||||
<tool:exports type="org.springframework.data.auditing.IsNewAwareAuditingHandler" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attributeGroup ref="repository:auditing-attributes" />
|
||||
<xsd:attribute name="mapping-context-ref" type="mappingContextRef" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
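<!-- A minimal usage sketch for the auditing element; the "mongo" prefix and the referenced bean name are assumptions:
     <mongo:auditing mapping-context-ref="mappingContext"/>
-->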
|
||||
|
||||
<xsd:simpleType name="typeMapperRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoTypeMapper"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="mappingContextRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mapping.model.MappingContext"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="mongoTemplateRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="mongoRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="sslSocketFactoryRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="javax.net.ssl.SSLSocketFactory"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="writeConcernEnumeration">
|
||||
<xsd:restriction base="xsd:token">
|
||||
<xsd:enumeration value="NONE" />
|
||||
<xsd:enumeration value="NORMAL" />
|
||||
<xsd:enumeration value="SAFE" />
|
||||
<xsd:enumeration value="FSYNC_SAFE" />
|
||||
<xsd:enumeration value="REPLICAS_SAFE" />
|
||||
<xsd:enumeration value="JOURNAL_SAFE" />
|
||||
<xsd:enumeration value="MAJORITY" />
|
||||
</xsd:restriction>
|
||||
</xsd:simpleType>
|
||||
<!-- MLP
|
||||
<xsd:attributeGroup name="writeConcern">
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:simpleType>
|
||||
<xsd:restriction base="xsd:string">
|
||||
<xsd:enumeration value="NONE" />
|
||||
<xsd:enumeration value="NORMAL" />
|
||||
<xsd:enumeration value="SAFE" />
|
||||
<xsd:enumeration value="FSYNC_SAFE" />
|
||||
<xsd:enumeration value="REPLICA_SAFE" />
|
||||
<xsd:enumeration value="JOURNAL_SAFE" />
|
||||
<xsd:enumeration value="MAJORITY" />
|
||||
</xsd:restriction>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
</xsd:attributeGroup>
|
||||
-->
|
||||
<xsd:complexType name="mongoType">
|
||||
<xsd:sequence minOccurs="0" maxOccurs="1">
|
||||
<xsd:element name="options" type="optionsType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The Mongo driver options
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
<tool:exports type="com.mongodb.MongoOptions"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:element>
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
<!-- MLP
|
||||
<xsd:attributeGroup ref="writeConcern" />
|
||||
-->
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongo").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="port" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The port to connect to the MongoDB server. Default is 27017.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="host" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The host to connect to a MongoDB server. Default is localhost
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="replica-set" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The comma-delimited list of host:port entries to use for replica sets/pairs.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
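<!-- A minimal usage sketch for an element of the mongoType defined above (assumed to back the mongo element); the "mongo" prefix and the chosen values are placeholder assumptions:
     <mongo:mongo id="mongo" host="localhost" port="27017" write-concern="SAFE">
         <mongo:options connections-per-host="20"/>
     </mongo:mongo>
-->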
|
||||
|
||||
<xsd:complexType name="optionsType">
|
||||
<xsd:attribute name="connections-per-host" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The number of connections allowed per host. Requests will block if the pool is exhausted. Default is 10. The system property MONGO.POOLSIZE can override this value.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="threads-allowed-to-block-for-connection-multiplier" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The multiplier for connectionsPerHost for the number of threads that are allowed to block waiting for a connection. Default is 5.
If connectionsPerHost is 10 and threadsAllowedToBlockForConnectionMultiplier is 5,
then up to 50 threads can block; any more than that and an exception will be thrown.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="max-wait-time" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The maximum wait time in milliseconds that a thread may block waiting for a connection. Default is 120000 ms (2 minutes).
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="connect-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The connect timeout in milliseconds. The default is 0, which means an infinite timeout.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="socket-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The socket timeout in milliseconds. The default is 0, which means an infinite timeout.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="socket-keep-alive" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The keep-alive flag, which controls whether the socket keep-alive option is enabled. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="auto-connect-retry" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Controls whether the system automatically retries a connect when it fails. Default is false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="max-auto-connect-retry-time" type="xsd:long">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The maximum amount of time in milliseconds to spend retrying to open a connection to the same server. Default is 0, which means to use the default of 15s if autoConnectRetry is on.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-number" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Specifies the number of servers to wait for on write operations, and the exception-raising behavior. Corresponds to the 'w' option of the getlasterror command. Defaults to 0.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Controls the timeout for write operations in milliseconds. Corresponds to the 'wtimeout' option of the getlasterror command. Defaults to 0 (indefinite); a value greater than zero is the number of milliseconds to wait.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-fsync" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
This controls whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="slave-ok" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Controls whether the driver is allowed to read from secondaries (slaves). Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="ssl" type="xsd:boolean">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Controls whether the driver should use an SSL connection. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="ssl-socket-factory-ref" type="sslSocketFactoryRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The SSLSocketFactory to use for the SSL connection. If none is configured here, SSLSocketFactory#getDefault() will be used.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
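<!-- A minimal usage sketch for the options element backed by the optionsType defined above; the "mongo" prefix and the chosen values are placeholder assumptions:
     <mongo:options connections-per-host="20" threads-allowed-to-block-for-connection-multiplier="5"
                    connect-timeout="10000" socket-timeout="15000" max-wait-time="120000" ssl="true"/>
-->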
|
||||
|
||||
<xsd:group name="beanElementGroup">
|
||||
<xsd:choice>
|
||||
<xsd:element ref="beans:bean"/>
|
||||
<xsd:element ref="beans:ref"/>
|
||||
</xsd:choice>
|
||||
</xsd:group>
|
||||
|
||||
<xsd:complexType name="customConverterType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Element defining a custom converter.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:group ref="beanElementGroup" minOccurs="0" maxOccurs="1"/>
|
||||
<xsd:attribute name="ref" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
A reference to a custom converter.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref"/>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
|
||||
<xsd:simpleType name="converterRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoConverter"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:element name="template">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines a MongoTemplate instance backed by the referenced MongoDbFactory.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongoDbFactory").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="converter-ref" type="converterRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The reference to a MongoConverter instance.
|
||||
]]>
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoConverter"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="db-factory-ref" type="xsd:string"
|
||||
use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a DbFactory.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to
|
||||
type="org.springframework.data.mongodb.MongoDbFactory" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
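<!-- A minimal usage sketch for the template element; the "mongo" prefix and the referenced bean names are assumptions:
     <mongo:template id="mongoTemplate" db-factory-ref="mongoDbFactory" converter-ref="mappingConverter" write-concern="SAFE"/>
-->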
|
||||
|
||||
<xsd:element name="gridFsTemplate">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines a GridFsTemplate instance backed by the referenced MongoDbFactory.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongoDbFactory").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="converter-ref" type="converterRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The reference to a MongoConverter instance.
|
||||
]]>
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoConverter"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="db-factory-ref" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a DbFactory.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.MongoDbFactory" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="bucket" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The GridFs bucket string.]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
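<!-- A minimal usage sketch for the gridFsTemplate element; the "mongo" prefix, the referenced bean names and the bucket value are placeholder assumptions:
     <mongo:gridFsTemplate id="gridFsTemplate" db-factory-ref="mongoDbFactory" converter-ref="mappingConverter" bucket="files"/>
-->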
|
||||
</xsd:schema>
|
||||
@@ -1,48 +1,48 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
|
||||
import org.springframework.data.mongodb.core.mapping.event.LoggingEventListener;
|
||||
|
||||
@Configuration
|
||||
public class GeoSpatialAppConfig extends AbstractMongoConfiguration {
|
||||
|
||||
@Override
|
||||
public String getDatabaseName() {
|
||||
return "database";
|
||||
}
|
||||
|
||||
@Override
|
||||
@Bean
|
||||
public Mongo mongo() throws Exception {
|
||||
return new Mongo("127.0.0.1");
|
||||
}
|
||||
|
||||
@Bean
|
||||
public LoggingEventListener mappingEventsListener() {
|
||||
return new LoggingEventListener();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getMappingBasePackage() {
|
||||
return "org.springframework.data.mongodb.core.core";
|
||||
}
|
||||
|
||||
}
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* Sample configuration class in default package.
|
||||
*
|
||||
* @see DATAMONGO-877
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@Configuration
|
||||
public class ConfigClassInDefaultPackage extends AbstractMongoConfiguration {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.config.AbstractMongoConfiguration#getDatabaseName()
|
||||
*/
|
||||
@Override
|
||||
protected String getDatabaseName() {
|
||||
return "default";
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.config.AbstractMongoConfiguration#mongo()
|
||||
*/
|
||||
@Override
|
||||
public Mongo mongo() throws Exception {
|
||||
return new MongoClient();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
import org.junit.Test;
|
||||
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
|
||||
|
||||
/**
|
||||
* Unit test for {@link ConfigClassInDefaultPackage}.
|
||||
*
|
||||
* @see DATAMONGO-877
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class ConfigClassInDefaultPackageUnitTests {
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-877
|
||||
*/
|
||||
@Test
|
||||
public void loadsConfigClassFromDefaultPackage() {
|
||||
new AnnotationConfigApplicationContext(ConfigClassInDefaultPackage.class).close();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,78 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.core.CollectionCallback;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoException;
|
||||
|
||||
/**
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration
|
||||
public abstract class AbstractIntegrationTests {
|
||||
|
||||
@Configuration
|
||||
static class TestConfig extends AbstractMongoConfiguration {
|
||||
|
||||
@Override
|
||||
protected String getDatabaseName() {
|
||||
return "database";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mongo mongo() throws Exception {
|
||||
return new MongoClient();
|
||||
}
|
||||
}
|
||||
|
||||
@Autowired MongoOperations operations;
|
||||
|
||||
@Before
|
||||
@After
|
||||
public void cleanUp() {
|
||||
|
||||
for (String collectionName : operations.getCollectionNames()) {
|
||||
if (!collectionName.startsWith("system")) {
|
||||
operations.execute(collectionName, new CollectionCallback<Void>() {
|
||||
@Override
|
||||
public Void doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
collection.remove(new BasicDBObject());
|
||||
assertThat(collection.find().hasNext(), is(false));
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -25,6 +25,7 @@ import org.springframework.beans.factory.NoSuchBeanDefinitionException;
|
||||
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.support.AbstractApplicationContext;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoTypeMapper;
|
||||
@@ -35,6 +36,7 @@ import org.springframework.expression.spel.support.StandardEvaluationContext;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link AbstractMongoConfiguration}.
|
||||
@@ -84,12 +86,13 @@ public class AbstractMongoConfigurationUnitTests {
|
||||
@Test
|
||||
public void containsMongoDbFactoryButNoMongoBean() {
|
||||
|
||||
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class);
|
||||
AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class);
|
||||
|
||||
assertThat(context.getBean(MongoDbFactory.class), is(notNullValue()));
|
||||
|
||||
exception.expect(NoSuchBeanDefinitionException.class);
|
||||
context.getBean(Mongo.class);
|
||||
context.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -109,12 +112,13 @@ public class AbstractMongoConfigurationUnitTests {
|
||||
@Test
|
||||
public void lifecycleCallbacksAreInvokedInAppropriateOrder() {
|
||||
|
||||
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class);
|
||||
AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class);
|
||||
MongoMappingContext mappingContext = context.getBean(MongoMappingContext.class);
|
||||
BasicMongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(Entity.class);
|
||||
StandardEvaluationContext spElContext = (StandardEvaluationContext) ReflectionTestUtils.getField(entity, "context");
|
||||
|
||||
assertThat(spElContext.getBeanResolver(), is(notNullValue()));
|
||||
context.close();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -123,12 +127,21 @@ public class AbstractMongoConfigurationUnitTests {
|
||||
@Test
|
||||
public void shouldBeAbleToConfigureCustomTypeMapperViaJavaConfig() {
|
||||
|
||||
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class);
|
||||
AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class);
|
||||
MongoTypeMapper typeMapper = context.getBean(CustomMongoTypeMapper.class);
|
||||
MappingMongoConverter mmc = context.getBean(MappingMongoConverter.class);
|
||||
|
||||
assertThat(mmc, is(notNullValue()));
|
||||
assertThat(mmc.getTypeMapper(), is(typeMapper));
|
||||
context.close();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-789
|
||||
*/
|
||||
@Test
|
||||
public void authenticationDatabaseShouldDefaultToNull() {
|
||||
assertThat(new SampleMongoConfiguration().getAuthenticationDatabaseName(), is(nullValue()));
|
||||
}
|
||||
|
||||
private static void assertScanningDisabled(final String value) throws ClassNotFoundException {
|
||||
@@ -154,7 +167,7 @@ public class AbstractMongoConfigurationUnitTests {
|
||||
|
||||
@Override
|
||||
public Mongo mongo() throws Exception {
|
||||
return new Mongo();
|
||||
return new MongoClient();
|
||||
}
|
||||
|
||||
@Bean
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012 the original author or authors.
|
||||
* Copyright 2012-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,7 +20,7 @@ import static org.junit.Assert.*;
|
||||
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.Test;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.support.AbstractApplicationContext;
|
||||
import org.springframework.context.support.ClassPathXmlApplicationContext;
|
||||
import org.springframework.data.annotation.CreatedDate;
|
||||
import org.springframework.data.annotation.Id;
|
||||
@@ -35,9 +35,9 @@ import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
|
||||
public class AuditingIntegrationTests {
|
||||
|
||||
@Test
|
||||
public void enablesAuditingAndSetsPropertiesAccordingly() {
|
||||
public void enablesAuditingAndSetsPropertiesAccordingly() throws Exception {
|
||||
|
||||
ApplicationContext context = new ClassPathXmlApplicationContext("auditing.xml", getClass());
|
||||
AbstractApplicationContext context = new ClassPathXmlApplicationContext("auditing.xml", getClass());
|
||||
|
||||
Entity entity = new Entity();
|
||||
BeforeConvertEvent<Entity> event = new BeforeConvertEvent<Entity>(entity);
|
||||
@@ -46,23 +46,20 @@ public class AuditingIntegrationTests {
|
||||
assertThat(entity.created, is(notNullValue()));
|
||||
assertThat(entity.modified, is(entity.created));
|
||||
|
||||
Thread.sleep(10);
|
||||
entity.id = 1L;
|
||||
event = new BeforeConvertEvent<Entity>(entity);
|
||||
context.publishEvent(event);
|
||||
|
||||
assertThat(entity.created, is(notNullValue()));
|
||||
assertThat(entity.modified, is(not(entity.created)));
|
||||
context.close();
|
||||
}
|
||||
|
||||
class Entity {
|
||||
|
||||
@CreatedDate
|
||||
DateTime created;
|
||||
|
||||
@LastModifiedDate
|
||||
DateTime modified;
|
||||
|
||||
@Id
|
||||
Long id;
|
||||
@CreatedDate DateTime created;
|
||||
@LastModifiedDate DateTime modified;
|
||||
@Id Long id;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,117 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.domain.AuditorAware;
|
||||
import org.springframework.data.mongodb.core.AuditablePerson;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.repository.MongoRepository;
|
||||
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
|
||||
import org.springframework.stereotype.Repository;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* Integration tests for auditing via Java config.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration
|
||||
public class AuditingViaJavaConfigRepositoriesTests {
|
||||
|
||||
@Autowired AuditablePersonRepository auditablePersonRepository;
|
||||
@Autowired AuditorAware<AuditablePerson> auditorAware;
|
||||
AuditablePerson auditor;
|
||||
|
||||
@Configuration
|
||||
@EnableMongoAuditing(auditorAwareRef = "auditorProvider")
|
||||
@EnableMongoRepositories(basePackageClasses = AuditablePersonRepository.class, considerNestedRepositories = true)
|
||||
static class Config {
|
||||
|
||||
@Bean
|
||||
public MongoOperations mongoTemplate() throws Exception {
|
||||
return new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "database"));
|
||||
}
|
||||
|
||||
@Bean
|
||||
public MongoMappingContext mappingContext() {
|
||||
return new MongoMappingContext();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@SuppressWarnings("unchecked")
|
||||
public AuditorAware<AuditablePerson> auditorProvider() {
|
||||
return mock(AuditorAware.class);
|
||||
}
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setup() {
|
||||
auditablePersonRepository.deleteAll();
|
||||
this.auditor = auditablePersonRepository.save(new AuditablePerson("auditor"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void basicAuditing() {
|
||||
|
||||
doReturn(this.auditor).when(this.auditorAware).getCurrentAuditor();
|
||||
|
||||
AuditablePerson user = new AuditablePerson("user");
|
||||
|
||||
AuditablePerson savedUser = auditablePersonRepository.save(user);
|
||||
System.out.println(savedUser);
|
||||
|
||||
AuditablePerson createdBy = savedUser.getCreatedBy();
|
||||
assertThat(createdBy, is(notNullValue()));
|
||||
assertThat(createdBy.getFirstname(), is(this.auditor.getFirstname()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-843
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("resource")
|
||||
public void defaultsMappingContextIfNoneConfigured() {
|
||||
new AnnotationConfigApplicationContext(SampleConfig.class);
|
||||
}
|
||||
|
||||
@Repository
|
||||
static interface AuditablePersonRepository extends MongoRepository<AuditablePerson, String> {}
|
||||
|
||||
@Configuration
|
||||
@EnableMongoAuditing
|
||||
static class SampleConfig {
|
||||
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff.