Compare commits

123 commits: 1.2.3.RELEASE...1.3.2.RELEASE
Commit SHA1s:

c2aacc03ff, 1cf544a530, bbb097cafc, feafd50b59, b51cf05f90, b8196ac9ed,
e643d39fa6, 6abdb0aa46, 34063ff647, 857f366b56, f7540d45c6, 3d2ae8117f,
6b3bd8f621, b17ec47003, 8c7b558d39, a3faabf718, 1a46abfbb9, 61284228dd,
8cb92de1ee, 5d3cc3fa04, c0b99740dc, 595bbd3aa7, 5d2fc31164, a9dc0fae69,
0605c7b753, 21352a8829, 58e1d2dbd9, 4f7821e3c2, 9dd866e34a, def6079795,
f3f537c1a6, ad44db386b, bcc3bf61b6, 1a28a294d1, 14623a3655, 6dcaa31897,
e57fe346c0, 7dd94949d5, 966f971bee, aa23c579e8, 6b634d08ce, b7b61405f9,
4d65aa7207, c129c706a3, 7823385ac7, 21fcfe11c2, bfe33a446c, 9be50316c3,
30513267af, d3d480e79b, c39ad1bbc4, fcdc29df49, de7120d8dd, 84df02ae38,
d6c5907940, b2fe54c0a1, 47a198c688, 5d9dbda03b, 36d52862bc, 0afbf6fe19,
b0bf8cb718, 567a8d9d5b, ceef18d7a4, 4f57712f12, 478396c503, aa80d1ad0a,
fd28ab4d33, 187c80dfcc, 389a3ac066, 297bd3e3dd, b11fba3321, 3c68671d86,
b171f4192d, 21a1ce985c, 97caba50bf, 818f739d5a, a44c1fdd2d, 6b35ca80d4,
23b276745c, be0092d3f5, f36792d419, 31393db6ff, 8b50af07ce, 0eb315a758,
976f5dd0e3, f614364918, 38a86033be, d11c20d548, 3e2387ae6b, e8a1caec53,
44c0b14018, 3daf3fc95b, bd3aac8342, 94f697da10, 0cdec56a27, 9d83331f9f,
071cd1647f, 92af5aa345, c07ad0fdf6, 04e0f5c4a7, 133975fb44, 9a372a57e0,
e67644094a, c5a99b5b5e, 9564bcb280, f1e961a1ee, 0c69c87787, 48b4a88a6a,
d0c0866f88, ab18bb5b96, 509be3d681, 8e01f95b29, 4dcec1f6e2, d3bf6c0a19,
3410a0589c, 7a64766496, e56a8597b8, e13208b4b3, 6139e83d8d, f33790013f,
bf81d95d21, 158e4f033c, 81097061ad
README.md (154 changes)

@@ -1,96 +1,68 @@
Spring Data MongoDB
======================
# Spring Data MongoDB

The primary goal of the [Spring Data](http://www.springsource.org/spring-data) project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.

The Spring Data MongoDB aims to provide a familiar and consistent Spring-based programming model for for new datastores while retaining store-specific features and capabilities. The Spring Data MongoDB project provides integration with the MongoDB document database. Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB DBCollection and easily writing a Repository style data access layer
The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities. The Spring Data MongoDB project provides integration with the MongoDB document database. Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB DBCollection and easily writing a repository style data access layer.

Getting Help
------------
## Getting Help

For a comprehensive treatmet of all the Spring Data MongoDB features, please refer to the The [User Guide](http://static.springsource.org/spring-data/data-mongodb/docs/current/reference/html/)
For a comprehensive treatment of all the Spring Data MongoDB features, please refer to:

The [JavaDocs](http://static.springsource.org/spring-data/data-mongodb/docs/current/api/) have extensive comments in them as well.

The home page of [Spring Data MongoDB](http://www.springsource.org/spring-data/mongodb) contains links to articles and other resources.

For more detailed questions, use the [forum](http://forum.springsource.org/forumdisplay.php?f=80).
* the [User Guide](http://static.springsource.org/spring-data/data-mongodb/docs/current/reference/html/)
* the [JavaDocs](http://static.springsource.org/spring-data/data-mongodb/docs/current/api/) have extensive comments in them as well.
* the home page of [Spring Data MongoDB](http://www.springsource.org/spring-data/mongodb) contains links to articles and other resources.
* for more detailed questions, use the [forum](http://forum.springsource.org/forumdisplay.php?f=80).

If you are new to Spring as well as to Spring Data, look for information about [Spring projects](http://www.springsource.org/projects).
Quick Start
-----------
## Quick Start

## MongoDB
### Maven configuration

For those in a hurry:

* Download the jar through Maven:
Add the Maven dependency:

```xml
<dependency>
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb</artifactId>
  <version>1.2.3.RELEASE</version>
  <version>1.3.2.RELEASE</version>
</dependency>
```

If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version.

```xml
<dependency>
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb</artifactId>
  <version>1.4.0.BUILD-SNAPSHOT</version>
</dependency>

<repository>
  <id>spring-libs-snapshot</id>
  <name>Spring Snapshot Repository</name>
  <url>http://repo.springsource.org/libs-snapshot</url>
</repository>
```
### MongoTemplate

MongoTemplate is the central support class for Mongo database operations. It provides
MongoTemplate is the central support class for Mongo database operations. It provides:

* Basic POJO mapping support to and from BSON
* Connection Affinity callback
* Convenience methods to interact with the store (insert object, update objects) and MongoDB specific ones (geo-spatial operations, upserts, map-reduce etc.)
* Connection affinity callback
* Exception translation into Spring's [technology agnostic DAO exception hierarchy](http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/dao.html#dao-exceptions).

Future plans are to support optional logging and/or exception throwing based on WriteResult return value, common map-reduce operations, GridFS operations. A simple API for partial document updates is also planned.
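For orientation, a minimal usage sketch along those lines; it assumes a mapped `Person` domain class and a MongoDB instance on localhost, and the names used are illustrative only:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.Mongo;

// assumed domain class for the sake of the example
class Person {

  @Id private String id;
  private String name;
  private int age;

  public Person(String name, int age) {
    this.name = name;
    this.age = age;
  }
}

class MongoApp {

  public static void main(String[] args) throws Exception {

    // Template built on a plain driver instance and a database name
    MongoOperations mongoOps = new MongoTemplate(new SimpleMongoDbFactory(new Mongo(), "database"));

    // POJO mapping: the Person instance is converted to a BSON document and inserted
    mongoOps.insert(new Person("Joe", 34));

    // Convenience query method; driver exceptions are translated into Spring's DAO hierarchy
    Person joe = mongoOps.findOne(new Query(where("name").is("Joe")), Person.class);
    System.out.println(joe != null ? "found Joe" : "not found");

    mongoOps.dropCollection(Person.class);
  }
}
```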
### Spring Data repositories

### Easy Data Repository generation
To simplify the creation of data repositories Spring Data MongoDB provides a generic repository programming model. It will automatically create a repository proxy for you that adds implementations of finder methods you specify on an interface.

To simplify the creation of data repositories a generic `Repository` interface and default implementation is provided. Furthermore, Spring will automatically create a Repository implementation for you that adds implementations of finder methods you specify on an interface.

The Repository interface is
For example, given a `Person` class with first and last name properties, a `PersonRepository` interface that can query for `Person` by last name and when the first name matches a like expression is shown below:

```java
public interface Repository<T, ID extends Serializable> {

  T save(T entity);

  List<T> save(Iterable<? extends T> entities);

  T findById(ID id);

  boolean exists(ID id);

  List<T> findAll();

  Long count();

  void delete(T entity);

  void delete(Iterable<? extends T> entities);

  void deleteAll();
}
```

The `MongoRepository` extends `Repository` and will in future add more Mongo specific methods.

```java
public interface MongoRepository<T, ID extends Serializable> extends Repository<T, ID> {
}
```

`SimpleMongoRepository` is the out of the box implementation of the `MongoRepository` you can use for basid CRUD operations.

To go beyond basic CRUD, extend the `MongoRepository` interface and supply your own finder methods that follow simple naming conventions such that they can be easily converted into queries.

For example, given a `Person` class with first and last name properties, a `PersonRepository` interface that can query for `Person` by last name and when the first name matches a regular expression is shown below

```java
public interface PersonRepository extends MongoRepository<Person, Long> {
public interface PersonRepository extends CrudRepository<Person, Long> {

  List<Person> findByLastname(String lastname);

@@ -98,32 +70,56 @@ public interface PersonRepository extends MongoRepository<Person, Long> {
}
```

You can have Spring automatically create a proxy for the interface as shown below:
The queries issued on execution will be derived from the method name. Extending `CrudRepository` causes CRUD methods being pulled into the interface so that you can easily save and find single entities and collections of them.
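As an illustrative sketch of that derivation (the `findByFirstnameLike` method and the property names are assumptions reused from the examples in this README):

```java
import java.util.List;

import org.springframework.data.repository.CrudRepository;

public interface PersonRepository extends CrudRepository<Person, Long> {

  // derived from the method name: matches documents whose lastname equals the given value
  List<Person> findByLastname(String lastname);

  // derived "like" query: translated into a regular-expression match on firstname
  List<Person> findByFirstnameLike(String firstname);
}
```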
You can have Spring automatically create a proxy for the interface by using the following JavaConfig:

```java
@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoConfiguration {

  @Override
  public Mongo mongo() throws Exception {
    return new Mongo();
  }

  @Override
  protected String getDatabaseName() {
    return "springdata";
  }
}
```

This sets up a connection to a local MongoDB instance and enables the detection of Spring Data repositories (through `@EnableMongoRepositories`). The same configuration would look like this in XML:

```xml
<bean id="template" class="org.springframework.data.document.mongodb.MongoTemplate">
  <constructor-arg>
    <bean class="com.mongodb.Mongo">
      <constructor-arg value="localhost" />
      <constructor-arg value="27017" />
    </bean>
  </constructor-arg>
  <constructor-arg value="database" />
  <property name="defaultCollectionName" value="springdata" />
  <constructor-arg>
    <bean class="com.mongodb.Mongo">
      <constructor-arg value="localhost" />
      <constructor-arg value="27017" />
    </bean>
  </constructor-arg>
  <constructor-arg value="database" />
</bean>

<mongo:repositories base-package="com.acme.repository" />
```
This will find the repository interface and register a proxy object in the container. You can use it as shown below:

``java
```java
@Service
public class MyService {

  @Autowired
  private final PersonRepository repository;

  @Autowired
  public MyService(PersonRepository repository) {
    this.repository = repository;
  }

  public void doWork() {

    repository.deleteAll();

@@ -134,16 +130,12 @@ public class MyService {
    person = repository.save(person);

    List<Person> lastNameResults = repository.findByLastname("Gierke");

    List<Person> firstNameResults = repository.findByFirstnameLike("Oli*");
  }
}
```

Contributing to Spring Data
---------------------------
## Contributing to Spring Data

Here are some ways for you to get involved in the community:
pom.xml (47 changes)

@@ -5,7 +5,7 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.2.3.RELEASE</version>
<version>1.3.2.RELEASE</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>

@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>1.0.4.RELEASE</version>
<version>1.2.0.RELEASE</version>
<relativePath>../spring-data-build/parent/pom.xml</relativePath>
</parent>

@@ -29,7 +29,7 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>1.5.2.RELEASE</springdata.commons>
<springdata.commons>1.6.2.RELEASE</springdata.commons>
<mongo>2.10.1</mongo>
</properties>

@@ -37,9 +37,9 @@
<developer>
<id>ogierke</id>
<name>Oliver Gierke</name>
<email>ogierke at vmware.com</email>
<organization>SpringSource</organization>
<organizationUrl>http://www.springsource.com</organizationUrl>
<email>ogierke at gopivotal.com</email>
<organization>Pivotal Inc.</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Project Lean</role>
</roles>

@@ -48,9 +48,9 @@
<developer>
<id>trisberg</id>
<name>Thomas Risberg</name>
<email>trisberg at vmware.com</email>
<organization>SpringSource</organization>
<organizationUrl>http://www.springsource.com</organizationUrl>
<email>trisberg at gopivotal.com</email>
<organization>Pivotal Inc.</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Developer</role>
</roles>

@@ -59,9 +59,9 @@
<developer>
<id>mpollack</id>
<name>Mark Pollack</name>
<email>mpollack at vmware.com</email>
<organization>SpringSource</organization>
<organizationUrl>http://www.springsource.com</organizationUrl>
<email>mpollack at gopivotal.com</email>
<organization>Pivotal Inc.</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Developer</role>
</roles>

@@ -70,14 +70,25 @@
<developer>
<id>jbrisbin</id>
<name>Jon Brisbin</name>
<email>jbrisbin at vmware.com</email>
<organization>SpringSource</organization>
<organizationUrl>http://www.springsource.com</organizationUrl>
<email>jbrisbin at gopivotal.com</email>
<organization>Pivotal Inc.</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Developer</role>
</roles>
<timezone>-6</timezone>
</developer>
<developer>
<id>tdarimont</id>
<name>Thomas Darimont</name>
<email>tdarimont at gopivotal.com</email>
<organization>Pivotal Inc.</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Developer</role>
</roles>
<timezone>+1</timezone>
</developer>
</developers>

<dependencies>

@@ -88,11 +99,11 @@
<version>${mongo}</version>
</dependency>
</dependencies>

<repositories>
<repository>
<id>spring-libs-release</id>
<url>http://repo.springsource.org/libs-release</url>
<id>spring-lib-release</id>
<url>http://repo.springsource.org/libs-release-local</url>
</repository>
</repositories>
@@ -6,7 +6,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.2.3.RELEASE</version>
<version>1.3.2.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -52,7 +52,7 @@
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.2.3.RELEASE</version>
<version>1.3.2.RELEASE</version>
</dependency>

<dependency>

@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.2.3.RELEASE</version>
<version>1.3.2.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.2.3.RELEASE</version>
<version>1.3.2.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<context version="7.0.3.1152">
<context version="7.1.7.187">
<scope name="spring-data-mongodb" type="Project">
<element name="Filter" type="TypeFilterReferenceOverridden">
<element name="org.springframework.data.mongodb.**" type="IncludeTypePattern"/>

@@ -10,6 +10,7 @@
<element name="**.config.**" type="WeakTypePattern"/>
</element>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core"/>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|GridFS"/>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Monitoring"/>
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Repositories"/>
</element>

@@ -93,6 +94,12 @@
<dependency type="AllowedDependency" toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query"/>
</element>
</element>
<element name="API" type="Subsystem">
<element name="Assignment" type="TypeFilter">
<element name="org.springframework.data.mongodb.*" type="IncludeTypePattern"/>
</element>
<stereotype name="Public"/>
</element>
</architecture>
<workspace>
<element name="src/main/java" type="JavaRootDirectory">

@@ -11,7 +11,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.2.3.RELEASE</version>
<version>1.3.2.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -141,8 +141,8 @@
<plugin>
<groupId>com.mysema.maven</groupId>
<artifactId>maven-apt-plugin</artifactId>
<version>1.0.4</version>
<artifactId>apt-maven-plugin</artifactId>
<version>1.0.8</version>
<dependencies>
<dependency>
<groupId>com.mysema.querydsl</groupId>
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -32,6 +32,7 @@ import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.support.CachingIsNewStrategyFactory;

@@ -58,12 +59,12 @@ public abstract class AbstractMongoConfiguration {
protected abstract String getDatabaseName();

/**
* Return the {@link Mongo} instance to connect to.
* Return the {@link Mongo} instance to connect to. Annotate with {@link Bean} in case you want to expose a
* {@link Mongo} instance to the {@link org.springframework.context.ApplicationContext}.
*
* @return
* @throws Exception
*/
@Bean
public abstract Mongo mongo() throws Exception;

/**

@@ -135,6 +136,10 @@ public abstract class AbstractMongoConfiguration {
mappingContext.setInitialEntitySet(getInitialEntitySet());
mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());

if (abbreviateFieldNames()) {
mappingContext.setFieldNamingStrategy(new CamelCaseAbbreviatingFieldNamingStrategy());
}

return mappingContext;
}

@@ -204,4 +209,15 @@ public abstract class AbstractMongoConfiguration {

return initialEntitySet;
}

/**
* Configures whether to abbreviate field names for domain objects by configuring a
* {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced
* customization needs, consider overriding {@link #mappingMongoConverter()}.
*
* @return
*/
protected boolean abbreviateFieldNames() {
return false;
}
}
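A minimal sketch of a configuration class that opts into the abbreviation strategy described in the Javadoc above (the class name and database name are illustrative assumptions):

```java
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;

import com.mongodb.Mongo;

@Configuration
class AbbreviatingMongoConfig extends AbstractMongoConfiguration {

  @Override
  public Mongo mongo() throws Exception {
    return new Mongo();
  }

  @Override
  protected String getDatabaseName() {
    return "springdata";
  }

  // Registers a CamelCaseAbbreviatingFieldNamingStrategy on the MongoMappingContext,
  // so camel-case property names are stored under abbreviated field names.
  @Override
  protected boolean abbreviateFieldNames() {
    return true;
  }
}
```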
@@ -1,11 +1,11 @@
/*
* Copyright (c) 2011 by the original author(s).
* Copyright 2011-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,

@@ -13,11 +13,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.data.mongodb.config;

/**
* @author Jon Brisbin <jbrisbin@vmware.com>
* Constants to declare bean names used by the namespace configuration.
*
* @author Jon Brisbin
* @author Oliver Gierke
* @author Martin Baumgartner
*/
public abstract class BeanNames {

@@ -28,4 +31,6 @@ public abstract class BeanNames {
static final String VALIDATING_EVENT_LISTENER = "validatingMongoEventListener";
static final String IS_NEW_STRATEGY_FACTORY = "isNewStrategyFactory";
static final String DEFAULT_CONVERTER_BEAN_NAME = "mappingConverter";
static final String MONGO_TEMPLATE = "mongoTemplate";
static final String GRID_FS_TEMPLATE = "gridFsTemplate";
}
@@ -0,0 +1,78 @@
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;

import org.springframework.beans.factory.BeanDefinitionStoreException;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser;
import org.springframework.beans.factory.xml.BeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.data.config.BeanComponentDefinitionBuilder;
import org.springframework.data.mongodb.gridfs.GridFsTemplate;
import org.springframework.util.StringUtils;
import org.w3c.dom.Element;

/**
* {@link BeanDefinitionParser} to parse {@code gridFsTemplate} elements into {@link BeanDefinition}s.
*
* @author Martin Baumgartner
*/
class GridFsTemplateParser extends AbstractBeanDefinitionParser {

/*
* (non-Javadoc)
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext)
*/
@Override
protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext)
throws BeanDefinitionStoreException {

String id = super.resolveId(element, definition, parserContext);
return StringUtils.hasText(id) ? id : BeanNames.GRID_FS_TEMPLATE;
}

/*
* (non-Javadoc)
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
*/
@Override
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {

BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext);

String converterRef = element.getAttribute("converter-ref");
String dbFactoryRef = element.getAttribute("db-factory-ref");

BeanDefinitionBuilder gridFsTemplateBuilder = BeanDefinitionBuilder.genericBeanDefinition(GridFsTemplate.class);

if (StringUtils.hasText(dbFactoryRef)) {
gridFsTemplateBuilder.addConstructorArgReference(dbFactoryRef);
} else {
gridFsTemplateBuilder.addConstructorArgReference(BeanNames.DB_FACTORY);
}

if (StringUtils.hasText(converterRef)) {
gridFsTemplateBuilder.addConstructorArgReference(converterRef);
} else {
gridFsTemplateBuilder.addConstructorArgReference(BeanNames.DEFAULT_CONVERTER_BEAN_NAME);
}

return (AbstractBeanDefinition) helper.getComponentIdButFallback(gridFsTemplateBuilder, BeanNames.GRID_FS_TEMPLATE)
.getBeanDefinition();
}
}
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -54,6 +54,7 @@ import org.springframework.data.mapping.context.MappingContextIsNewStrategyFacto
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator;
import org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener;

@@ -69,6 +70,7 @@ import org.w3c.dom.Element;
* @author Jon Brisbin
* @author Oliver Gierke
* @author Maciej Walkowiak
* @author Thomas Darimont
*/
public class MappingMongoConverterParser implements BeanDefinitionParser {

@@ -104,6 +106,11 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
converterBuilder.addConstructorArgReference(dbFactoryRef);
converterBuilder.addConstructorArgReference(ctxRef);

String typeMapperRef = element.getAttribute("type-mapper-ref");
if (StringUtils.hasText(typeMapperRef)) {
converterBuilder.addPropertyReference("typeMapper", typeMapperRef);
}

if (conversionsDefinition != null) {
converterBuilder.addPropertyValue("customConversions", conversionsDefinition);
}

@@ -201,6 +208,12 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
mappingContextBuilder.addPropertyValue("simpleTypeHolder", simpleTypesDefinition);
}

String abbreviateFieldNames = element.getAttribute("abbreviate-field-names");
if ("true".equals(abbreviateFieldNames)) {
mappingContextBuilder.addPropertyValue("fieldNamingStrategy", new RootBeanDefinition(
CamelCaseAbbreviatingFieldNamingStrategy.class));
}

ctxRef = converterId + "." + MAPPING_CONTEXT;

parserContext.registerBeanComponent(componentDefinitionBuilder.getComponent(mappingContextBuilder, ctxRef));

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -24,6 +24,7 @@ import org.springframework.data.repository.config.RepositoryConfigurationExtensi
* {@link org.springframework.beans.factory.xml.NamespaceHandler} for Mongo DB configuration.
*
* @author Oliver Gierke
* @author Martin Baumgartner
*/
public class MongoNamespaceHandler extends NamespaceHandlerSupport {

@@ -42,5 +43,7 @@ public class MongoNamespaceHandler extends NamespaceHandlerSupport {
registerBeanDefinitionParser("db-factory", new MongoDbFactoryParser());
registerBeanDefinitionParser("jmx", new MongoJmxParser());
registerBeanDefinitionParser("auditing", new MongoAuditingBeanDefinitionParser());
registerBeanDefinitionParser("template", new MongoTemplateParser());
registerBeanDefinitionParser("gridFsTemplate", new GridFsTemplateParser());
}
}
@@ -0,0 +1,86 @@
/*
* Copyright 2011-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;

import static org.springframework.data.config.ParsingUtils.*;
import static org.springframework.data.mongodb.config.MongoParsingUtils.*;

import org.springframework.beans.factory.BeanDefinitionStoreException;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.parsing.BeanComponentDefinition;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser;
import org.springframework.beans.factory.xml.BeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.data.config.BeanComponentDefinitionBuilder;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.util.StringUtils;
import org.w3c.dom.Element;

/**
* {@link BeanDefinitionParser} to parse {@code template} elements into {@link BeanDefinition}s.
*
* @author Martin Baumgartner
*/
class MongoTemplateParser extends AbstractBeanDefinitionParser {

/*
* (non-Javadoc)
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext)
*/
@Override
protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext)
throws BeanDefinitionStoreException {

String id = super.resolveId(element, definition, parserContext);
return StringUtils.hasText(id) ? id : BeanNames.MONGO_TEMPLATE;
}

/*
* (non-Javadoc)
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
*/
@Override
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {

BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext);

String converterRef = element.getAttribute("converter-ref");
String dbFactoryRef = element.getAttribute("db-factory-ref");

BeanDefinitionBuilder mongoTemplateBuilder = BeanDefinitionBuilder.genericBeanDefinition(MongoTemplate.class);
setPropertyValue(mongoTemplateBuilder, element, "write-concern", "writeConcern");

if (StringUtils.hasText(dbFactoryRef)) {
mongoTemplateBuilder.addConstructorArgReference(dbFactoryRef);
} else {
mongoTemplateBuilder.addConstructorArgReference(BeanNames.DB_FACTORY);
}

if (StringUtils.hasText(converterRef)) {
mongoTemplateBuilder.addConstructorArgReference(converterRef);
}

BeanDefinitionBuilder writeConcernPropertyEditorBuilder = getWriteConcernPropertyEditorBuilder();

BeanComponentDefinition component = helper.getComponent(writeConcernPropertyEditorBuilder);
parserContext.registerBeanComponent(component);

return (AbstractBeanDefinition) helper.getComponentIdButFallback(mongoTemplateBuilder, BeanNames.MONGO_TEMPLATE)
.getBeanDefinition();
}
}
@@ -1,5 +1,5 @@
/*
* Copyright 2011 the original author or authors.
* Copyright 2011-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -15,6 +15,8 @@
*/
package org.springframework.data.mongodb.core;

import static org.springframework.data.domain.Sort.Direction.*;

import java.util.ArrayList;
import java.util.List;

@@ -22,7 +24,6 @@ import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexField;
import org.springframework.data.mongodb.core.index.IndexInfo;
import org.springframework.data.mongodb.core.query.Order;
import org.springframework.util.Assert;

import com.mongodb.DBCollection;

@@ -34,9 +35,13 @@ import com.mongodb.MongoException;
*
* @author Mark Pollack
* @author Oliver Gierke
* @author Komi Innocent
*/
public class DefaultIndexOperations implements IndexOperations {

private static final Double ONE = Double.valueOf(1);
private static final Double MINUS_ONE = Double.valueOf(-1);

private final MongoOperations mongoOperations;
private final String collectionName;

@@ -135,12 +140,17 @@ public class DefaultIndexOperations implements IndexOperations {

Object value = keyDbObject.get(key);

if (Integer.valueOf(1).equals(value)) {
indexFields.add(IndexField.create(key, Order.ASCENDING));
} else if (Integer.valueOf(-1).equals(value)) {
indexFields.add(IndexField.create(key, Order.DESCENDING));
} else if ("2d".equals(value)) {
if ("2d".equals(value)) {
indexFields.add(IndexField.geo(key));
} else {

Double keyValue = new Double(value.toString());

if (ONE.equals(keyValue)) {
indexFields.add(IndexField.create(key, ASC));
} else if (MINUS_ONE.equals(keyValue)) {
indexFields.add(IndexField.create(key, DESC));
}
}
}

@@ -13,10 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;
package org.springframework.data.mongodb.core;

import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.data.mongodb.core.MongoActionOperation;
import org.springframework.util.Assert;

import com.mongodb.WriteResult;
@@ -1,5 +1,5 @@
/*
* Copyright 2010-2011 the original author or authors.
* Copyright 2010-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -26,14 +26,13 @@ import com.mongodb.DB;
import com.mongodb.Mongo;

/**
* Helper class featuring helper methods for internal MongoDb classes.
* <p/>
* <p>
* Mainly intended for internal use within the framework.
* Helper class featuring helper methods for internal MongoDb classes. Mainly intended for internal use within the
* framework.
*
* @author Thomas Risberg
* @author Graeme Rocher
* @author Oliver Gierke
* @author Randy Watler
* @since 1.0
*/
public abstract class MongoDbUtils {

@@ -131,8 +130,11 @@ public abstract class MongoDbUtils {
holderToUse.addDB(databaseName, db);
}

TransactionSynchronizationManager.registerSynchronization(new MongoSynchronization(holderToUse, mongo));
holderToUse.setSynchronizedWithTransaction(true);
// synchronize holder only if not yet synchronized
if (!holderToUse.isSynchronizedWithTransaction()) {
TransactionSynchronizationManager.registerSynchronization(new MongoSynchronization(holderToUse, mongo));
holderToUse.setSynchronizedWithTransaction(true);
}

if (holderToUse != dbHolder) {
TransactionSynchronizationManager.bindResource(mongo, holderToUse);

@@ -1,5 +1,5 @@
/*
* Copyright 2010-2012 the original author or authors.
* Copyright 2010-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -148,6 +148,7 @@ public class MongoFactoryBean implements FactoryBean<Mongo>, InitializingBean, D
* (non-Javadoc)
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
*/
@SuppressWarnings("deprecation")
public void afterPropertiesSet() throws Exception {

Mongo mongo;
@@ -1,5 +1,5 @@
/*
* Copyright 2010-2011 the original author or authors.
* Copyright 2010-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -19,6 +19,9 @@ import java.util.Collection;
import java.util.List;
import java.util.Set;

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.geo.GeoResult;
import org.springframework.data.mongodb.core.geo.GeoResults;

@@ -45,6 +48,8 @@ import com.mongodb.WriteResult;
* @author Thomas Risberg
* @author Mark Pollack
* @author Oliver Gierke
* @author Tobias Trelle
* @author Chuong Ngo
*/
public interface MongoOperations {

@@ -301,6 +306,57 @@ public interface MongoOperations {
*/
<T> GroupByResults<T> group(Criteria criteria, String inputCollectionName, GroupBy groupBy, Class<T> entityClass);

/**
* Execute an aggregation operation. The raw results will be mapped to the given entity class. The name of the
* inputCollection is derived from the inputType of the aggregation.
*
* @param aggregation The {@link TypedAggregation} specification holding the aggregation operations, must not be
* {@literal null}.
* @param collectionName The name of the input collection to use for the aggreation.
* @param outputType The parameterized type of the returned list, must not be {@literal null}.
* @return The results of the aggregation operation.
* @since 1.3
*/
<O> AggregationResults<O> aggregate(TypedAggregation<?> aggregation, String collectionName, Class<O> outputType);

/**
* Execute an aggregation operation. The raw results will be mapped to the given entity class. The name of the
* inputCollection is derived from the inputType of the aggregation.
*
* @param aggregation The {@link TypedAggregation} specification holding the aggregation operations, must not be
* {@literal null}.
* @param outputType The parameterized type of the returned list, must not be {@literal null}.
* @return The results of the aggregation operation.
* @since 1.3
*/
<O> AggregationResults<O> aggregate(TypedAggregation<?> aggregation, Class<O> outputType);

/**
* Execute an aggregation operation. The raw results will be mapped to the given entity class.
*
* @param aggregation The {@link Aggregation} specification holding the aggregation operations, must not be
* {@literal null}.
* @param inputType the inputType where the aggregation operation will read from, must not be {@literal null} or
* empty.
* @param outputType The parameterized type of the returned list, must not be {@literal null}.
* @return The results of the aggregation operation.
* @since 1.3
*/
<O> AggregationResults<O> aggregate(Aggregation aggregation, Class<?> inputType, Class<O> outputType);

/**
* Execute an aggregation operation. The raw results will be mapped to the given entity class.
*
* @param aggregation The {@link Aggregation} specification holding the aggregation operations, must not be
* {@literal null}.
* @param collectionName the collection where the aggregation operation will read from, must not be {@literal null} or
* empty.
* @param outputType The parameterized type of the returned list, must not be {@literal null}.
* @return The results of the aggregation operation.
* @since 1.3
*/
<O> AggregationResults<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType);
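A caller-side sketch of the aggregate variants declared above; the `Person`/`LastnameCount` types, field names, and pipeline stages are assumptions for the example:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.query.Criteria;

class AggregationUsage {

  void run(MongoOperations operations) {

    // Typed variant: the input collection is derived from the Person type.
    TypedAggregation<Person> typed = newAggregation(Person.class,
        group("lastname").count().as("count"),
        sort(Direction.DESC, "count"));
    AggregationResults<LastnameCount> counts = operations.aggregate(typed, LastnameCount.class);

    // Untyped variant reading from an explicitly named collection.
    Aggregation untyped = newAggregation(
        match(Criteria.where("age").gte(18)),
        project("firstname", "lastname"));
    AggregationResults<Person> adults = operations.aggregate(untyped, "person", Person.class);

    counts.getMappedResults();
    adults.getMappedResults();
  }
}
```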
/**
* Execute a map-reduce operation. The map-reduce operation will be formed with an output type of INLINE
*

@@ -408,11 +464,16 @@
* specification
* @param entityClass the parameterized type of the returned list.
* @param collectionName name of the collection to retrieve the objects from
*
* @return the converted object
*/
<T> T findOne(Query query, Class<T> entityClass, String collectionName);

boolean exists(Query query, String collectionName);

boolean exists(Query query, Class<?> entityClass);

boolean exists(Query query, Class<?> entityClass, String collectionName);

/**
* Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type.
* <p/>

@@ -442,7 +503,6 @@
* specification
* @param entityClass the parameterized type of the returned list.
* @param collectionName name of the collection to retrieve the objects from
*
* @return the List of converted objects
*/
<T> List<T> find(Query query, Class<T> entityClass, String collectionName);

@@ -464,7 +524,6 @@
* @param id the id of the document to return
* @param entityClass the type to convert the document to
* @param collectionName the collection to query for the document
*
* @param <T>
* @return
*/

@@ -510,7 +569,6 @@
* specification
* @param entityClass the parameterized type of the returned list.
* @param collectionName name of the collection to retrieve the objects from
*
* @return the converted object
*/
<T> T findAndRemove(Query query, Class<T> entityClass, String collectionName);

@@ -646,6 +704,18 @@
*/
WriteResult upsert(Query query, Update update, String collectionName);

/**
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
* combining the query document and the update document.
*
* @param query the query document that specifies the criteria used to select a record to be upserted
* @param update the update document that contains the updated object or $ operators to manipulate the existing object
* @param entityClass class of the pojo to be operated on
* @param collectionName name of the collection to update the object in
* @return the WriteResult which lets you access the results of the previous write.
*/
WriteResult upsert(Query query, Update update, Class<?> entityClass, String collectionName);

/**
* Updates the first object that is found in the collection of the entity class that matches the query document with
* the provided update document.

@@ -670,6 +740,19 @@
*/
WriteResult updateFirst(Query query, Update update, String collectionName);

/**
* Updates the first object that is found in the specified collection that matches the query document criteria with
* the provided updated document.
*
* @param query the query document that specifies the criteria used to select a record to be updated
* @param update the update document that contains the updated object or $ operators to manipulate the existing
* object.
* @param entityClass class of the pojo to be operated on
* @param collectionName name of the collection to update the object in
* @return the WriteResult which lets you access the results of the previous write.
*/
WriteResult updateFirst(Query query, Update update, Class<?> entityClass, String collectionName);

/**
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
* with the provided updated document.

@@ -694,6 +777,19 @@
*/
WriteResult updateMulti(Query query, Update update, String collectionName);

/**
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
* with the provided updated document.
*
* @param query the query document that specifies the criteria used to select a record to be updated
* @param update the update document that contains the updated object or $ operators to manipulate the existing
* object.
* @param entityClass class of the pojo to be operated on
* @param collectionName name of the collection to update the object in
* @return the WriteResult which lets you access the results of the previous write.
*/
WriteResult updateMulti(final Query query, final Update update, Class<?> entityClass, String collectionName);
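A caller-side sketch of the update variants documented above; the collection name, field names, and the `Person` type are assumptions for the example:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

class UpdateUsage {

  void run(MongoOperations operations) {

    Query byLastname = new Query(where("lastname").is("Gierke"));

    // Update the first document in "person" that matches the query.
    operations.updateFirst(byLastname, new Update().set("city", "Dresden"), Person.class, "person");

    // Update every matching document.
    operations.updateMulti(byLastname, new Update().inc("visits", 1), Person.class, "person");

    // Insert a document combined from query and update if nothing matches, otherwise update it.
    operations.upsert(byLastname, new Update().set("active", true), Person.class, "person");
  }
}
```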
/**
* Remove the given object from the collection by id.
*

@@ -713,11 +809,12 @@
* Remove all documents that match the provided query document criteria from the the collection used to store the
* entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query.
*
* @param <T>
* @param query
* @param entityClass
*/
<T> void remove(Query query, Class<T> entityClass);
void remove(Query query, Class<?> entityClass);

void remove(Query query, Class<?> entityClass, String collectionName);

/**
* Remove all documents from the specified collection that match the provided query document criteria. There is no
@@ -52,12 +52,18 @@ import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.model.BeanWrapper;
import org.springframework.data.mapping.model.MappingException;
import org.springframework.data.mongodb.MongoDataIntegrityViolationException;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.aggregation.Fields;
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.convert.MongoWriter;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.geo.Distance;
import org.springframework.data.mongodb.core.geo.GeoResult;
import org.springframework.data.mongodb.core.geo.GeoResults;

@@ -69,9 +75,11 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent;
import org.springframework.data.mongodb.core.mapping.event.AfterDeleteEvent;
import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeDeleteEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
import org.springframework.data.mongodb.core.mapreduce.GroupBy;

@@ -112,6 +120,10 @@ import com.mongodb.util.JSONParseException;
* @author Oliver Gierke
* @author Amol Nayak
* @author Patryk Wasik
* @author Tobias Trelle
* @author Sebastian Herold
* @author Thomas Darimont
* @author Chuong Ngo
*/
public class MongoTemplate implements MongoOperations, ApplicationContextAware {

@@ -134,7 +146,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
private final MongoDbFactory mongoDbFactory;
private final MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator();
private final QueryMapper mapper;
private final QueryMapper queryMapper;
private final UpdateMapper updateMapper;

private WriteConcern writeConcern;
private WriteConcernResolver writeConcernResolver = DefaultWriteConcernResolver.INSTANCE;

@@ -187,7 +200,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {

this.mongoDbFactory = mongoDbFactory;
this.mongoConverter = mongoConverter == null ? getDefaultMongoConverter(mongoDbFactory) : mongoConverter;
this.mapper = new QueryMapper(this.mongoConverter);
this.queryMapper = new QueryMapper(this.mongoConverter);
this.updateMapper = new UpdateMapper(this.mongoConverter);

// We always have a mapping context in the converter, whether it's a simple one or not
mappingContext = this.mongoConverter.getMappingContext();

@@ -480,6 +494,24 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
}
}

public boolean exists(Query query, Class<?> entityClass) {
return exists(query, entityClass, determineCollectionName(entityClass));
}

public boolean exists(Query query, String collectionName) {
return exists(query, null, collectionName);
}

public boolean exists(Query query, Class<?> entityClass, String collectionName) {

if (query == null) {
throw new InvalidDataAccessApiUsageException("Query passed in to exist can't be null");
}

DBObject mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), getPersistentEntity(entityClass));
return execute(collectionName, new FindCallback(mappedQuery)).hasNext();
}
// Find methods that take a Query to express the query and that return a List of objects.
|
||||
|
||||
public <T> List<T> find(Query query, Class<T> entityClass) {
|
||||
@@ -534,8 +566,26 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
mongoConverter, entityClass), near.getMetric());
|
||||
List<GeoResult<T>> result = new ArrayList<GeoResult<T>>(results.size());
|
||||
|
||||
int index = 0;
|
||||
int elementsToSkip = near.getSkip() != null ? near.getSkip() : 0;
|
||||
|
||||
for (Object element : results) {
|
||||
result.add(callback.doWith((DBObject) element));
|
||||
|
||||
/*
|
||||
* As MongoDB currently (2.4.4) doesn't support the skipping of elements in near queries
|
||||
* we skip the elements ourselves to avoid at least the document 2 object mapping overhead.
|
||||
*
|
||||
* @see https://jira.mongodb.org/browse/SERVER-3925
|
||||
*/
|
||||
if (index >= elementsToSkip) {
|
||||
result.add(callback.doWith((DBObject) element));
|
||||
}
|
||||
index++;
|
||||
}
|
||||
|
||||
if (elementsToSkip > 0) {
|
||||
// as we skipped some elements we have to calculate the averageDistance ourselves:
|
||||
return new GeoResults<T>(result, near.getMetric());
|
||||
}
|
||||
|
||||
DBObject stats = (DBObject) commandResult.get("stats");
|
||||
@@ -585,7 +635,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
private long count(Query query, Class<?> entityClass, String collectionName) {
|
||||
|
||||
Assert.hasText(collectionName);
|
||||
final DBObject dbObject = query == null ? null : mapper.getMappedObject(query.getQueryObject(),
|
||||
final DBObject dbObject = query == null ? null : queryMapper.getMappedObject(query.getQueryObject(),
|
||||
entityClass == null ? null : mappingContext.getPersistentEntity(entityClass));
|
||||
|
||||
return execute(collectionName, new CollectionCallback<Long>() {
|
||||
@@ -650,10 +700,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
initializeVersionProperty(objectToSave);
|
||||
|
||||
BasicDBObject dbDoc = new BasicDBObject();
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave));
|
||||
writer.write(objectToSave, dbDoc);
|
||||
|
||||
DBObject dbDoc = toDbObject(objectToSave, writer);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc));
|
||||
Object id = insertDBObject(collectionName, dbDoc, objectToSave.getClass());
|
||||
@@ -662,6 +711,26 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbDoc));
|
||||
}
|
||||
|
||||
/**
 * Converts the given object into a {@link DBObject}. {@link String} values are parsed as JSON documents, all other
 * objects are converted using the given {@link MongoWriter}.
 *
 * @param objectToSave the object to convert.
 * @param writer the writer to use for the conversion.
 * @return the resulting {@link DBObject}.
 */
|
||||
private <T> DBObject toDbObject(T objectToSave, MongoWriter<T> writer) {
|
||||
|
||||
if (!(objectToSave instanceof String)) {
|
||||
DBObject dbDoc = new BasicDBObject();
|
||||
writer.write(objectToSave, dbDoc);
|
||||
return dbDoc;
|
||||
} else {
|
||||
try {
|
||||
return (DBObject) JSON.parse((String) objectToSave);
|
||||
} catch (JSONParseException e) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
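
Because `toDbObject(…)` treats `String` payloads as raw JSON, plain JSON documents can be handed to the save and insert methods directly. A hedged sketch (collection name and document content are illustrative):

```java
import org.springframework.data.mongodb.core.MongoTemplate;

// Sketch only: assumes template is a configured MongoTemplate and that the
// "persons" collection may be created implicitly.
class JsonStringSaveExample {

	void saveRawJson(MongoTemplate template) {
		// The String is parsed via JSON.parse(..); invalid JSON results in a MappingException.
		template.save("{ \"firstname\" : \"Dave\", \"lastname\" : \"Matthews\" }", "persons");
	}
}
```
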
|
||||
private void initializeVersionProperty(Object entity) {
|
||||
|
||||
MongoPersistentEntity<?> mongoPersistentEntity = getPersistentEntity(entity.getClass());
|
||||
@@ -801,19 +870,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
assertUpdateableIdIfNotSet(objectToSave);
|
||||
|
||||
DBObject dbDoc = new BasicDBObject();
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave));
|
||||
|
||||
if (!(objectToSave instanceof String)) {
|
||||
writer.write(objectToSave, dbDoc);
|
||||
} else {
|
||||
try {
|
||||
dbDoc = (DBObject) JSON.parse((String) objectToSave);
|
||||
} catch (JSONParseException e) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", e);
|
||||
}
|
||||
}
|
||||
DBObject dbDoc = toDbObject(objectToSave, writer);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc));
|
||||
Object id = saveDBObject(collectionName, dbDoc, objectToSave.getClass());
|
||||
@@ -897,6 +956,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return doUpdate(collectionName, query, update, null, true, false);
|
||||
}
|
||||
|
||||
public WriteResult upsert(Query query, Update update, Class<?> entityClass, String collectionName) {
|
||||
return doUpdate(collectionName, query, update, entityClass, true, false);
|
||||
}
|
||||
|
||||
public WriteResult updateFirst(Query query, Update update, Class<?> entityClass) {
|
||||
return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, false);
|
||||
}
|
||||
@@ -905,6 +968,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return doUpdate(collectionName, query, update, null, false, false);
|
||||
}
|
||||
|
||||
public WriteResult updateFirst(Query query, Update update, Class<?> entityClass, String collectionName) {
|
||||
return doUpdate(collectionName, query, update, entityClass, false, false);
|
||||
}
|
||||
|
||||
public WriteResult updateMulti(Query query, Update update, Class<?> entityClass) {
|
||||
return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, true);
|
||||
}
|
||||
@@ -913,6 +980,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return doUpdate(collectionName, query, update, null, false, true);
|
||||
}
|
||||
|
||||
public WriteResult updateMulti(final Query query, final Update update, Class<?> entityClass, String collectionName) {
|
||||
return doUpdate(collectionName, query, update, entityClass, false, true);
|
||||
}
|
||||
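
The additional `upsert`/`updateFirst`/`updateMulti` overloads accept both an entity type (used for property-to-field mapping) and an explicit collection name. A brief sketch, again assuming a hypothetical `Person` type:

```java
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

// Sketch only: Person, the "people" collection and the field names are assumptions.
class UpdateOverloadExample {

	void renameCity(MongoTemplate template) {

		Query query = new Query(Criteria.where("address.city").is("NYC"));
		Update update = new Update().set("address.city", "New York");

		// The entity type drives the field mapping, the collection name is explicit.
		template.updateMulti(query, update, Person.class, "people");
	}

	static class Person {
		String id;
		Address address;
	}

	static class Address {
		String city;
	}
}
```
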
|
||||
protected WriteResult doUpdate(final String collectionName, final Query query, final Update update,
|
||||
final Class<?> entityClass, final boolean upsert, final boolean multi) {
|
||||
|
||||
@@ -921,10 +992,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
MongoPersistentEntity<?> entity = entityClass == null ? null : getPersistentEntity(entityClass);
|
||||
|
||||
DBObject queryObj = query == null ? new BasicDBObject()
|
||||
: mapper.getMappedObject(query.getQueryObject(), entity);
|
||||
DBObject updateObj = update == null ? new BasicDBObject() : mapper.getMappedObject(update.getUpdateObject(),
|
||||
DBObject queryObj = query == null ? new BasicDBObject() : queryMapper.getMappedObject(query.getQueryObject(),
|
||||
entity);
|
||||
DBObject updateObj = update == null ? new BasicDBObject() : updateMapper.getMappedObject(
|
||||
update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Calling update using query: " + queryObj + " and update: " + updateObj + " in collection: "
|
||||
@@ -1014,24 +1085,35 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
}
|
||||
|
||||
public <T> void remove(Query query, Class<T> entityClass) {
|
||||
Assert.notNull(query);
|
||||
doRemove(determineCollectionName(entityClass), query, entityClass);
|
||||
public void remove(Query query, String collectionName) {
|
||||
remove(query, null, collectionName);
|
||||
}
|
||||
|
||||
public void remove(Query query, Class<?> entityClass) {
|
||||
remove(query, entityClass, determineCollectionName(entityClass));
|
||||
}
|
||||
|
||||
public void remove(Query query, Class<?> entityClass, String collectionName) {
|
||||
doRemove(collectionName, query, entityClass);
|
||||
}
|
||||
|
||||
protected <T> void doRemove(final String collectionName, final Query query, final Class<T> entityClass) {
|
||||
|
||||
if (query == null) {
|
||||
throw new InvalidDataAccessApiUsageException("Query passed in to remove can't be null");
|
||||
throw new InvalidDataAccessApiUsageException("Query passed in to remove can't be null!");
|
||||
}
|
||||
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
|
||||
final DBObject queryObject = query.getQueryObject();
|
||||
final MongoPersistentEntity<?> entity = getPersistentEntity(entityClass);
|
||||
|
||||
execute(collectionName, new CollectionCallback<Void>() {
|
||||
public Void doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
|
||||
DBObject dboq = mapper.getMappedObject(queryObject, entity);
|
||||
maybeEmitEvent(new BeforeDeleteEvent<T>(queryObject, entityClass));
|
||||
|
||||
DBObject dboq = queryMapper.getMappedObject(queryObject, entity);
|
||||
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName,
|
||||
entityClass, null, queryObject);
|
||||
@@ -1044,15 +1126,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
WriteResult wr = writeConcernToUse == null ? collection.remove(dboq) : collection.remove(dboq,
|
||||
writeConcernToUse);
|
||||
handleAnyWriteResultErrors(wr, dboq, MongoActionOperation.REMOVE);
|
||||
|
||||
maybeEmitEvent(new AfterDeleteEvent<T>(queryObject, entityClass));
|
||||
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
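
The reworked `remove(…)` overloads mirror the update API: the entity type is used for query mapping while the collection can be named explicitly, and `doRemove(…)` now publishes delete lifecycle events. A small sketch (the `Person` type and collection name are assumptions):

```java
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

// Sketch only: Person and the "people" collection are illustrative.
class RemoveOverloadExample {

	void removeByLastname(MongoTemplate template, String lastname) {
		// Uses the new remove(Query, Class, String) overload added in this change.
		template.remove(new Query(Criteria.where("lastname").is(lastname)), Person.class, "people");
	}

	static class Person {
		String id;
		String lastname;
	}
}
```
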
|
||||
public void remove(final Query query, String collectionName) {
|
||||
doRemove(collectionName, query, null);
|
||||
}
|
||||
|
||||
public <T> List<T> findAll(Class<T> entityClass) {
|
||||
return executeFindMultiInternal(new FindCallback(null), null, new ReadDbObjectCallback<T>(mongoConverter,
|
||||
entityClass), determineCollectionName(entityClass));
|
||||
@@ -1127,7 +1208,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
if (criteria == null) {
|
||||
dbo.put("cond", null);
|
||||
} else {
|
||||
dbo.put("cond", mapper.getMappedObject(criteria.getCriteriaObject(), null));
|
||||
dbo.put("cond", queryMapper.getMappedObject(criteria.getCriteriaObject(), null));
|
||||
}
|
||||
// If initial document was a JavaScript string, potentially loaded by Spring's Resource abstraction, load it and
|
||||
// convert to DBObject
|
||||
@@ -1176,6 +1257,64 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public <O> AggregationResults<O> aggregate(TypedAggregation<?> aggregation, Class<O> outputType) {
|
||||
return aggregate(aggregation, determineCollectionName(aggregation.getInputType()), outputType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <O> AggregationResults<O> aggregate(TypedAggregation<?> aggregation, String inputCollectionName,
|
||||
Class<O> outputType) {
|
||||
|
||||
Assert.notNull(aggregation, "Aggregation pipeline must not be null!");
|
||||
|
||||
AggregationOperationContext context = new TypeBasedAggregationOperationContext(aggregation.getInputType(),
|
||||
mappingContext, queryMapper);
|
||||
return aggregate(aggregation, inputCollectionName, outputType, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <O> AggregationResults<O> aggregate(Aggregation aggregation, Class<?> inputType, Class<O> outputType) {
|
||||
|
||||
return aggregate(aggregation, determineCollectionName(inputType), outputType,
|
||||
new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper));
|
||||
}
|
||||
|
||||
@Override
|
||||
public <O> AggregationResults<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType) {
|
||||
return aggregate(aggregation, collectionName, outputType, null);
|
||||
}
|
||||
|
||||
protected <O> AggregationResults<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType,
|
||||
AggregationOperationContext context) {
|
||||
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
Assert.notNull(aggregation, "Aggregation pipeline must not be null!");
|
||||
Assert.notNull(outputType, "Output type must not be null!");
|
||||
|
||||
AggregationOperationContext rootContext = context == null ? Aggregation.DEFAULT_CONTEXT : context;
|
||||
DBObject command = aggregation.toDbObject(collectionName, rootContext);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command));
|
||||
}
|
||||
|
||||
CommandResult commandResult = executeCommand(command);
|
||||
handleCommandError(commandResult, command);
|
||||
|
||||
// map results
|
||||
@SuppressWarnings("unchecked")
|
||||
Iterable<DBObject> resultSet = (Iterable<DBObject>) commandResult.get("result");
|
||||
List<O> mappedResults = new ArrayList<O>();
|
||||
DbObjectCallback<O> callback = new UnwrapAndReadDbObjectCallback<O>(mongoConverter, outputType);
|
||||
|
||||
for (DBObject dbObject : resultSet) {
|
||||
mappedResults.add(callback.doWith(dbObject));
|
||||
}
|
||||
|
||||
return new AggregationResults<O>(mappedResults, commandResult);
|
||||
}
|
||||
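
The new `aggregate(…)` methods execute a pipeline built with the `Aggregation` factory methods (shown further below) and map the command result onto the given output type. A minimal end-to-end sketch, assuming a hypothetical `zips` collection and a simple result type:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import java.util.List;

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.query.Criteria;

// Sketch only: the "zips" collection, its field names and ZipResult are assumptions.
class AggregateUsageExample {

	List<ZipResult> largestZips(MongoTemplate template) {

		Aggregation aggregation = newAggregation( //
				match(Criteria.where("population").gt(0)), //
				project("city", "state", "population"), //
				sort(Direction.DESC, "population"), //
				limit(10));

		AggregationResults<ZipResult> results = template.aggregate(aggregation, "zips", ZipResult.class);
		return results.getMappedResults();
	}

	static class ZipResult {
		String city;
		String state;
		int population;
	}
}
```
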
|
||||
protected String replaceWithResourceIfNecessary(String function) {
|
||||
|
||||
String func = function;
|
||||
@@ -1284,57 +1423,28 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter
|
||||
* <p/>
|
||||
* The query document is specified as a standard DBObject and so is the fields specification.
|
||||
* Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter.
|
||||
* The query document is specified as a standard {@link DBObject} and so is the fields specification.
|
||||
*
|
||||
* @param collectionName name of the collection to retrieve the objects from
|
||||
* @param query the query document that specifies the criteria used to find a record
|
||||
* @param fields the document that specifies the fields to be returned
|
||||
* @param collectionName name of the collection to retrieve the objects from.
|
||||
* @param query the query document that specifies the criteria used to find a record.
|
||||
* @param fields the document that specifies the fields to be returned.
|
||||
* @param entityClass the parameterized type of the returned list.
|
||||
* @return the List of converted objects.
|
||||
* @return the {@link List} of converted objects.
|
||||
*/
|
||||
protected <T> T doFindOne(String collectionName, DBObject query, DBObject fields, Class<T> entityClass) {
|
||||
EntityReader<? super T, DBObject> readerToUse = this.mongoConverter;
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
DBObject mappedQuery = mapper.getMappedObject(query, entity);
|
||||
|
||||
return executeFindOneInternal(new FindOneCallback(mappedQuery, fields), new ReadDbObjectCallback<T>(readerToUse,
|
||||
entityClass), collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the default MongoDB collection to a List of the specified type. The object is
|
||||
* converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless configured
|
||||
* otherwise, an instance of MappingMongoConverter will be used. The query document is specified as a standard
|
||||
* DBObject and so is the fields specification. Can be overridden by subclasses.
|
||||
*
|
||||
* @param collectionName name of the collection to retrieve the objects from
|
||||
* @param query the query document that specifies the criteria used to find a record
|
||||
* @param fields the document that specifies the fields to be returned
|
||||
* @param entityClass the parameterized type of the returned list.
|
||||
* @param preparer allows for customization of the DBCursor used when iterating over the result set, (apply limits,
|
||||
* skips and so on).
|
||||
* @return the List of converted objects.
|
||||
*/
|
||||
protected <T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<T> entityClass,
|
||||
CursorPreparer preparer) {
|
||||
return doFind(collectionName, query, fields, entityClass, preparer, new ReadDbObjectCallback<T>(mongoConverter,
|
||||
entityClass));
|
||||
}
|
||||
|
||||
protected <S, T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<S> entityClass,
|
||||
CursorPreparer preparer, DbObjectCallback<T> objectCallback) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
DBObject mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
DBObject mappedFields = fields == null ? null : queryMapper.getMappedObject(fields, entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), fields, entityClass, collectionName));
|
||||
LOGGER.debug(String.format("findOne using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), mappedFields, entityClass, collectionName));
|
||||
}
|
||||
|
||||
return executeFindMultiInternal(new FindCallback(mapper.getMappedObject(query, entity), fields), preparer,
|
||||
objectCallback, collectionName);
|
||||
return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields), new ReadDbObjectCallback<T>(
|
||||
this.mongoConverter, entityClass), collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1348,14 +1458,43 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
* @return the List of converted objects.
|
||||
*/
|
||||
protected <T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<T> entityClass) {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("find using query: " + query + " fields: " + fields + " for class: " + entityClass
|
||||
+ " in collection: " + collectionName);
|
||||
}
|
||||
EntityReader<? super T, DBObject> readerToUse = this.mongoConverter;
|
||||
return doFind(collectionName, query, fields, entityClass, null, new ReadDbObjectCallback<T>(this.mongoConverter,
|
||||
entityClass));
|
||||
}
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the default MongoDB collection to a List of the specified type. The object is
|
||||
* converted from the MongoDB native representation using an instance of {@see MongoConverter}. The query document is
|
||||
* specified as a standard DBObject and so is the fields specification.
|
||||
*
|
||||
* @param collectionName name of the collection to retrieve the objects from.
|
||||
* @param query the query document that specifies the criteria used to find a record.
|
||||
* @param fields the document that specifies the fields to be returned.
|
||||
* @param entityClass the parameterized type of the returned list.
|
||||
* @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply
|
||||
* limits, skips and so on).
|
||||
* @return the {@link List} of converted objects.
|
||||
*/
|
||||
protected <T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<T> entityClass,
|
||||
CursorPreparer preparer) {
|
||||
return doFind(collectionName, query, fields, entityClass, preparer, new ReadDbObjectCallback<T>(mongoConverter,
|
||||
entityClass));
|
||||
}
|
||||
|
||||
protected <S, T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<S> entityClass,
|
||||
CursorPreparer preparer, DbObjectCallback<T> objectCallback) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
return executeFindMultiInternal(new FindCallback(mapper.getMappedObject(query, entity), fields), null,
|
||||
new ReadDbObjectCallback<T>(readerToUse, entityClass), collectionName);
|
||||
DBObject mappedFields = fields == null ? null : queryMapper.getMappedObject(fields, entity);
|
||||
DBObject mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), mappedFields, entityClass, collectionName));
|
||||
}
|
||||
|
||||
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback,
|
||||
collectionName);
|
||||
}
|
||||
|
||||
protected DBObject convertToDbObject(CollectionOptions collectionOptions) {
|
||||
@@ -1393,7 +1532,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
+ entityClass + " in collection: " + collectionName);
|
||||
}
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
return executeFindOneInternal(new FindAndRemoveCallback(mapper.getMappedObject(query, entity), fields, sort),
|
||||
return executeFindOneInternal(new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort),
|
||||
new ReadDbObjectCallback<T>(readerToUse, entityClass), collectionName);
|
||||
}
|
||||
|
||||
@@ -1408,8 +1547,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
DBObject mappedQuery = mapper.getMappedObject(query, entity);
|
||||
DBObject mappedUpdate = queryMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
DBObject mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findAndModify using query: " + mappedQuery + " fields: " + fields + " sort: " + sort
|
||||
@@ -1841,6 +1980,35 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
}
|
||||
|
||||
class UnwrapAndReadDbObjectCallback<T> extends ReadDbObjectCallback<T> {
|
||||
|
||||
public UnwrapAndReadDbObjectCallback(EntityReader<? super T, DBObject> reader, Class<T> type) {
|
||||
super(reader, type);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T doWith(DBObject object) {
|
||||
|
||||
Object idField = object.get(Fields.UNDERSCORE_ID);
|
||||
|
||||
if (!(idField instanceof DBObject)) {
|
||||
return super.doWith(object);
|
||||
}
|
||||
|
||||
DBObject toMap = new BasicDBObject();
|
||||
DBObject nested = (DBObject) idField;
|
||||
toMap.putAll(nested);
|
||||
|
||||
for (String key : object.keySet()) {
|
||||
if (!Fields.UNDERSCORE_ID.equals(key)) {
|
||||
toMap.put(key, object.get(key));
|
||||
}
|
||||
}
|
||||
|
||||
return super.doWith(toMap);
|
||||
}
|
||||
}
|
||||
|
||||
private enum DefaultWriteConcernResolver implements WriteConcernResolver {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@@ -0,0 +1,286 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import static org.springframework.data.mongodb.core.aggregation.Fields.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* An {@code Aggregation} is a representation of a list of aggregation steps to be performed by the MongoDB Aggregation
|
||||
* Framework.
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class Aggregation {
|
||||
|
||||
public static final AggregationOperationContext DEFAULT_CONTEXT = new NoOpAggregationOperationContext();
|
||||
|
||||
private final List<AggregationOperation> operations;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public static Aggregation newAggregation(AggregationOperation... operations) {
|
||||
return new Aggregation(operations);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypedAggregation} for the given type and {@link AggregationOperation}s.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public static <T> TypedAggregation<T> newAggregation(Class<T> type, AggregationOperation... operations) {
|
||||
return new TypedAggregation<T>(type, operations);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
* @param aggregationOperations must not be {@literal null} or empty.
|
||||
*/
|
||||
protected Aggregation(AggregationOperation... aggregationOperations) {
|
||||
|
||||
Assert.notNull(aggregationOperations, "AggregationOperations must not be null!");
|
||||
Assert.isTrue(aggregationOperations.length > 0, "At least one AggregationOperation has to be provided");
|
||||
|
||||
this.operations = Arrays.asList(aggregationOperations);
|
||||
}
|
||||
|
||||
/**
|
||||
* A pointer to the previous {@link AggregationOperation}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public static String previousOperation() {
|
||||
return "_id";
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} including the given fields.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static ProjectionOperation project(String... fields) {
|
||||
return project(fields(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} including the given {@link Fields}.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static ProjectionOperation project(Fields fields) {
|
||||
return new ProjectionOperation(fields);
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to create a new {@link UnwindOperation} for the field with the given name.
|
||||
*
|
||||
* @param field must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public static UnwindOperation unwind(String field) {
|
||||
return new UnwindOperation(field(field));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} for the given fields.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static GroupOperation group(String... fields) {
|
||||
return group(fields(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} for the given {@link Fields}.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static GroupOperation group(Fields fields) {
|
||||
return new GroupOperation(fields);
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to create a new {@link SortOperation} for the given {@link Sort}.
|
||||
*
|
||||
* @param sort must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static SortOperation sort(Sort sort) {
|
||||
return new SortOperation(sort);
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to create a new {@link SortOperation} for the given sort {@link Direction} and {@code fields}.
|
||||
*
|
||||
* @param direction must not be {@literal null}.
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static SortOperation sort(Direction direction, String... fields) {
|
||||
return new SortOperation(new Sort(direction, fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link SkipOperation} skipping the given number of elements.
|
||||
*
|
||||
* @param elementsToSkip must not be less than zero.
|
||||
* @return
|
||||
*/
|
||||
public static SkipOperation skip(int elementsToSkip) {
|
||||
return new SkipOperation(elementsToSkip);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link LimitOperation} limiting the result to the given number of elements.
|
||||
*
|
||||
* @param maxElements must not be less than zero.
|
||||
* @return
|
||||
*/
|
||||
public static LimitOperation limit(long maxElements) {
|
||||
return new LimitOperation(maxElements);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MatchOperation} using the given {@link Criteria}.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static MatchOperation match(Criteria criteria) {
|
||||
return new MatchOperation(criteria);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance for the given field names.
|
||||
*
|
||||
* @see Fields#fields(String...)
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static Fields fields(String... fields) {
|
||||
return Fields.fields(fields);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance from the given field name and target reference.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @param target must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public static Fields bind(String name, String target) {
|
||||
return Fields.from(field(name, target));
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts this {@link Aggregation} specification to a {@link DBObject}.
|
||||
*
|
||||
* @param inputCollectionName the name of the input collection
|
||||
* @return the {@code DBObject} representing this aggregation
|
||||
*/
|
||||
public DBObject toDbObject(String inputCollectionName, AggregationOperationContext rootContext) {
|
||||
|
||||
AggregationOperationContext context = rootContext;
|
||||
List<DBObject> operationDocuments = new ArrayList<DBObject>(operations.size());
|
||||
|
||||
for (AggregationOperation operation : operations) {
|
||||
|
||||
operationDocuments.add(operation.toDBObject(context));
|
||||
|
||||
if (operation instanceof FieldsExposingAggregationOperation) {
|
||||
FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation;
|
||||
context = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields());
|
||||
}
|
||||
}
|
||||
|
||||
DBObject command = new BasicDBObject("aggregate", inputCollectionName);
|
||||
command.put("pipeline", operationDocuments);
|
||||
|
||||
return command;
|
||||
}
|
||||
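
For illustration, serializing a short pipeline with `toDbObject(…)` against the default context yields a command document of the shape `{ "aggregate" : <collection>, "pipeline" : [ ... ] }`. A hedged sketch (the collection and field names are made up):

```java
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.query.Criteria;

import com.mongodb.DBObject;

// Sketch only: prints something like
// { "aggregate" : "zips" , "pipeline" : [ { "$match" : { "state" : "CA"}} , { "$limit" : 10}]}
class ToDbObjectExample {

	public static void main(String[] args) {

		DBObject command = Aggregation.newAggregation( //
				Aggregation.match(Criteria.where("state").is("CA")), //
				Aggregation.limit(10)) //
				.toDbObject("zips", Aggregation.DEFAULT_CONTEXT);

		System.out.println(command);
	}
}
```
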
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return SerializationUtils
|
||||
.serializeToJsonSafely(toDbObject("__collection__", new NoOpAggregationOperationContext()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple {@link AggregationOperationContext} that just returns {@link FieldReference}s as is.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class NoOpAggregationOperationContext implements AggregationOperationContext {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(com.mongodb.DBObject)
|
||||
*/
|
||||
@Override
|
||||
public DBObject getMappedObject(DBObject dbObject) {
|
||||
return dbObject;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.ExposedFields.AvailableField)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
return new FieldReference(new ExposedField(field, true));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(String name) {
|
||||
return new FieldReference(new ExposedField(new AggregationField(name), true));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,37 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Represents a single operation in an aggregation pipeline.
|
||||
*
|
||||
* @author Sebastian Herold
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public interface AggregationOperation {
|
||||
|
||||
/**
|
||||
* Turns the {@link AggregationOperation} into a {@link DBObject} by using the given
|
||||
* {@link AggregationOperationContext}.
|
||||
*
|
||||
* @return the DBObject
|
||||
*/
|
||||
DBObject toDBObject(AggregationOperationContext context);
|
||||
}
|
||||
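
Because the pipeline is just a list of `AggregationOperation`s, custom stages not covered by the provided factory methods can be plugged in by implementing this interface. A minimal sketch of a hand-rolled stage:

```java
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

// Sketch only: emits a raw { $limit : n } stage. The provided LimitOperation would
// normally be preferred; this merely illustrates the extension point.
class RawLimitOperation implements AggregationOperation {

	private final long limit;

	RawLimitOperation(long limit) {
		this.limit = limit;
	}

	@Override
	public DBObject toDBObject(AggregationOperationContext context) {
		return new BasicDBObject("$limit", limit);
	}
}
```
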
@@ -0,0 +1,55 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* The context for an {@link AggregationOperation}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public interface AggregationOperationContext {
|
||||
|
||||
/**
|
||||
* Returns the mapped {@link DBObject}, potentially converting the source considering mapping metadata etc.
|
||||
*
|
||||
* @param dbObject will never be {@literal null}.
|
||||
* @return must not be {@literal null}.
|
||||
*/
|
||||
DBObject getMappedObject(DBObject dbObject);
|
||||
|
||||
/**
|
||||
* Returns a {@link FieldReference} for the given field or {@literal null} if the context does not expose the given
|
||||
* field.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
FieldReference getReference(Field field);
|
||||
|
||||
/**
|
||||
* Returns the {@link FieldReference} for the field with the given name or {@literal null} if the context does not
|
||||
* expose a field with the given name.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
FieldReference getReference(String name);
|
||||
}
|
||||
@@ -0,0 +1,98 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Collects the results of executing an aggregation operation.
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
* @param <T> the type onto which the results are mapped.
|
||||
* @since 1.3
|
||||
*/
|
||||
public class AggregationResults<T> implements Iterable<T> {
|
||||
|
||||
private final List<T> mappedResults;
|
||||
private final DBObject rawResults;
|
||||
private final String serverUsed;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AggregationResults} instance from the given mapped and raw results.
|
||||
*
|
||||
* @param mappedResults must not be {@literal null}.
|
||||
* @param rawResults must not be {@literal null}.
|
||||
*/
|
||||
public AggregationResults(List<T> mappedResults, DBObject rawResults) {
|
||||
|
||||
Assert.notNull(mappedResults);
|
||||
Assert.notNull(rawResults);
|
||||
|
||||
this.mappedResults = Collections.unmodifiableList(mappedResults);
|
||||
this.rawResults = rawResults;
|
||||
this.serverUsed = parseServerUsed();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the aggregation results.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public List<T> getMappedResults() {
|
||||
return mappedResults;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the unique mapped result. Assumes no result or exactly one.
|
||||
*
|
||||
* @return
|
||||
* @throws IllegalArgumentException in case more than one result is available.
|
||||
*/
|
||||
public T getUniqueMappedResult() {
|
||||
Assert.isTrue(mappedResults.size() < 2, "Expected unique result or null, but got more than one!");
|
||||
return mappedResults.size() == 1 ? mappedResults.get(0) : null;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Iterable#iterator()
|
||||
*/
|
||||
public Iterator<T> iterator() {
|
||||
return mappedResults.iterator();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the server that has been used to perform the aggregation.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getServerUsed() {
|
||||
return serverUsed;
|
||||
}
|
||||
|
||||
private String parseServerUsed() {
|
||||
|
||||
Object object = rawResults.get("serverUsed");
|
||||
return object instanceof String ? (String) object : null;
|
||||
}
|
||||
}
|
||||
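
`AggregationResults` is both iterable and queryable for a unique result, which is convenient for single-document outputs such as a global count or average. A short sketch (the result type is an assumption):

```java
import org.springframework.data.mongodb.core.aggregation.AggregationResults;

// Sketch only: TotalCount is an illustrative mapping type for a one-document result.
class AggregationResultsExample {

	Long extractTotal(AggregationResults<TotalCount> results) {

		// Returns null for an empty result and throws if more than one document came back.
		TotalCount unique = results.getUniqueMappedResult();
		return unique == null ? null : unique.total;
	}

	static class TotalCount {
		long total;
	}
}
```
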
@@ -0,0 +1,385 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.CompositeIterator;
|
||||
|
||||
/**
|
||||
* Value object to capture the fields exposed by an {@link AggregationOperation}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @since 1.3
|
||||
*/
|
||||
public class ExposedFields implements Iterable<ExposedField> {
|
||||
|
||||
private static final List<ExposedField> NO_FIELDS = Collections.emptyList();
|
||||
private static final ExposedFields EMPTY = new ExposedFields(NO_FIELDS, NO_FIELDS);
|
||||
|
||||
private final List<ExposedField> originalFields;
|
||||
private final List<ExposedField> syntheticFields;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} instance from the given {@link ExposedField}s.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static ExposedFields from(ExposedField... fields) {
|
||||
return from(Arrays.asList(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} instance from the given {@link ExposedField}s.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private static ExposedFields from(List<ExposedField> fields) {
|
||||
|
||||
ExposedFields result = EMPTY;
|
||||
|
||||
for (ExposedField field : fields) {
|
||||
result = result.and(field);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates synthetic {@link ExposedFields} from the given {@link Fields}.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static ExposedFields synthetic(Fields fields) {
|
||||
return createFields(fields, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates non-synthetic {@link ExposedFields} from the given {@link Fields}.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static ExposedFields nonSynthetic(Fields fields) {
|
||||
return createFields(fields, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} instance for the given fields in either a synthetic or non-synthetic way.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @param synthetic
|
||||
* @return
|
||||
*/
|
||||
private static ExposedFields createFields(Fields fields, boolean synthetic) {
|
||||
|
||||
Assert.notNull(fields, "Fields must not be null!");
|
||||
List<ExposedField> result = new ArrayList<ExposedField>();
|
||||
|
||||
for (Field field : fields) {
|
||||
result.add(new ExposedField(field, synthetic));
|
||||
}
|
||||
|
||||
return ExposedFields.from(result);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} with the given originals and synthetics.
|
||||
*
|
||||
* @param originals must not be {@literal null}.
|
||||
* @param synthetic must not be {@literal null}.
|
||||
*/
|
||||
private ExposedFields(List<ExposedField> originals, List<ExposedField> synthetic) {
|
||||
|
||||
this.originalFields = originals;
|
||||
this.syntheticFields = synthetic;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} adding the given {@link ExposedField}.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ExposedFields and(ExposedField field) {
|
||||
|
||||
Assert.notNull(field, "Exposed field must not be null!");
|
||||
|
||||
ArrayList<ExposedField> result = new ArrayList<ExposedField>();
|
||||
result.addAll(field.synthetic ? syntheticFields : originalFields);
|
||||
result.add(field);
|
||||
|
||||
return new ExposedFields(field.synthetic ? originalFields : result, field.synthetic ? result : syntheticFields);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the field with the given name or {@literal null} if no field with the given name is available.
|
||||
*
|
||||
* @param name
|
||||
* @return
|
||||
*/
|
||||
public ExposedField getField(String name) {
|
||||
|
||||
for (ExposedField field : this) {
|
||||
if (field.canBeReferredToBy(name)) {
|
||||
return field;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes no non-synthetic fields at all.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean exposesNoNonSyntheticFields() {
|
||||
return originalFields.isEmpty();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes a single non-synthetic field only.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean exposesSingleNonSyntheticFieldOnly() {
|
||||
return originalFields.size() == 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes no fields at all.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean exposesNoFields() {
|
||||
return exposedFieldsCount() == 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes a single field only.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean exposesSingleFieldOnly() {
|
||||
return exposedFieldsCount() == 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return
|
||||
*/
|
||||
private int exposedFieldsCount() {
|
||||
return originalFields.size() + syntheticFields.size();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Iterable#iterator()
|
||||
*/
|
||||
@Override
|
||||
public Iterator<ExposedField> iterator() {
|
||||
|
||||
CompositeIterator<ExposedField> iterator = new CompositeIterator<ExposedField>();
|
||||
iterator.add(syntheticFields.iterator());
|
||||
iterator.add(originalFields.iterator());
|
||||
|
||||
return iterator;
|
||||
}
|
||||
|
||||
/**
|
||||
* A single exposed field.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
static class ExposedField implements Field {
|
||||
|
||||
private final boolean synthetic;
|
||||
private final Field field;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedField} with the given key.
|
||||
*
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @param synthetic whether the exposed field is synthetic.
|
||||
*/
|
||||
public ExposedField(String key, boolean synthetic) {
|
||||
this(Fields.field(key), synthetic);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedField} for the given {@link Field}.
|
||||
*
|
||||
* @param delegate must not be {@literal null}.
|
||||
* @param synthetic whether the exposed field is synthetic.
|
||||
*/
|
||||
public ExposedField(Field delegate, boolean synthetic) {
|
||||
|
||||
this.field = delegate;
|
||||
this.synthetic = synthetic;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#getKey()
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return field.getName();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#getTarget()
|
||||
*/
|
||||
@Override
|
||||
public String getTarget() {
|
||||
return field.getTarget();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the field can be referred to using the given name.
|
||||
*
|
||||
* @param input
|
||||
* @return
|
||||
*/
|
||||
public boolean canBeReferredToBy(String input) {
|
||||
return getTarget().equals(input);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("AggregationField: %s, synthetic: %s", field, synthetic);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(obj instanceof ExposedField)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ExposedField that = (ExposedField) obj;
|
||||
|
||||
return this.field.equals(that.field) && this.synthetic == that.synthetic;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
int result = 17;
|
||||
|
||||
result += 31 * field.hashCode();
|
||||
result += 31 * (synthetic ? 0 : 1);
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A reference to an {@link ExposedField}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
static class FieldReference {
|
||||
|
||||
private final ExposedField field;
|
||||
|
||||
/**
|
||||
* Creates a new {@link FieldReference} for the given {@link ExposedField}.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
*/
|
||||
public FieldReference(ExposedField field) {
|
||||
|
||||
Assert.notNull(field, "ExposedField must not be null!");
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the raw, unqualified reference, i.e. the field reference without a {@literal $} prefix.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getRaw() {
|
||||
String target = field.getTarget();
|
||||
return field.synthetic ? target : String.format("%s.%s", Fields.UNDERSCORE_ID, target);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("$%s", getRaw());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(obj instanceof FieldReference)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
FieldReference that = (FieldReference) obj;
|
||||
|
||||
return this.field.equals(that.field);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return field.hashCode();
|
||||
}
|
||||
}
|
||||
}
|
||||
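
`ExposedFields` is package-internal glue: operations such as a group or projection declare which fields they expose so that later pipeline stages can resolve references against them. A small same-package sketch of how the lookup behaves (field names are made up):

```java
// Sketch only: lives in org.springframework.data.mongodb.core.aggregation because
// ExposedField and FieldReference are package-private.
package org.springframework.data.mongodb.core.aggregation;

import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;

class ExposedFieldsSketch {

	void demo() {

		// Fields produced by a synthetic (computed) projection.
		ExposedFields exposed = ExposedFields.synthetic(Fields.fields("totalPopulation"));

		ExposedField field = exposed.getField("totalPopulation");

		// Synthetic fields are referenced directly, e.g. "$totalPopulation".
		FieldReference reference = new FieldReference(field);
		System.out.println(reference);
	}
}
```
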
@@ -0,0 +1,80 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* {@link AggregationOperationContext} that combines the available field references from a given
|
||||
* {@code AggregationOperationContext} and an {@link FieldsExposingAggregationOperation}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.4
|
||||
*/
|
||||
class ExposedFieldsAggregationOperationContext implements AggregationOperationContext {
|
||||
|
||||
private final ExposedFields exposedFields;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFieldsAggregationOperationContext} from the given {@link ExposedFields}.
|
||||
*
|
||||
* @param exposedFields must not be {@literal null}.
|
||||
*/
|
||||
public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields) {
|
||||
|
||||
Assert.notNull(exposedFields, "ExposedFields must not be null!");
|
||||
this.exposedFields = exposedFields;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(com.mongodb.DBObject)
|
||||
*/
|
||||
@Override
|
||||
public DBObject getMappedObject(DBObject dbObject) {
|
||||
return dbObject;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.ExposedFields.AvailableField)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
return getReference(field.getTarget());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(String name) {
|
||||
|
||||
ExposedField field = exposedFields.getField(name);
|
||||
|
||||
if (field != null) {
|
||||
return new FieldReference(field);
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Invalid reference '%s'!", name));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,39 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
/**
|
||||
* Abstraction for a field.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public interface Field {
|
||||
|
||||
/**
|
||||
* Returns the name of the field.
|
||||
*
|
||||
* @return must not be {@literal null}.
|
||||
*/
|
||||
String getName();
|
||||
|
||||
/**
|
||||
* Returns the target of the field. In case no explicit target is available {@link #getName()} should be returned.
|
||||
*
|
||||
* @return must not be {@literal null}.
|
||||
*/
|
||||
String getTarget();
|
||||
}
|
||||
@@ -0,0 +1,271 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Value object to capture a list of {@link Field} instances.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class Fields implements Iterable<Field> {
|
||||
|
||||
private static final String AMBIGUOUS_EXCEPTION = "Found two fields both using '%s' as name: %s and %s! Please "
|
||||
+ "customize your field definitions to get to unique field names!";
|
||||
|
||||
public static String UNDERSCORE_ID = "_id";
|
||||
public static String UNDERSCORE_ID_REF = "$_id";
|
||||
|
||||
private final List<Field> fields;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance from the given {@link Fields}.
|
||||
*
|
||||
* @param fields must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public static Fields from(Field... fields) {
|
||||
|
||||
Assert.notNull(fields, "Fields must not be null!");
|
||||
return new Fields(Arrays.asList(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance for {@link Field}s with the given names.
|
||||
*
|
||||
* @param names must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static Fields fields(String... names) {
|
||||
|
||||
Assert.notNull(names, "Field names must not be null!");
|
||||
|
||||
List<Field> fields = new ArrayList<Field>();
|
||||
|
||||
for (String name : names) {
|
||||
fields.add(field(name));
|
||||
}
|
||||
|
||||
return new Fields(fields);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link Field} with the given name.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public static Field field(String name) {
|
||||
return new AggregationField(name);
|
||||
}
|
||||
|
||||
public static Field field(String name, String target) {
|
||||
Assert.hasText(target, "Target must not be null or empty!");
|
||||
return new AggregationField(name, target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance using the given {@link Field}s.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
*/
|
||||
private Fields(List<Field> fields) {
|
||||
|
||||
Assert.notNull(fields, "Fields must not be null!");
|
||||
|
||||
this.fields = verify(fields);
|
||||
}
|
||||
|
||||
private static final List<Field> verify(List<Field> fields) {
|
||||
|
||||
Map<String, Field> reference = new HashMap<String, Field>();
|
||||
|
||||
for (Field field : fields) {
|
||||
|
||||
String name = field.getName();
|
||||
Field found = reference.get(name);
|
||||
|
||||
if (found != null) {
|
||||
throw new IllegalArgumentException(String.format(AMBIGUOUS_EXCEPTION, name, found, field));
|
||||
}
|
||||
|
||||
reference.put(name, field);
|
||||
}
|
||||
|
||||
return fields;
|
||||
}
|
||||
|
||||
private Fields(Fields existing, Field tail) {
|
||||
|
||||
this.fields = new ArrayList<Field>(existing.fields.size() + 1);
|
||||
this.fields.addAll(existing.fields);
|
||||
this.fields.add(tail);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance with a new {@link Field} of the given name added.
|
||||
*
|
||||
* @param name must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public Fields and(String name) {
|
||||
return and(new AggregationField(name));
|
||||
}
|
||||
|
||||
public Fields and(String name, String target) {
|
||||
return and(new AggregationField(name, target));
|
||||
}
|
||||
|
||||
public Fields and(Field field) {
|
||||
return new Fields(this, field);
|
||||
}
|
||||
|
||||
public Fields and(Fields fields) {
|
||||
|
||||
Fields result = this;
|
||||
|
||||
for (Field field : fields) {
|
||||
result = result.and(field);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public Field getField(String name) {
|
||||
|
||||
for (Field field : fields) {
|
||||
if (field.getName().equals(name)) {
|
||||
return field;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Iterable#iterator()
|
||||
*/
|
||||
@Override
|
||||
public Iterator<Field> iterator() {
|
||||
return fields.iterator();
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object to encapsulate a field in an aggregation operation.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
static class AggregationField implements Field {
|
||||
|
||||
private final String name;
|
||||
private final String target;
|
||||
|
||||
/**
|
||||
* Creates an aggregation field with the given name. As no target is set explicitly, the name will be used as target
|
||||
* as well.
|
||||
*
|
||||
* @param key
|
||||
*/
|
||||
public AggregationField(String key) {
|
||||
this(key, null);
|
||||
}
|
||||
|
||||
public AggregationField(String name, String target) {
|
||||
|
||||
Assert.hasText(name, "AggregationField name must not be null or empty!");
|
||||
|
||||
if (target == null && name.contains(".")) {
|
||||
this.name = name.substring(name.indexOf(".") + 1);
|
||||
this.target = name;
|
||||
} else {
|
||||
this.name = name;
|
||||
this.target = target;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#getKey()
|
||||
*/
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#getAlias()
|
||||
*/
|
||||
public String getTarget() {
|
||||
return StringUtils.hasText(this.target) ? this.target : this.name;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("AggregationField - name: %s, target: %s", name, target);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(obj instanceof AggregationField)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
AggregationField that = (AggregationField) obj;
|
||||
|
||||
return this.name.equals(that.name) && ObjectUtils.nullSafeEquals(this.target, that.target);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
int result = 17;
|
||||
|
||||
result += 31 * name.hashCode();
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(target);
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
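A short sketch of how the factory methods and the immutable `and(…)` chain compose field lists; note that the duplicate-name check in `verify(…)` only runs for lists handed to `fields(…)`/`from(…)`. The field names are illustrative:

```java
import org.springframework.data.mongodb.core.aggregation.Field;
import org.springframework.data.mongodb.core.aggregation.Fields;

class FieldsCompositionSketch {

	public static void main(String[] args) {

		// two plain fields plus one aliased onto a nested document key
		Fields fields = Fields.fields("customerId", "status").and("total", "invoice.netAmount");

		for (Field field : fields) {
			System.out.println(field.getName() + " -> " + field.getTarget());
		}
		// customerId -> customerId
		// status -> status
		// total -> invoice.netAmount

		// Fields.fields("status", "status") would fail fast with the AMBIGUOUS_EXCEPTION message
	}
}
```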
@@ -0,0 +1,32 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
/**
|
||||
* An {@link AggregationOperation} that exposes new {@link ExposedFields} which can be used by subsequent
* {@code AggregationOperation}s in the aggregation pipeline.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public interface FieldsExposingAggregationOperation extends AggregationOperation {
|
||||
|
||||
/**
|
||||
* Returns the fields exposed by the {@link AggregationOperation}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
ExposedFields getFields();
|
||||
}
|
||||
@@ -0,0 +1,46 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* @author Thomas Darimont
|
||||
* @since 1.3
|
||||
*/
|
||||
public class GeoNearOperation implements AggregationOperation {
|
||||
|
||||
private final NearQuery nearQuery;
|
||||
|
||||
public GeoNearOperation(NearQuery nearQuery) {
|
||||
|
||||
Assert.notNull(nearQuery);
|
||||
this.nearQuery = nearQuery;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject("$geoNear", context.getMappedObject(nearQuery.toDBObject()));
|
||||
}
|
||||
}
|
||||
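A minimal sketch of creating the new operation; `NearQuery.near(x, y)` is assumed from the existing query API, the coordinates are made up, and the rendered stage is only indicated as a comment:

```java
import org.springframework.data.mongodb.core.aggregation.GeoNearOperation;
import org.springframework.data.mongodb.core.query.NearQuery;

class GeoNearSketch {

	public static void main(String[] args) {

		// find documents close to the given longitude/latitude pair
		GeoNearOperation geoNear = new GeoNearOperation(NearQuery.near(-73.99171, 40.738868));

		// geoNear.toDBObject(context) renders roughly:
		// { "$geoNear" : { "near" : [ -73.99171 , 40.738868 ] , ... } }
	}
}
```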
@@ -0,0 +1,375 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $group}-operation.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/group/#stage._S_group
|
||||
* @author Sebastian Herold
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class GroupOperation implements FieldsExposingAggregationOperation {
|
||||
|
||||
/**
|
||||
* Holds the non-synthetic fields which are the fields of the group-id structure.
|
||||
*/
|
||||
private final ExposedFields idFields;
|
||||
|
||||
private final List<Operation> operations;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} including the given {@link Fields}.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
*/
|
||||
public GroupOperation(Fields fields) {
|
||||
|
||||
this.idFields = ExposedFields.nonSynthetic(fields);
|
||||
this.operations = new ArrayList<Operation>();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} from the given {@link GroupOperation}.
|
||||
*
|
||||
* @param groupOperation must not be {@literal null}.
|
||||
*/
|
||||
protected GroupOperation(GroupOperation groupOperation) {
|
||||
this(groupOperation, Collections.<Operation> emptyList());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} from the given {@link GroupOperation} and the given {@link Operation}s.
|
||||
*
|
||||
* @param groupOperation
|
||||
* @param nextOperations
|
||||
*/
|
||||
private GroupOperation(GroupOperation groupOperation, List<Operation> nextOperations) {
|
||||
|
||||
Assert.notNull(groupOperation, "GroupOperation must not be null!");
|
||||
Assert.notNull(nextOperations, "NextOperations must not be null!");
|
||||
|
||||
this.idFields = groupOperation.idFields;
|
||||
this.operations = new ArrayList<Operation>(nextOperations.size() + 1);
|
||||
this.operations.addAll(groupOperation.operations);
|
||||
this.operations.addAll(nextOperations);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} from the current one adding the given {@link Operation}.
|
||||
*
|
||||
* @param operation must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
protected GroupOperation and(Operation operation) {
|
||||
return new GroupOperation(this, Arrays.asList(operation));
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for {@link GroupOperation}s on a field.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class GroupOperationBuilder {
|
||||
|
||||
private final GroupOperation groupOperation;
|
||||
private final Operation operation;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperationBuilder} from the given {@link GroupOperation} and {@link Operation}.
|
||||
*
|
||||
* @param groupOperation
|
||||
* @param operation
|
||||
*/
|
||||
private GroupOperationBuilder(GroupOperation groupOperation, Operation operation) {
|
||||
|
||||
Assert.notNull(groupOperation, "GroupOperation must not be null!");
|
||||
Assert.notNull(operation, "Operation must not be null!");
|
||||
|
||||
this.groupOperation = groupOperation;
|
||||
this.operation = operation;
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows to specify an alias for the new operation.
|
||||
*
|
||||
* @param alias
|
||||
* @return
|
||||
*/
|
||||
public GroupOperation as(String alias) {
|
||||
return this.groupOperation.and(operation.withAlias(alias));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for a {@code $sum}-expression.
|
||||
* <p>
|
||||
* Count expressions are emulated via {@code $sum: 1}.
|
||||
* <p>
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder count() {
|
||||
return newBuilder(GroupOps.SUM, null, 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for a {@code $sum}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder sum(String reference) {
|
||||
return sum(reference, null);
|
||||
}
|
||||
|
||||
private GroupOperationBuilder sum(String reference, Object value) {
|
||||
return newBuilder(GroupOps.SUM, reference, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for an {@code $addToSet}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder addToSet(String reference) {
|
||||
return addToSet(reference, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for an {@code $addToSet}-expression for the given value.
|
||||
*
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder addToSet(Object value) {
|
||||
return addToSet(null, value);
|
||||
}
|
||||
|
||||
private GroupOperationBuilder addToSet(String reference, Object value) {
|
||||
return newBuilder(GroupOps.ADD_TO_SET, reference, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for a {@code $last}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder last(String reference) {
|
||||
return newBuilder(GroupOps.LAST, reference, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for a {@code $first}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder first(String reference) {
|
||||
return newBuilder(GroupOps.FIRST, reference, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for an {@code $avg}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder avg(String reference) {
|
||||
return newBuilder(GroupOps.AVG, reference, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for a {@code $push}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder push(String reference) {
|
||||
return push(reference, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for a {@code $push}-expression for the given value.
|
||||
*
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder push(Object value) {
|
||||
return push(null, value);
|
||||
}
|
||||
|
||||
private GroupOperationBuilder push(String reference, Object value) {
|
||||
return newBuilder(GroupOps.PUSH, reference, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for a {@code $min}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder min(String reference) {
|
||||
return newBuilder(GroupOps.MIN, reference, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@link GroupOperationBuilder} for a {@code $max}-expression for the given field-reference.
|
||||
*
|
||||
* @param reference
|
||||
* @return
|
||||
*/
|
||||
public GroupOperationBuilder max(String reference) {
|
||||
return newBuilder(GroupOps.MAX, reference, null);
|
||||
}
|
||||
|
||||
private GroupOperationBuilder newBuilder(Keyword keyword, String reference, Object value) {
|
||||
return new GroupOperationBuilder(this, new Operation(keyword, null, reference, value));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getFields()
|
||||
*/
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
|
||||
ExposedFields fields = this.idFields.and(new ExposedField(Fields.UNDERSCORE_ID, true));
|
||||
|
||||
for (Operation operation : operations) {
|
||||
fields = fields.and(operation.asField());
|
||||
}
|
||||
|
||||
return fields;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public com.mongodb.DBObject toDBObject(AggregationOperationContext context) {
|
||||
|
||||
BasicDBObject operationObject = new BasicDBObject();
|
||||
|
||||
if (idFields.exposesNoNonSyntheticFields()) {
|
||||
|
||||
operationObject.put(Fields.UNDERSCORE_ID, null);
|
||||
|
||||
} else if (idFields.exposesSingleNonSyntheticFieldOnly()) {
|
||||
|
||||
FieldReference reference = context.getReference(idFields.iterator().next());
|
||||
operationObject.put(Fields.UNDERSCORE_ID, reference.toString());
|
||||
|
||||
} else {
|
||||
|
||||
BasicDBObject inner = new BasicDBObject();
|
||||
|
||||
for (ExposedField field : idFields) {
|
||||
FieldReference reference = context.getReference(field);
|
||||
inner.put(field.getName(), reference.toString());
|
||||
}
|
||||
|
||||
operationObject.put(Fields.UNDERSCORE_ID, inner);
|
||||
}
|
||||
|
||||
for (Operation operation : operations) {
|
||||
operationObject.putAll(operation.toDBObject(context));
|
||||
}
|
||||
|
||||
return new BasicDBObject("$group", operationObject);
|
||||
}
|
||||
|
||||
interface Keyword {
|
||||
|
||||
String toString();
|
||||
}
|
||||
|
||||
private static enum GroupOps implements Keyword {
|
||||
|
||||
SUM, LAST, FIRST, PUSH, AVG, MIN, MAX, ADD_TO_SET, COUNT;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
|
||||
String[] parts = name().split("_");
|
||||
|
||||
StringBuilder builder = new StringBuilder();
|
||||
|
||||
for (String part : parts) {
|
||||
String lowerCase = part.toLowerCase(Locale.US);
|
||||
builder.append(builder.length() == 0 ? lowerCase : StringUtils.capitalize(lowerCase));
|
||||
}
|
||||
|
||||
return "$" + builder.toString();
|
||||
}
|
||||
}
|
||||
|
||||
static class Operation implements AggregationOperation {
|
||||
|
||||
private final Keyword op;
|
||||
private final String key;
|
||||
private final String reference;
|
||||
private final Object value;
|
||||
|
||||
public Operation(Keyword op, String key, String reference, Object value) {
|
||||
|
||||
this.op = op;
|
||||
this.key = key;
|
||||
this.reference = reference;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public Operation withAlias(String key) {
|
||||
return new Operation(op, key, reference, value);
|
||||
}
|
||||
|
||||
public ExposedField asField() {
|
||||
return new ExposedField(key, true);
|
||||
}
|
||||
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject(key, new BasicDBObject(op.toString(), getValue(context)));
|
||||
}
|
||||
|
||||
public Object getValue(AggregationOperationContext context) {
|
||||
return reference == null ? value : context.getReference(reference).toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Operation [op=" + op + ", key=" + key + ", reference=" + reference + ", value=" + value + "]";
|
||||
}
|
||||
}
|
||||
}
|
||||
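A sketch of the builder flow: the id fields end up in `_id`, and every `as(…)`-aliased operation becomes an additional accumulator key. The rendered document is shown as a comment for illustration only and assumes a context that simply echoes field names back; the field names themselves are made up:

```java
import org.springframework.data.mongodb.core.aggregation.Fields;
import org.springframework.data.mongodb.core.aggregation.GroupOperation;

class GroupOperationSketch {

	public static void main(String[] args) {

		GroupOperation group = new GroupOperation(Fields.fields("customerId")) //
				.count().as("orders") // emulated as { $sum : 1 }
				.sum("amount").as("revenue") //
				.max("amount").as("largestOrder");

		// group.toDBObject(context) renders roughly:
		// { "$group" : { "_id" : "$customerId",
		//                "orders" : { "$sum" : 1 },
		//                "revenue" : { "$sum" : "$amount" },
		//                "largestOrder" : { "$max" : "$amount" } } }
	}
}
```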
@@ -0,0 +1,52 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $limit}-operation.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/limit/
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
class LimitOperation implements AggregationOperation {
|
||||
|
||||
private final long maxElements;
|
||||
|
||||
/**
|
||||
* @param maxElements Number of documents to consider.
|
||||
*/
|
||||
public LimitOperation(long maxElements) {
|
||||
|
||||
Assert.isTrue(maxElements >= 0, "Maximum number of elements must be greater or equal to zero!");
|
||||
this.maxElements = maxElements;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject("$limit", maxElements);
|
||||
}
|
||||
}
|
||||
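Since `LimitOperation` is package-private, the following sketch lives in the same package; the limit value is arbitrary:

```java
package org.springframework.data.mongodb.core.aggregation;

class LimitSketch {

	public static void main(String[] args) {

		// keep at most five documents from the preceding pipeline stages
		LimitOperation limit = new LimitOperation(5);

		// limit.toDBObject(context) renders: { "$limit" : 5 }
	}
}
```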
@@ -0,0 +1,56 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $match}-operation.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/match/
|
||||
* @author Sebastian Herold
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class MatchOperation implements AggregationOperation {
|
||||
|
||||
private final Criteria criteria;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MatchOperation} for the given {@link Criteria}.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
*/
|
||||
public MatchOperation(Criteria criteria) {
|
||||
|
||||
Assert.notNull(criteria, "Criteria must not be null!");
|
||||
this.criteria = criteria;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject("$match", context.getMappedObject(criteria.getCriteriaObject()));
|
||||
}
|
||||
}
|
||||
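A sketch of filtering the pipeline with the existing `Criteria` API; the criteria values are made up and the rendered stage is indicated as a comment:

```java
import org.springframework.data.mongodb.core.aggregation.MatchOperation;
import org.springframework.data.mongodb.core.query.Criteria;

class MatchSketch {

	public static void main(String[] args) {

		MatchOperation match = new MatchOperation(Criteria.where("status").is("PAID").and("amount").gte(100));

		// match.toDBObject(context) renders roughly:
		// { "$match" : { "status" : "PAID" , "amount" : { "$gte" : 100 } } }
	}
}
```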
@@ -0,0 +1,661 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.FieldProjection;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $project}-operation. Defines the projection of fields to be used in an
* {@link Aggregation}. A projection is similar to a {@link Field} inclusion/exclusion but more powerful: it can
* generate new fields and change the values of given fields.
|
||||
* <p>
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/project/
|
||||
* @author Tobias Trelle
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
|
||||
private static final List<Projection> NONE = Collections.emptyList();
|
||||
private static final String EXCLUSION_ERROR = "Exclusion of field %s not allowed. Projections by the mongodb "
|
||||
+ "aggregation framework only support the exclusion of the %s field!";
|
||||
|
||||
private final List<Projection> projections;
|
||||
|
||||
/**
|
||||
* Creates a new empty {@link ProjectionOperation}.
|
||||
*/
|
||||
public ProjectionOperation() {
|
||||
this(NONE, NONE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} including the given {@link Fields}.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
*/
|
||||
public ProjectionOperation(Fields fields) {
|
||||
this(NONE, ProjectionOperationBuilder.FieldProjection.from(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy constructor to allow building up {@link ProjectionOperation} instances from already existing
|
||||
* {@link Projection}s.
|
||||
*
|
||||
* @param current must not be {@literal null}.
|
||||
* @param projections must not be {@literal null}.
|
||||
*/
|
||||
private ProjectionOperation(List<? extends Projection> current, List<? extends Projection> projections) {
|
||||
|
||||
Assert.notNull(current, "Current projections must not be null!");
|
||||
Assert.notNull(projections, "Projections must not be null!");
|
||||
|
||||
this.projections = new ArrayList<ProjectionOperation.Projection>(current.size() + projections.size());
|
||||
this.projections.addAll(current);
|
||||
this.projections.addAll(projections);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} with the current {@link Projection}s and the given one.
|
||||
*
|
||||
* @param projection must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private ProjectionOperation and(Projection projection) {
|
||||
return new ProjectionOperation(this.projections, Arrays.asList(projection));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} with the current {@link Projection}s replacing the last current one with
|
||||
* the given one.
|
||||
*
|
||||
* @param projection must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private ProjectionOperation andReplaceLastOneWith(Projection projection) {
|
||||
|
||||
List<Projection> projections = this.projections.isEmpty() ? Collections.<Projection> emptyList() : this.projections
|
||||
.subList(0, this.projections.size() - 1);
|
||||
return new ProjectionOperation(projections, Arrays.asList(projection));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperationBuilder} to define a projection for the field with the given name.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder and(String name) {
|
||||
return new ProjectionOperationBuilder(name, this, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Excludes the given fields from the projection.
|
||||
*
|
||||
* @param fieldNames must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation andExclude(String... fieldNames) {
|
||||
|
||||
for (String fieldName : fieldNames) {
|
||||
Assert.isTrue(Fields.UNDERSCORE_ID.equals(fieldName),
|
||||
String.format(EXCLUSION_ERROR, fieldName, Fields.UNDERSCORE_ID));
|
||||
}
|
||||
|
||||
List<FieldProjection> excludeProjections = FieldProjection.from(Fields.fields(fieldNames), false);
|
||||
return new ProjectionOperation(this.projections, excludeProjections);
|
||||
}
|
||||
|
||||
/**
|
||||
* Includes the given fields into the projection.
|
||||
*
|
||||
* @param fieldNames must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation andInclude(String... fieldNames) {
|
||||
|
||||
List<FieldProjection> projections = FieldProjection.from(Fields.fields(fieldNames), true);
|
||||
return new ProjectionOperation(this.projections, projections);
|
||||
}
|
||||
|
||||
/**
|
||||
* Includes the given fields into the projection.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation andInclude(Fields fields) {
|
||||
return new ProjectionOperation(this.projections, FieldProjection.from(fields, true));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
|
||||
*/
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
|
||||
ExposedFields fields = null;
|
||||
|
||||
for (Projection projection : projections) {
|
||||
ExposedField field = projection.getExposedField();
|
||||
fields = fields == null ? ExposedFields.from(field) : fields.and(field);
|
||||
}
|
||||
|
||||
return fields;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
|
||||
BasicDBObject fieldObject = new BasicDBObject();
|
||||
|
||||
for (Projection projection : projections) {
|
||||
fieldObject.putAll(projection.toDBObject(context));
|
||||
}
|
||||
|
||||
return new BasicDBObject("$project", fieldObject);
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for {@link ProjectionOperation}s on a field.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public static class ProjectionOperationBuilder implements AggregationOperation {
|
||||
|
||||
private final String name;
|
||||
private final ProjectionOperation operation;
|
||||
private final OperationProjection previousProjection;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperationBuilder} for the field with the given name on top of the given
|
||||
* {@link ProjectionOperation}.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @param operation must not be {@literal null}.
|
||||
* @param previousProjection the previous operation projection, may be {@literal null}.
|
||||
*/
|
||||
public ProjectionOperationBuilder(String name, ProjectionOperation operation, OperationProjection previousProjection) {
|
||||
|
||||
Assert.hasText(name, "Field name must not be null or empty!");
|
||||
Assert.notNull(operation, "ProjectionOperation must not be null!");
|
||||
|
||||
this.name = name;
|
||||
this.operation = operation;
|
||||
this.previousProjection = previousProjection;
|
||||
}
|
||||
|
||||
/**
|
||||
* Projects the result of the previous operation onto the current field. Automatically adds an exclusion for
* {@code _id}, as the value it would hold by default now goes into the field just projected into.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation previousOperation() {
|
||||
|
||||
return this.operation.andExclude(Fields.UNDERSCORE_ID) //
|
||||
.and(new PreviousOperationProjection(name));
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines a nested field binding for the current field.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation nested(Fields fields) {
|
||||
return this.operation.and(new NestedFieldProjection(name, fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows to specify an alias for the previous projection operation.
|
||||
*
|
||||
* @param alias
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation as(String alias) {
|
||||
|
||||
if (previousProjection != null) {
|
||||
return this.operation.andReplaceLastOneWith(previousProjection.withAlias(alias));
|
||||
} else {
|
||||
return this.operation.and(new FieldProjection(Fields.field(alias, name), null));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $add} expression that adds the given number to the previously mentioned field.
|
||||
*
|
||||
* @param number
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder plus(Number number) {
|
||||
|
||||
Assert.notNull(number, "Number must not be null!");
|
||||
return project("add", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $add} expression that adds the value of the given field to the previously mentioned field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder plus(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("add", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@code $subtract} expression that subtracts the given number from the previously mentioned field.
|
||||
*
|
||||
* @param number
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder minus(Number number) {
|
||||
|
||||
Assert.notNull(number, "Number must not be null!");
|
||||
return project("subtract", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@code $subtract} expression that subtracts the value of the given field from the previously mentioned
* field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder minus(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("subtract", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@code $multiply} expression that multiplies the given number with the previously mentioned field.
|
||||
*
|
||||
* @param number
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder multiply(Number number) {
|
||||
|
||||
Assert.notNull(number, "Number must not be null!");
|
||||
return project("multiply", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@code $multiply} expression that multiplies the value of the given field with the previously
|
||||
* mentioned field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder multiply(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("multiply", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@code $divide} expression that divides the previously mentioned field by the given number.
|
||||
*
|
||||
* @param number
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder divide(Number number) {
|
||||
|
||||
Assert.notNull(number, "Number must not be null!");
|
||||
Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero!");
|
||||
return project("divide", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@code $divide} expression that divides the previously mentioned field by the value of the given
* field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder divide(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("divide", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@code $mod} expression that divides the previously mentioned field by the given number and returns
|
||||
* the remainder.
|
||||
*
|
||||
* @param number
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder mod(Number number) {
|
||||
|
||||
Assert.notNull(number, "Number must not be null!");
|
||||
Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero!");
|
||||
return project("mod", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a {@code $mod} expression that divides the previously mentioned field by the value of the given field
* and returns the remainder.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder mod(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("mod", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return this.operation.toDBObject(context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a generic projection for the current field.
|
||||
*
|
||||
* @param operation the operation key, e.g. {@code add}, which gets prefixed with {@code $} when rendered.
|
||||
* @param values the values to be set for the projection operation.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder project(String operation, Object... values) {
|
||||
OperationProjection projectionOperation = new OperationProjection(Fields.field(name), operation, values);
|
||||
return new ProjectionOperationBuilder(name, this.operation.and(projectionOperation), projectionOperation);
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link Projection} to pull in the result of the previous operation.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
static class PreviousOperationProjection extends Projection {
|
||||
|
||||
private final String name;
|
||||
|
||||
/**
|
||||
* Creates a new {@link PreviousOperationProjection} for the field with the given name.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
*/
|
||||
public PreviousOperationProjection(String name) {
|
||||
super(Fields.field(name));
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject(name, Fields.UNDERSCORE_ID_REF);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link Projection} to include or exclude a given {@link Field}, or to project a fixed value into it.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
static class FieldProjection extends Projection {
|
||||
|
||||
private final Field field;
|
||||
private final Object value;
|
||||
|
||||
/**
|
||||
* Creates a new {@link FieldProjection} for the field of the given name, assigning the given value.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @param value
|
||||
*/
|
||||
public FieldProjection(String name, Object value) {
|
||||
this(Fields.field(name), value);
|
||||
}
|
||||
|
||||
private FieldProjection(Field field, Object value) {
|
||||
|
||||
super(field);
|
||||
|
||||
this.field = field;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to easily create {@link FieldProjection}s for the given {@link Fields}. Fields are projected as
|
||||
* references with their given name. A field {@code foo} will be projected as {@code foo : 1}.
|
||||
*
|
||||
* @param fields the {@link Fields} to in- or exclude, must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static List<? extends Projection> from(Fields fields) {
|
||||
return from(fields, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to easily create {@link FieldProjection}s for the given {@link Fields}.
|
||||
*
|
||||
* @param fields the {@link Fields} to in- or exclude, must not be {@literal null}.
|
||||
* @param value to use for the given field.
|
||||
* @return
|
||||
*/
|
||||
public static List<FieldProjection> from(Fields fields, Object value) {
|
||||
|
||||
Assert.notNull(fields, "Fields must not be null!");
|
||||
List<FieldProjection> projections = new ArrayList<FieldProjection>();
|
||||
|
||||
for (Field field : fields) {
|
||||
projections.add(new FieldProjection(field, value));
|
||||
}
|
||||
|
||||
return projections;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject(field.getName(), renderFieldValue(context));
|
||||
}
|
||||
|
||||
private Object renderFieldValue(AggregationOperationContext context) {
|
||||
|
||||
// implicit reference or explicit include?
|
||||
if (value == null || Boolean.TRUE.equals(value)) {
|
||||
|
||||
// check whether referenced field exists in the context
|
||||
FieldReference reference = context.getReference(field.getTarget());
|
||||
|
||||
if (field.getName().equals(field.getTarget())) {
|
||||
|
||||
// render field as included
|
||||
return 1;
|
||||
}
|
||||
|
||||
// render field reference
|
||||
return reference.toString();
|
||||
} else if (Boolean.FALSE.equals(value)) {
|
||||
|
||||
// render field as excluded
|
||||
return 0;
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
static class OperationProjection extends Projection {
|
||||
|
||||
private final Field field;
|
||||
private final String operation;
|
||||
private final List<Object> values;
|
||||
|
||||
/**
|
||||
* Creates a new {@link OperationProjection} for the given field.
|
||||
*
|
||||
* @param field the field to add the operation projection for, must not be {@literal null}.
|
||||
* @param operation the actual operation key, must not be {@literal null} or empty.
|
||||
* @param values the values to pass into the operation, must not be {@literal null}.
|
||||
*/
|
||||
public OperationProjection(Field field, String operation, Object[] values) {
|
||||
|
||||
super(field);
|
||||
|
||||
Assert.hasText(operation, "Operation must not be null or empty!");
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
|
||||
this.field = field;
|
||||
this.operation = operation;
|
||||
this.values = Arrays.asList(values);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
|
||||
BasicDBList values = new BasicDBList();
|
||||
values.addAll(buildReferences(context));
|
||||
|
||||
DBObject inner = new BasicDBObject("$" + operation, values);
|
||||
|
||||
return new BasicDBObject(this.field.getName(), inner);
|
||||
}
|
||||
|
||||
private List<Object> buildReferences(AggregationOperationContext context) {
|
||||
|
||||
List<Object> result = new ArrayList<Object>(values.size());
|
||||
result.add(context.getReference(field.getTarget()).toString());
|
||||
|
||||
for (Object element : values) {
|
||||
result.add(element instanceof Field ? context.getReference((Field) element).toString() : element);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new instance of this {@link OperationProjection} with the given alias.
|
||||
*
|
||||
* @param alias the alias to set
|
||||
* @return
|
||||
*/
|
||||
public OperationProjection withAlias(String alias) {
|
||||
return new OperationProjection(Fields.field(alias, this.field.getName()), operation, values.toArray());
|
||||
}
|
||||
}
|
||||
|
||||
static class NestedFieldProjection extends Projection {
|
||||
|
||||
private final String name;
|
||||
private final Fields fields;
|
||||
|
||||
public NestedFieldProjection(String name, Fields fields) {
|
||||
|
||||
super(Fields.field(name));
|
||||
this.name = name;
|
||||
this.fields = fields;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
|
||||
DBObject nestedObject = new BasicDBObject();
|
||||
|
||||
for (Field field : fields) {
|
||||
nestedObject.put(field.getName(), context.getReference(field.getTarget()).toString());
|
||||
}
|
||||
|
||||
return new BasicDBObject(name, nestedObject);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Base class for {@link Projection} implementations.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static abstract class Projection {
|
||||
|
||||
private final ExposedField field;
|
||||
|
||||
/**
|
||||
* Creates new {@link Projection} for the given {@link Field}.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
*/
|
||||
public Projection(Field field) {
|
||||
|
||||
Assert.notNull(field, "Field must not be null!");
|
||||
this.field = new ExposedField(field, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the field exposed by the {@link Projection}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
public ExposedField getExposedField() {
|
||||
return field;
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders the current {@link Projection} into a {@link DBObject} based on the given
|
||||
* {@link AggregationOperationContext}.
|
||||
*
|
||||
* @param context will never be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public abstract DBObject toDBObject(AggregationOperationContext context);
|
||||
}
|
||||
|
||||
}
|
||||
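A sketch of the projection builder: plain inclusions render as `field : 1`, arithmetic projections render as operator expressions, and only `_id` may be excluded. The field names and the rendered output (which assumes a context that echoes field names back) are illustrative:

```java
import org.springframework.data.mongodb.core.aggregation.Fields;
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation;

class ProjectionSketch {

	public static void main(String[] args) {

		ProjectionOperation projection = new ProjectionOperation(Fields.fields("customerId")) //
				.and("netAmount").plus("tax").as("totalAmount") //
				.andExclude("_id");

		// projection.toDBObject(context) renders roughly:
		// { "$project" : { "customerId" : 1,
		//                  "totalAmount" : { "$add" : [ "$netAmount" , "$tax" ] },
		//                  "_id" : 0 } }
	}
}
```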
@@ -0,0 +1,54 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $skip}-operation.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/skip/
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class SkipOperation implements AggregationOperation {
|
||||
|
||||
private final long skipCount;
|
||||
|
||||
/**
|
||||
* Creates a new {@link SkipOperation} skipping the given number of elements.
|
||||
*
|
||||
* @param skipCount number of documents to skip.
|
||||
*/
|
||||
public SkipOperation(long skipCount) {
|
||||
|
||||
Assert.isTrue(skipCount >= 0, "Skip count must not be negative!");
|
||||
this.skipCount = skipCount;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject("$skip", skipCount);
|
||||
}
|
||||
}
|
||||
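A short sketch; the page size numbers are arbitrary:

```java
import org.springframework.data.mongodb.core.aggregation.SkipOperation;

class SkipSketch {

	public static void main(String[] args) {

		// skip the first two pages of 20 documents each
		SkipOperation skip = new SkipOperation(2 * 20);

		// skip.toDBObject(context) renders: { "$skip" : 40 }
	}
}
```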
@@ -0,0 +1,76 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.domain.Sort.Order;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $sort}-operation.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/sort/#pipe._S_sort
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class SortOperation implements AggregationOperation {
|
||||
|
||||
private final Sort sort;
|
||||
|
||||
/**
|
||||
* Creates a new {@link SortOperation} for the given {@link Sort} instance.
|
||||
*
|
||||
* @param sort must not be {@literal null}.
|
||||
*/
|
||||
public SortOperation(Sort sort) {
|
||||
|
||||
Assert.notNull(sort, "Sort must not be null!");
|
||||
this.sort = sort;
|
||||
}
|
||||
|
||||
public SortOperation and(Direction direction, String... fields) {
|
||||
return and(new Sort(direction, fields));
|
||||
}
|
||||
|
||||
public SortOperation and(Sort sort) {
|
||||
return new SortOperation(this.sort.and(sort));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
|
||||
BasicDBObject object = new BasicDBObject();
|
||||
|
||||
for (Order order : sort) {
|
||||
|
||||
// Check reference
|
||||
FieldReference reference = context.getReference(order.getProperty());
|
||||
object.put(reference.getRaw(), order.isAscending() ? 1 : -1);
|
||||
}
|
||||
|
||||
return new BasicDBObject("$sort", object);
|
||||
}
|
||||
}
|
||||
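A sketch of combining sort orders; the property names are illustrative and the rendered stage (assuming a context that echoes the names back) is shown as a comment:

```java
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.aggregation.SortOperation;

class SortSketch {

	public static void main(String[] args) {

		SortOperation sort = new SortOperation(new Sort(Direction.DESC, "revenue")) //
				.and(Direction.ASC, "customerId");

		// sort.toDBObject(context) renders roughly:
		// { "$sort" : { "revenue" : -1 , "customerId" : 1 } }
	}
}
```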
@@ -0,0 +1,102 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import static org.springframework.data.mongodb.core.aggregation.Fields.*;
|
||||
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.PersistentPropertyPath;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* {@link AggregationOperationContext} aware of a particular type and a {@link MappingContext} to potentially translate
|
||||
* property references into document field names.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class TypeBasedAggregationOperationContext implements AggregationOperationContext {
|
||||
|
||||
private final Class<?> type;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final QueryMapper mapper;
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypeBasedAggregationOperationContext} for the given type, {@link MappingContext} and
|
||||
* {@link QueryMapper}.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @param mapper must not be {@literal null}.
|
||||
*/
|
||||
public TypeBasedAggregationOperationContext(Class<?> type,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext, QueryMapper mapper) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
Assert.notNull(mappingContext, "MappingContext must not be null!");
|
||||
Assert.notNull(mapper, "QueryMapper must not be null!");
|
||||
|
||||
this.type = type;
|
||||
this.mappingContext = mappingContext;
|
||||
this.mapper = mapper;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(com.mongodb.DBObject)
|
||||
*/
|
||||
@Override
|
||||
public DBObject getMappedObject(DBObject dbObject) {
|
||||
return mapper.getMappedObject(dbObject, mappingContext.getPersistentEntity(type));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.Field)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
|
||||
PropertyPath.from(field.getTarget(), type);
|
||||
return getReferenceFor(field);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(String name) {
|
||||
PropertyPath path = PropertyPath.from(name, type);
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(path);
|
||||
|
||||
return getReferenceFor(field(path.getLeafProperty().getSegment(),
|
||||
propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)));
|
||||
}
|
||||
|
||||
private FieldReference getReferenceFor(Field field) {
|
||||
return new FieldReference(new ExposedField(field, true));
|
||||
}
|
||||
}
|
||||
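For orientation, a rough usage sketch of the context above: it is created from a domain type, a MongoMappingContext and a QueryMapper; the Person type, its @Field("fn") mapping and the pre-built converter are assumptions for illustration only.

MongoMappingContext mappingContext = new MongoMappingContext();
QueryMapper queryMapper = new QueryMapper(converter); // converter: an already configured MappingMongoConverter

AggregationOperationContext context =
		new TypeBasedAggregationOperationContext(Person.class, mappingContext, queryMapper);

// Resolves the Java property name to the mapped document field, e.g. "fn" for a
// Person.firstname annotated with @Field("fn"); unknown properties fail fast because
// PropertyPath.from(...) validates the name against Person.
FieldReference reference = context.getReference("firstname");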
@@ -0,0 +1,51 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* A {@code TypedAggregation} is a special {@link Aggregation} that holds information about the input aggregation type.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class TypedAggregation<I> extends Aggregation {
|
||||
|
||||
private final Class<I> inputType;
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypedAggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public TypedAggregation(Class<I> inputType, AggregationOperation... operations) {
|
||||
|
||||
super(operations);
|
||||
|
||||
Assert.notNull(inputType, "Input type must not be null!");
|
||||
this.inputType = inputType;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the input type for the {@link Aggregation}.
|
||||
*
|
||||
* @return the inputType will never be {@literal null}.
|
||||
*/
|
||||
public Class<I> getInputType() {
|
||||
return inputType;
|
||||
}
|
||||
}
|
||||
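For orientation, a sketch of how a TypedAggregation is typically obtained and executed; the static factory methods on Aggregation, the MongoOperations instance and the Person/PersonSummary types are assumed rather than shown in this comparison.

TypedAggregation<Person> aggregation = Aggregation.newAggregation(Person.class,
		Aggregation.match(Criteria.where("age").gte(18)),
		Aggregation.group("city").count().as("adults"),
		Aggregation.sort(Sort.Direction.DESC, "adults"));

// The input type travels with the pipeline, so property names can be translated
// against Person's mapping metadata when the aggregation is executed:
AggregationResults<PersonSummary> results = mongoOperations.aggregate(aggregation, PersonSummary.class);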
@@ -0,0 +1,55 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $unwind}-operation.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/unwind/#pipe._S_unwind
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class UnwindOperation implements AggregationOperation {
|
||||
|
||||
private final ExposedField field;
|
||||
|
||||
/**
|
||||
* Creates a new {@link UnwindOperation} for the given {@link Field}.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
*/
|
||||
public UnwindOperation(Field field) {
|
||||
|
||||
Assert.notNull(field);
|
||||
this.field = new ExposedField(field, true);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject("$unwind", context.getReference(field).toString());
|
||||
}
|
||||
}
|
||||
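For orientation, the rendered form of the stage, assuming the unwind(...) factory method on Aggregation is the usual way to obtain an UnwindOperation:

UnwindOperation unwind = Aggregation.unwind("tags");

// With the default (no-op) field mapping the stage comes out as:
// { "$unwind" : "$tags" }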
@@ -0,0 +1,5 @@
/**
* Support for the MongoDB aggregation framework.
* @since 1.3
*/
package org.springframework.data.mongodb.core.aggregation;
@@ -39,6 +39,7 @@ import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigDecimalToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.DBObjectToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigDecimalConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigIntegerConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToURLConverter;
|
||||
@@ -98,6 +99,7 @@ public class CustomConversions {
|
||||
this.converters.add(StringToBigIntegerConverter.INSTANCE);
|
||||
this.converters.add(URLToStringConverter.INSTANCE);
|
||||
this.converters.add(StringToURLConverter.INSTANCE);
|
||||
this.converters.add(DBObjectToStringConverter.INSTANCE);
|
||||
this.converters.addAll(JodaTimeConverters.getConvertersToRegister());
|
||||
this.converters.addAll(converters);
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,16 +18,19 @@ package org.springframework.data.mongodb.core.convert;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.data.convert.SimpleTypeInformationMapper;
|
||||
import org.springframework.data.convert.DefaultTypeMapper;
|
||||
import org.springframework.data.convert.SimpleTypeInformationMapper;
|
||||
import org.springframework.data.convert.TypeAliasAccessor;
|
||||
import org.springframework.data.convert.TypeInformationMapper;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
@@ -37,33 +40,43 @@ import com.mongodb.DBObject;
|
||||
* respectively.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class DefaultMongoTypeMapper extends DefaultTypeMapper<DBObject> implements MongoTypeMapper {
|
||||
|
||||
public static final String DEFAULT_TYPE_KEY = "_class";
|
||||
@SuppressWarnings("rawtypes")
|
||||
@SuppressWarnings("rawtypes")//
|
||||
private static final TypeInformation<List> LIST_TYPE_INFO = ClassTypeInformation.from(List.class);
|
||||
@SuppressWarnings("rawtypes")
|
||||
@SuppressWarnings("rawtypes")//
|
||||
private static final TypeInformation<Map> MAP_TYPE_INFO = ClassTypeInformation.from(Map.class);
|
||||
private String typeKey = DEFAULT_TYPE_KEY;
|
||||
|
||||
private final TypeAliasAccessor<DBObject> accessor;
|
||||
private final String typeKey;
|
||||
|
||||
public DefaultMongoTypeMapper() {
|
||||
this(DEFAULT_TYPE_KEY, Arrays.asList(SimpleTypeInformationMapper.INSTANCE));
|
||||
this(DEFAULT_TYPE_KEY);
|
||||
}
|
||||
|
||||
public DefaultMongoTypeMapper(String typeKey) {
|
||||
super(new DBObjectTypeAliasAccessor(typeKey));
|
||||
this.typeKey = typeKey;
|
||||
this(typeKey, Arrays.asList(SimpleTypeInformationMapper.INSTANCE));
|
||||
}
|
||||
|
||||
public DefaultMongoTypeMapper(String typeKey, MappingContext<? extends PersistentEntity<?, ?>, ?> mappingContext) {
|
||||
super(new DBObjectTypeAliasAccessor(typeKey), mappingContext, Arrays.asList(SimpleTypeInformationMapper.INSTANCE));
|
||||
this.typeKey = typeKey;
|
||||
this(typeKey, new DBObjectTypeAliasAccessor(typeKey), mappingContext, Arrays
|
||||
.asList(SimpleTypeInformationMapper.INSTANCE));
|
||||
}
|
||||
|
||||
public DefaultMongoTypeMapper(String typeKey, List<? extends TypeInformationMapper> mappers) {
|
||||
super(new DBObjectTypeAliasAccessor(typeKey), mappers);
|
||||
this(typeKey, new DBObjectTypeAliasAccessor(typeKey), null, mappers);
|
||||
}
|
||||
|
||||
private DefaultMongoTypeMapper(String typeKey, TypeAliasAccessor<DBObject> accessor,
|
||||
MappingContext<? extends PersistentEntity<?, ?>, ?> mappingContext, List<? extends TypeInformationMapper> mappers) {
|
||||
|
||||
super(accessor, mappingContext, mappers);
|
||||
|
||||
this.typeKey = typeKey;
|
||||
this.accessor = accessor;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -74,6 +87,31 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper<DBObject> implemen
|
||||
return typeKey == null ? false : typeKey.equals(key);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.MongoTypeMapper#writeTypeRestrictions(java.util.Set)
|
||||
*/
|
||||
@Override
|
||||
public void writeTypeRestrictions(DBObject result, Set<Class<?>> restrictedTypes) {
|
||||
|
||||
if (restrictedTypes == null || restrictedTypes.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
BasicDBList restrictedMappedTypes = new BasicDBList();
|
||||
|
||||
for (Class<?> restrictedType : restrictedTypes) {
|
||||
|
||||
Object typeAlias = getAliasFor(ClassTypeInformation.from(restrictedType));
|
||||
|
||||
if (typeAlias != null) {
|
||||
restrictedMappedTypes.add(typeAlias);
|
||||
}
|
||||
}
|
||||
|
||||
accessor.writeTypeTo(result, new BasicDBObject("$in", restrictedMappedTypes));
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.convert.DefaultTypeMapper#getFallbackTypeFor(java.lang.Object)
|
||||
*/
|
||||
@@ -83,6 +121,7 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper<DBObject> implemen
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypeAliasAccessor} to store aliases in a {@link DBObject}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
|
||||
@@ -71,6 +71,8 @@ import com.mongodb.DBRef;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Jon Brisbin
|
||||
* @author Patrik Wasik
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
|
||||
|
||||
@@ -123,6 +125,15 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
mappingContext) : typeMapper;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.MongoConverter#getTypeMapper()
|
||||
*/
|
||||
@Override
|
||||
public MongoTypeMapper getTypeMapper() {
|
||||
return this.typeMapper;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure the characters that dots potentially contained in a {@link Map} shall be replaced with. By default we don't do
|
||||
* any translation but rather reject a {@link Map} with keys containing dots causing the conversion for the entire
|
||||
@@ -355,8 +366,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
try {
|
||||
Object id = wrapper.getProperty(idProperty, Object.class, fieldAccessOnly);
|
||||
dbo.put("_id", idMapper.convertId(id));
|
||||
} catch (ConversionException ignored) {
|
||||
}
|
||||
} catch (ConversionException ignored) {}
|
||||
}
|
||||
|
||||
// Write the properties
|
||||
@@ -411,8 +421,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
if (valueType.isMap()) {
|
||||
BasicDBObject mapDbObj = new BasicDBObject();
|
||||
writeMapInternal((Map<Object, Object>) obj, mapDbObj, type);
|
||||
DBObject mapDbObj = createMap((Map<Object, Object>) obj, prop);
|
||||
dbo.put(name, mapDbObj);
|
||||
return;
|
||||
}
|
||||
@@ -492,6 +501,42 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return dbList;
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes the given {@link Map} using the given {@link MongoPersistentProperty} information.
|
||||
*
|
||||
* @param map must not be {@literal null}.
|
||||
* @param property must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
protected DBObject createMap(Map<Object, Object> map, MongoPersistentProperty property) {
|
||||
|
||||
Assert.notNull(map, "Given map must not be null!");
|
||||
Assert.notNull(property, "PersistentProperty must not be null!");
|
||||
|
||||
if (!property.isDbReference()) {
|
||||
return writeMapInternal(map, new BasicDBObject(), property.getTypeInformation());
|
||||
}
|
||||
|
||||
BasicDBObject dbObject = new BasicDBObject();
|
||||
|
||||
for (Map.Entry<Object, Object> entry : map.entrySet()) {
|
||||
|
||||
Object key = entry.getKey();
|
||||
Object value = entry.getValue();
|
||||
|
||||
if (conversions.isSimpleType(key.getClass())) {
|
||||
|
||||
String simpleKey = potentiallyEscapeMapKey(key.toString());
|
||||
dbObject.put(simpleKey, value != null ? createDBRef(value, property.getDBRef()) : null);
|
||||
|
||||
} else {
|
||||
throw new MappingException("Cannot use a complex object as a key value.");
|
||||
}
|
||||
}
|
||||
|
||||
return dbObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* Populates the given {@link BasicDBList} with values from the given {@link Collection}.
|
||||
*
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.data.convert.EntityConverter;
|
||||
import org.springframework.data.convert.EntityReader;
|
||||
import org.springframework.data.convert.TypeMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
|
||||
@@ -26,9 +27,17 @@ import com.mongodb.DBObject;
|
||||
* Central Mongo specific converter interface which combines {@link MongoWriter} and {@link MongoReader}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public interface MongoConverter extends
|
||||
EntityConverter<MongoPersistentEntity<?>, MongoPersistentProperty, Object, DBObject>, MongoWriter<Object>,
|
||||
EntityReader<Object, DBObject> {
|
||||
|
||||
/**
|
||||
* Returns the {@link TypeMapper} being used to write type information into {@link DBObject}s created with that
|
||||
* converter.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
MongoTypeMapper getTypeMapper();
|
||||
}
|
||||
|
||||
@@ -24,8 +24,11 @@ import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionFailedException;
|
||||
import org.springframework.core.convert.TypeDescriptor;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.convert.ReadingConverter;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Wrapper class containing useful converters for use with Mongo.
|
||||
*
|
||||
@@ -147,4 +150,15 @@ abstract class MongoConverters {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ReadingConverter
|
||||
public static enum DBObjectToStringConverter implements Converter<DBObject, String> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public String convert(DBObject source) {
|
||||
return source == null ? null : source.toString();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.data.convert.TypeMapper;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
@@ -32,4 +34,14 @@ public interface MongoTypeMapper extends TypeMapper<DBObject> {
|
||||
* @return
|
||||
*/
|
||||
boolean isTypeKey(String key);
|
||||
|
||||
/**
|
||||
* Writes type restrictions to the given {@link DBObject}. This usually results in an {@code $in}-clause being
|
||||
* generated that restricts the type-key (e.g. {@code _class}) to be in the set of type aliases for the given
|
||||
* {@code restrictedTypes}.
|
||||
*
|
||||
* @param result must not be {@literal null}
|
||||
* @param restrictedTypes must not be {@literal null}
|
||||
*/
|
||||
void writeTypeRestrictions(DBObject result, Set<Class<?>> restrictedTypes);
|
||||
}
|
||||
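For orientation, what the new contract produces with the default type mapper; the Person class and its package name are placeholders:

MongoTypeMapper typeMapper = new DefaultMongoTypeMapper(); // writes the fully qualified class name under "_class"

DBObject query = new BasicDBObject("lastname", "Smith");
typeMapper.writeTypeRestrictions(query, Collections.<Class<?>> singleton(Person.class));

// query now roughly reads:
// { "lastname" : "Smith" , "_class" : { "$in" : [ "com.example.Person" ] } }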
|
||||
@@ -30,6 +30,7 @@ import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.PersistentPropertyPath;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
@@ -47,7 +48,6 @@ import com.mongodb.DBRef;
|
||||
public class QueryMapper {
|
||||
|
||||
private static final List<String> DEFAULT_ID_NAMES = Arrays.asList("id", "_id");
|
||||
private static final String N_OR_PATTERN = "\\$.*or";
|
||||
|
||||
private final ConversionService conversionService;
|
||||
private final MongoConverter converter;
|
||||
@@ -75,9 +75,10 @@ public class QueryMapper {
|
||||
* @param entity can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public DBObject getMappedObject(DBObject query, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (Keyword.isKeyword(query)) {
|
||||
if (isNestedKeyword(query)) {
|
||||
return getMappedKeyword(new Keyword(query), entity);
|
||||
}
|
||||
|
||||
@@ -85,7 +86,17 @@ public class QueryMapper {
|
||||
|
||||
for (String key : query.keySet()) {
|
||||
|
||||
if (Keyword.isKeyword(key)) {
|
||||
// TODO: remove one once QueryMapper can work with Query instances directly
|
||||
if (Query.isRestrictedTypeKey(key)) {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
Set<Class<?>> restrictedTypes = (Set<Class<?>>) query.get(key);
|
||||
this.converter.getTypeMapper().writeTypeRestrictions(result, restrictedTypes);
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
if (isKeyword(key)) {
|
||||
result.putAll(getMappedKeyword(new Keyword(query, key), entity));
|
||||
continue;
|
||||
}
|
||||
@@ -95,11 +106,11 @@ public class QueryMapper {
|
||||
Object rawValue = query.get(key);
|
||||
String newKey = field.getMappedKey();
|
||||
|
||||
if (Keyword.isKeyword(rawValue) && !field.isIdField()) {
|
||||
if (isNestedKeyword(rawValue) && !field.isIdField()) {
|
||||
Keyword keyword = new Keyword((DBObject) rawValue);
|
||||
result.put(newKey, getMappedKeyword(field, keyword));
|
||||
} else {
|
||||
result.put(newKey, getMappedValue(field, query.get(key)));
|
||||
result.put(newKey, getMappedValue(field, rawValue));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -109,16 +120,16 @@ public class QueryMapper {
|
||||
/**
|
||||
* Returns the given {@link DBObject} representing a keyword by mapping the keyword's value.
|
||||
*
|
||||
* @param query the {@link DBObject} representing a keyword (e.g. {@code $ne : … } )
|
||||
* @param keyword the {@link DBObject} representing a keyword (e.g. {@code $ne : … } )
|
||||
* @param entity
|
||||
* @return
|
||||
*/
|
||||
private DBObject getMappedKeyword(Keyword query, MongoPersistentEntity<?> entity) {
|
||||
private DBObject getMappedKeyword(Keyword keyword, MongoPersistentEntity<?> entity) {
|
||||
|
||||
// $or/$nor
|
||||
if (query.key.matches(N_OR_PATTERN) || query.value instanceof Iterable) {
|
||||
if (keyword.isOrOrNor() || keyword.hasIterableValue()) {
|
||||
|
||||
Iterable<?> conditions = (Iterable<?>) query.value;
|
||||
Iterable<?> conditions = keyword.getValue();
|
||||
BasicDBList newConditions = new BasicDBList();
|
||||
|
||||
for (Object condition : conditions) {
|
||||
@@ -126,10 +137,10 @@ public class QueryMapper {
|
||||
: convertSimpleOrDBObject(condition, entity));
|
||||
}
|
||||
|
||||
return new BasicDBObject(query.key, newConditions);
|
||||
return new BasicDBObject(keyword.getKey(), newConditions);
|
||||
}
|
||||
|
||||
return new BasicDBObject(query.key, convertSimpleOrDBObject(query.value, entity));
|
||||
return new BasicDBObject(keyword.getKey(), convertSimpleOrDBObject(keyword.getValue(), entity));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -142,10 +153,12 @@ public class QueryMapper {
|
||||
private DBObject getMappedKeyword(Field property, Keyword keyword) {
|
||||
|
||||
boolean needsAssociationConversion = property.isAssociation() && !keyword.isExists();
|
||||
Object value = needsAssociationConversion ? convertAssociation(keyword.value, property.getProperty())
|
||||
: getMappedValue(property.with(keyword.key), keyword.value);
|
||||
Object value = keyword.getValue();
|
||||
|
||||
return new BasicDBObject(keyword.key, value);
|
||||
Object convertedValue = needsAssociationConversion ? convertAssociation(value, property.getProperty())
|
||||
: getMappedValue(property.with(keyword.getKey()), value);
|
||||
|
||||
return new BasicDBObject(keyword.key, convertedValue);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -183,7 +196,7 @@ public class QueryMapper {
|
||||
}
|
||||
}
|
||||
|
||||
if (Keyword.isKeyword(value)) {
|
||||
if (isNestedKeyword(value)) {
|
||||
return getMappedKeyword(new Keyword((DBObject) value), null);
|
||||
}
|
||||
|
||||
@@ -204,13 +217,25 @@ public class QueryMapper {
|
||||
private Object convertSimpleOrDBObject(Object source, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (source instanceof BasicDBList) {
|
||||
return converter.convertToMongoType(source);
|
||||
return delegateConvertToMongoType(source, entity);
|
||||
}
|
||||
|
||||
if (source instanceof DBObject) {
|
||||
return getMappedObject((DBObject) source, entity);
|
||||
}
|
||||
|
||||
return delegateConvertToMongoType(source, entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the given source Object to a mongo type with the type information of the original source type omitted.
|
||||
* Subclasses may overwrite this method to retain the type information of the source type on the resulting mongo type.
|
||||
*
|
||||
* @param source
|
||||
* @param entity
|
||||
* @return the converted mongo type or null if source is null
|
||||
*/
|
||||
protected Object delegateConvertToMongoType(Object source, MongoPersistentEntity<?> entity) {
|
||||
return converter.convertToMongoType(source);
|
||||
}
|
||||
|
||||
@@ -262,7 +287,40 @@ public class QueryMapper {
|
||||
// Ignore
|
||||
}
|
||||
|
||||
return converter.convertToMongoType(id);
|
||||
return delegateConvertToMongoType(id, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link Object} is a keyword, i.e. if it's a {@link DBObject} with a keyword key.
|
||||
*
|
||||
* @param candidate
|
||||
* @return
|
||||
*/
|
||||
protected boolean isNestedKeyword(Object candidate) {
|
||||
|
||||
if (!(candidate instanceof BasicDBObject)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BasicDBObject dbObject = (BasicDBObject) candidate;
|
||||
Set<String> keys = dbObject.keySet();
|
||||
|
||||
if (keys.size() != 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return isKeyword(keys.iterator().next().toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link String} is a MongoDB keyword. The default implementation will check against the
|
||||
* set of registered keywords returned by {@link #getKeywords()}.
|
||||
*
|
||||
* @param candidate
|
||||
* @return
|
||||
*/
|
||||
protected boolean isKeyword(String candidate) {
|
||||
return candidate.startsWith("$");
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -272,8 +330,10 @@ public class QueryMapper {
|
||||
*/
|
||||
private static class Keyword {
|
||||
|
||||
String key;
|
||||
Object value;
|
||||
private static final String N_OR_PATTERN = "\\$.*or";
|
||||
|
||||
private final String key;
|
||||
private final Object value;
|
||||
|
||||
public Keyword(DBObject source, String key) {
|
||||
this.key = key;
|
||||
@@ -298,25 +358,21 @@ public class QueryMapper {
|
||||
return "$exists".equalsIgnoreCase(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given value actually represents a keyword. If this returns {@literal true} it's safe to call
|
||||
* the constructor.
|
||||
*
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
public static boolean isKeyword(Object value) {
|
||||
public boolean isOrOrNor() {
|
||||
return key.matches(N_OR_PATTERN);
|
||||
}
|
||||
|
||||
if (value instanceof String) {
|
||||
return ((String) value).startsWith("$");
|
||||
}
|
||||
public boolean hasIterableValue() {
|
||||
return value instanceof Iterable;
|
||||
}
|
||||
|
||||
if (!(value instanceof DBObject)) {
|
||||
return false;
|
||||
}
|
||||
public String getKey() {
|
||||
return key;
|
||||
}
|
||||
|
||||
DBObject dbObject = (DBObject) value;
|
||||
return dbObject.keySet().size() == 1 && dbObject.keySet().iterator().next().startsWith("$");
|
||||
@SuppressWarnings("unchecked")
|
||||
public <T> T getValue() {
|
||||
return (T) value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -332,7 +388,7 @@ public class QueryMapper {
|
||||
protected final String name;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Field} without meta-information but the given name.
|
||||
* Creates a new {@link DocumentField} without meta-information but the given name.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
*/
|
||||
@@ -343,7 +399,7 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new {@link Field} with the given name.
|
||||
* Returns a new {@link DocumentField} with the given name.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @return
|
||||
@@ -399,7 +455,7 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
/**
|
||||
* Extension of {@link Field} to be backed with mapping metadata.
|
||||
* Extension of {@link DocumentField} to be backed with mapping metadata.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
|
||||
@@ -0,0 +1,52 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
|
||||
/**
|
||||
* A subclass of {@link QueryMapper} that retains type information on the mongo types.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class UpdateMapper extends QueryMapper {
|
||||
|
||||
private final MongoWriter<?> converter;
|
||||
|
||||
/**
|
||||
* Creates a new {@link UpdateMapper} using the given {@link MongoConverter}.
|
||||
*
|
||||
* @param converter must not be {@literal null}.
|
||||
*/
|
||||
public UpdateMapper(MongoConverter converter) {
|
||||
|
||||
super(converter);
|
||||
this.converter = converter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the given source object to a mongo type retaining the original type information of the source type on the
|
||||
* mongo type.
|
||||
*
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper#delegateConvertToMongoType(java.lang.Object,
|
||||
* org.springframework.data.mongodb.core.mapping.MongoPersistentEntity)
|
||||
*/
|
||||
@Override
|
||||
protected Object delegateConvertToMongoType(Object source, MongoPersistentEntity<?> entity) {
|
||||
return entity == null ? super.delegateConvertToMongoType(source, null) : converter.convertToMongoType(source,
|
||||
entity.getTypeInformation());
|
||||
}
|
||||
}
|
||||
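For orientation, the practical difference to the plain QueryMapper, with the converter, the persistent entity and the Contact/EmailAddress types assumed purely for illustration:

UpdateMapper updateMapper = new UpdateMapper(converter); // converter: an existing MappingMongoConverter

DBObject update = new BasicDBObject("$set",
		new BasicDBObject("contact", new EmailAddress("foo@bar.de")));

// Unlike QueryMapper, the mapped $set keeps the "_class" hint for the nested value,
// so reading the document back can re-instantiate EmailAddress rather than the
// declared Contact type.
DBObject mappedUpdate = updateMapper.getMappedObject(update, personEntity);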
@@ -131,7 +131,7 @@ public class GeoResults<T> implements Iterable<GeoResult<T>> {
private static Distance calculateAverageDistance(List<? extends GeoResult<?>> results, Metric metric) {

if (results.isEmpty()) {
return new Distance(0, null);
return new Distance(0, metric);
}

double averageDistance = 0;

@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -26,6 +26,8 @@ import java.lang.annotation.Target;
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Johno Crawford
|
||||
*/
|
||||
@Target({ ElementType.TYPE })
|
||||
@Documented
|
||||
@@ -69,4 +71,20 @@ public @interface CompoundIndex {
|
||||
* @return
|
||||
*/
|
||||
String collection() default "";
|
||||
|
||||
/**
|
||||
* If {@literal true} the index will be created in the background.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/core/indexes/#background-construction
|
||||
* @return
|
||||
*/
|
||||
boolean background() default false;
|
||||
|
||||
/**
|
||||
* Configures the number of seconds after which the collection should expire. Defaults to -1 for no expiry.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/tutorial/expire-data/
|
||||
* @return
|
||||
*/
|
||||
int expireAfterSeconds() default -1;
|
||||
}
|
||||
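For orientation, how the new attribute is meant to be used on a document class; the entity and the index definition are illustrative only:

import org.springframework.data.mongodb.core.index.CompoundIndex;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
@CompoundIndex(name = "age_city_idx", def = "{ 'age' : 1, 'city' : -1 }", background = true)
class Person {
	int age;
	String city;
}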
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,18 +18,20 @@ package org.springframework.data.mongodb.core.index;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.query.Order;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public class Index implements IndexDefinition {
|
||||
|
||||
public enum Duplicates {
|
||||
RETAIN, DROP
|
||||
}
|
||||
|
||||
private final Map<String, Order> fieldSpec = new LinkedHashMap<String, Order>();
|
||||
private final Map<String, Direction> fieldSpec = new LinkedHashMap<String, Direction>();
|
||||
|
||||
private String name;
|
||||
|
||||
@@ -42,12 +44,37 @@ public class Index implements IndexDefinition {
|
||||
public Index() {
|
||||
}
|
||||
|
||||
public Index(String key, Order order) {
|
||||
fieldSpec.put(key, order);
|
||||
public Index(String key, Direction direction) {
|
||||
fieldSpec.put(key, direction);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Indexed} on the given key and {@link Order}.
|
||||
*
|
||||
* @deprecated use {@link #Index(String, Direction)} instead.
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @param order must not be {@literal null}.
|
||||
*/
|
||||
@Deprecated
|
||||
public Index(String key, Order order) {
|
||||
this(key, order.toDirection());
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the given field to the index.
|
||||
*
|
||||
* @deprecated use {@link #on(String, Direction)} instead.
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @param order must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@Deprecated
|
||||
public Index on(String key, Order order) {
|
||||
fieldSpec.put(key, order);
|
||||
return on(key, order.toDirection());
|
||||
}
|
||||
|
||||
public Index on(String key, Direction direction) {
|
||||
fieldSpec.put(key, direction);
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -76,7 +103,7 @@ public class Index implements IndexDefinition {
|
||||
public DBObject getIndexKeys() {
|
||||
DBObject dbo = new BasicDBObject();
|
||||
for (String k : fieldSpec.keySet()) {
|
||||
dbo.put(k, (fieldSpec.get(k).equals(Order.ASCENDING) ? 1 : -1));
|
||||
dbo.put(k, fieldSpec.get(k).equals(Direction.ASC) ? 1 : -1);
|
||||
}
|
||||
return dbo;
|
||||
}
|
||||
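For orientation, a programmatic index definition using the new Direction-based API; the template call in the last comment is assumed wiring:

Index index = new Index("lastname", Direction.ASC).on("age", Direction.DESC).named("lastname_age_idx").unique();

// getIndexKeys() for this definition yields { "lastname" : 1 , "age" : -1 }.
// Typically applied via e.g. mongoTemplate.indexOps(Person.class).ensureIndex(index);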
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012 the original author or authors.
|
||||
* Copyright 2012-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.query.Order;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
@@ -24,30 +25,38 @@ import org.springframework.util.ObjectUtils;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public final class IndexField {
|
||||
|
||||
private final String key;
|
||||
private final Order order;
|
||||
private final Direction direction;
|
||||
private final boolean isGeo;
|
||||
|
||||
private IndexField(String key, Order order, boolean isGeo) {
|
||||
private IndexField(String key, Direction direction, boolean isGeo) {
|
||||
|
||||
Assert.hasText(key);
|
||||
Assert.isTrue(order != null ^ isGeo);
|
||||
Assert.isTrue(direction != null ^ isGeo);
|
||||
|
||||
this.key = key;
|
||||
this.order = order;
|
||||
this.direction = direction;
|
||||
this.isGeo = isGeo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a default {@link IndexField} with the given key and {@link Order}.
|
||||
*
|
||||
* @deprecated use {@link #create(String, Direction)}.
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @param order must not be {@literal null}.
|
||||
* @param direction must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@Deprecated
|
||||
public static IndexField create(String key, Order order) {
|
||||
Assert.notNull(order);
|
||||
return new IndexField(key, order.toDirection(), false);
|
||||
}
|
||||
|
||||
public static IndexField create(String key, Direction order) {
|
||||
Assert.notNull(order);
|
||||
return new IndexField(key, order, false);
|
||||
}
|
||||
@@ -70,12 +79,23 @@ public final class IndexField {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the order of the {@link IndexField} or {@literal null} in case we have a geo index field.
|
||||
* Returns the direction of the {@link IndexField} or {@literal null} in case we have a geo index field.
|
||||
*
|
||||
* @return the order
|
||||
* @deprecated use {@link #getDirection()} instead.
|
||||
* @return the direction
|
||||
*/
|
||||
@Deprecated
|
||||
public Order getOrder() {
|
||||
return order;
|
||||
return Direction.ASC.equals(direction) ? Order.ASCENDING : Order.DESCENDING;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the direction of the {@link IndexField} or {@literal null} in case we have a geo index field.
|
||||
*
|
||||
* @return the direction
|
||||
*/
|
||||
public Direction getDirection() {
|
||||
return direction;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -104,7 +124,8 @@ public final class IndexField {
|
||||
|
||||
IndexField that = (IndexField) obj;
|
||||
|
||||
return this.key.equals(that.key) && ObjectUtils.nullSafeEquals(this.order, that.order) && this.isGeo == that.isGeo;
|
||||
return this.key.equals(that.key) && ObjectUtils.nullSafeEquals(this.direction, that.direction)
|
||||
&& this.isGeo == that.isGeo;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -116,7 +137,7 @@ public final class IndexField {
|
||||
|
||||
int result = 17;
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(key);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(order);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(direction);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(isGeo);
|
||||
return result;
|
||||
}
|
||||
@@ -127,6 +148,6 @@ public final class IndexField {
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("IndexField [ key: %s, order: %s, isGeo: %s]", key, order, isGeo);
|
||||
return String.format("IndexField [ key: %s, direction: %s, isGeo: %s]", key, direction, isGeo);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -13,7 +13,6 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
@@ -24,7 +23,10 @@ import java.lang.annotation.Target;
|
||||
/**
|
||||
* Mark a field to be indexed using MongoDB's indexing feature.
|
||||
*
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Johno Crawford
|
||||
*/
|
||||
@Target(ElementType.FIELD)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@@ -41,4 +43,20 @@ public @interface Indexed {
|
||||
String name() default "";
|
||||
|
||||
String collection() default "";
|
||||
|
||||
/**
|
||||
* If {@literal true} the index will be created in the background.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/core/indexes/#background-construction
|
||||
* @return
|
||||
*/
|
||||
boolean background() default false;
|
||||
|
||||
/**
|
||||
* Configures the number of seconds after which the collection should expire. Defaults to -1 for no expiry.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/tutorial/expire-data/
|
||||
* @return
|
||||
*/
|
||||
int expireAfterSeconds() default -1;
|
||||
}
|
||||
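For orientation, expireAfterSeconds maps onto MongoDB's TTL feature, which expires documents based on a date-typed field; the entity below is illustrative:

import java.util.Date;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class LoginAttempt {

	@Id String id;

	// Builds { "timestamp" : 1 } with expireAfterSeconds = 3600 and background = true,
	// so MongoDB removes documents roughly one hour after their timestamp value.
	@Indexed(background = true, expireAfterSeconds = 3600)
	Date timestamp;
}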
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -43,6 +43,8 @@ import com.mongodb.util.JSON;
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Johno Crawford
|
||||
*/
|
||||
public class MongoPersistentEntityIndexCreator implements
|
||||
ApplicationListener<MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>> {
|
||||
@@ -106,7 +108,8 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
String indexColl = StringUtils.hasText(index.collection()) ? index.collection() : entity.getCollection();
|
||||
DBObject definition = (DBObject) JSON.parse(index.def());
|
||||
|
||||
ensureIndex(indexColl, index.name(), definition, index.unique(), index.dropDups(), index.sparse());
|
||||
ensureIndex(indexColl, index.name(), definition, index.unique(), index.dropDups(), index.sparse(),
|
||||
index.background(), index.expireAfterSeconds());
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Created compound index " + index);
|
||||
@@ -140,7 +143,8 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
int direction = index.direction() == IndexDirection.ASCENDING ? 1 : -1;
|
||||
DBObject definition = new BasicDBObject(persistentProperty.getFieldName(), direction);
|
||||
|
||||
ensureIndex(collection, name, definition, index.unique(), index.dropDups(), index.sparse());
|
||||
ensureIndex(collection, name, definition, index.unique(), index.dropDups(), index.sparse(),
|
||||
index.background(), index.expireAfterSeconds());
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Created property index " + index);
|
||||
@@ -189,15 +193,22 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
* @param unique whether it shall be a unique index
|
||||
* @param dropDups whether to drop duplicates
|
||||
* @param sparse sparse or not
|
||||
* @param background whether the index will be created in the background
|
||||
* @param expireAfterSeconds the time to live for documents in the collection
|
||||
*/
|
||||
protected void ensureIndex(String collection, String name, DBObject indexDefinition, boolean unique,
|
||||
boolean dropDups, boolean sparse) {
|
||||
boolean dropDups, boolean sparse, boolean background, int expireAfterSeconds) {
|
||||
|
||||
DBObject opts = new BasicDBObject();
|
||||
opts.put("name", name);
|
||||
opts.put("dropDups", dropDups);
|
||||
opts.put("sparse", sparse);
|
||||
opts.put("unique", unique);
|
||||
opts.put("background", background);
|
||||
|
||||
if (expireAfterSeconds != -1) {
|
||||
opts.put("expireAfterSeconds", expireAfterSeconds);
|
||||
}
|
||||
|
||||
mongoDbFactory.getDb().getCollection(collection).ensureIndex(indexDefinition, opts);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,20 +15,29 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.context.expression.BeanFactoryAccessor;
|
||||
import org.springframework.context.expression.BeanFactoryResolver;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.AssociationHandler;
|
||||
import org.springframework.data.mapping.PropertyHandler;
|
||||
import org.springframework.data.mapping.model.BasicPersistentEntity;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.MongoCollectionUtils;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.expression.Expression;
|
||||
import org.springframework.expression.ParserContext;
|
||||
import org.springframework.expression.spel.standard.SpelExpressionParser;
|
||||
import org.springframework.expression.spel.support.StandardEvaluationContext;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
@@ -37,10 +46,12 @@ import org.springframework.util.StringUtils;
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, MongoPersistentProperty> implements
|
||||
MongoPersistentEntity<T>, ApplicationContextAware {
|
||||
|
||||
private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected! Both %s and %s map to the same field name %s! Disambiguate using @DocumentField annotation!";
|
||||
private final String collection;
|
||||
private final SpelExpressionParser parser;
|
||||
private final StandardEvaluationContext context;
|
||||
@@ -89,6 +100,19 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
return expression.getValue(context, String.class);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.model.BasicPersistentEntity#verify()
|
||||
*/
|
||||
@Override
|
||||
public void verify() {
|
||||
|
||||
AssertFieldNameUniquenessHandler handler = new AssertFieldNameUniquenessHandler();
|
||||
|
||||
doWithProperties(handler);
|
||||
doWithAssociations(handler);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Comparator} implementation inspecting the {@link MongoPersistentProperty}'s order.
|
||||
*
|
||||
@@ -115,4 +139,91 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
return o1.getFieldOrder() - o2.getFieldOrder();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* As a general note: An implicit id property has a name that matches "id" or "_id". An explicit id property is one
|
||||
* that is annotated with {@link Id}. The id property is updated according to the following rules: 1) An id
|
||||
* property which is defined explicitly takes precedence over an implicitly defined id property. 2) In case of any
|
||||
* ambiguity a {@link MappingException} is thrown.
|
||||
*
|
||||
* @param property - the new id property candidate
|
||||
* @return
|
||||
*/
|
||||
@Override
|
||||
protected MongoPersistentProperty returnPropertyIfBetterIdPropertyCandidateOrNull(MongoPersistentProperty property) {
|
||||
|
||||
Assert.notNull(property);
|
||||
|
||||
if (!property.isIdProperty()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
MongoPersistentProperty currentIdProperty = getIdProperty();
|
||||
|
||||
boolean currentIdPropertyIsSet = currentIdProperty != null;
|
||||
@SuppressWarnings("null")
|
||||
boolean currentIdPropertyIsExplicit = currentIdPropertyIsSet ? currentIdProperty.isExplicitIdProperty() : false;
|
||||
boolean newIdPropertyIsExplicit = property.isExplicitIdProperty();
|
||||
|
||||
if (!currentIdPropertyIsSet) {
|
||||
return property;
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings("null")
|
||||
Field currentIdPropertyField = currentIdProperty.getField();
|
||||
|
||||
if (newIdPropertyIsExplicit && currentIdPropertyIsExplicit) {
|
||||
throw new MappingException(String.format(
|
||||
"Attempt to add explicit id property %s but already have an property %s registered "
|
||||
+ "as explicit id. Check your mapping configuration!", property.getField(), currentIdPropertyField));
|
||||
|
||||
} else if (newIdPropertyIsExplicit && !currentIdPropertyIsExplicit) {
|
||||
// explicit id property takes precedence over implicit id property
|
||||
return property;
|
||||
|
||||
} else if (!newIdPropertyIsExplicit && currentIdPropertyIsExplicit) {
|
||||
// no id property override - current property is explicitly defined
|
||||
|
||||
} else {
|
||||
throw new MappingException(String.format(
|
||||
"Attempt to add id property %s but already have an property %s registered "
|
||||
+ "as id. Check your mapping configuration!", property.getField(), currentIdPropertyField));
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handler to collect {@link MongoPersistentProperty} instances and check that each of them is mapped to a distinct
|
||||
* field name.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class AssertFieldNameUniquenessHandler implements PropertyHandler<MongoPersistentProperty>,
|
||||
AssociationHandler<MongoPersistentProperty> {
|
||||
|
||||
private final Map<String, MongoPersistentProperty> properties = new HashMap<String, MongoPersistentProperty>();
|
||||
|
||||
public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) {
|
||||
assertUniqueness(persistentProperty);
|
||||
}
|
||||
|
||||
public void doWithAssociation(Association<MongoPersistentProperty> association) {
|
||||
assertUniqueness(association.getInverse());
|
||||
}
|
||||
|
||||
private void assertUniqueness(MongoPersistentProperty property) {
|
||||
|
||||
String fieldName = property.getFieldName();
|
||||
MongoPersistentProperty existingProperty = properties.get(fieldName);
|
||||
|
||||
if (existingProperty != null) {
|
||||
throw new MappingException(String.format(AMBIGUOUS_FIELD_MAPPING, property.toString(),
|
||||
existingProperty.toString(), fieldName));
|
||||
}
|
||||
|
||||
properties.put(fieldName, property);
|
||||
}
|
||||
}
|
||||
}
|
||||
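For orientation, the id resolution rules above applied to a concrete, purely illustrative entity:

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Account {

	@Id String accountNumber; // explicit id: stored under "_id"

	String id;                // implicit candidate: the explicit property wins, this stays an ordinary field
}

// Two explicitly annotated id properties (or two competing implicit candidates) raise a MappingException.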
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -24,8 +24,10 @@ import java.util.Set;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.model.AnnotationBasedPersistentProperty;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
@@ -37,6 +39,7 @@ import com.mongodb.DBObject;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class BasicMongoPersistentProperty extends AnnotationBasedPersistentProperty<MongoPersistentProperty> implements
|
||||
MongoPersistentProperty {
|
||||
@@ -60,6 +63,8 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
CAUSE_FIELD = ReflectionUtils.findField(Throwable.class, "cause");
|
||||
}
|
||||
|
||||
private final FieldNamingStrategy fieldNamingStrategy;
|
||||
|
||||
/**
|
||||
* Creates a new {@link BasicMongoPersistentProperty}.
|
||||
*
|
||||
@@ -67,10 +72,14 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
* @param propertyDescriptor
|
||||
* @param owner
|
||||
* @param simpleTypeHolder
|
||||
* @param fieldNamingStrategy
|
||||
*/
|
||||
public BasicMongoPersistentProperty(Field field, PropertyDescriptor propertyDescriptor,
|
||||
MongoPersistentEntity<?> owner, SimpleTypeHolder simpleTypeHolder) {
|
||||
MongoPersistentEntity<?> owner, SimpleTypeHolder simpleTypeHolder, FieldNamingStrategy fieldNamingStrategy) {
|
||||
|
||||
super(field, propertyDescriptor, owner, simpleTypeHolder);
|
||||
this.fieldNamingStrategy = fieldNamingStrategy == null ? PropertyNameFieldNamingStrategy.INSTANCE
|
||||
: fieldNamingStrategy;
|
||||
|
||||
if (isIdProperty() && getFieldName() != ID_FIELD_NAME) {
|
||||
LOG.warn("Customizing field name for id property not allowed! Custom name will not be considered!");
|
||||
@@ -102,6 +111,15 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
return SUPPORTED_ID_PROPERTY_NAMES.contains(field.getName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isExplicitIdProperty()
|
||||
*/
|
||||
@Override
|
||||
public boolean isExplicitIdProperty() {
|
||||
return isAnnotationPresent(Id.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key to be used to store the value of the property inside a Mongo {@link DBObject}.
|
||||
*
|
||||
@@ -110,12 +128,34 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
public String getFieldName() {
|
||||
|
||||
if (isIdProperty()) {
|
||||
return ID_FIELD_NAME;
|
||||
|
||||
if (owner == null) {
|
||||
return ID_FIELD_NAME;
|
||||
}
|
||||
|
||||
if (owner.getIdProperty() == null) {
|
||||
return ID_FIELD_NAME;
|
||||
}
|
||||
|
||||
if (owner.isIdProperty(this)) {
|
||||
return ID_FIELD_NAME;
|
||||
}
|
||||
}
|
||||
|
||||
org.springframework.data.mongodb.core.mapping.Field annotation = getField().getAnnotation(
|
||||
org.springframework.data.mongodb.core.mapping.Field.class);
|
||||
return annotation != null && StringUtils.hasText(annotation.value()) ? annotation.value() : field.getName();
|
||||
org.springframework.data.mongodb.core.mapping.Field annotation = findAnnotation(org.springframework.data.mongodb.core.mapping.Field.class);
|
||||
|
||||
if (annotation != null && StringUtils.hasText(annotation.value())) {
|
||||
return annotation.value();
|
||||
}
|
||||
|
||||
String fieldName = fieldNamingStrategy.getFieldName(this);
|
||||
|
||||
if (!StringUtils.hasText(fieldName)) {
|
||||
throw new MappingException(String.format("Invalid (null or empty) field name returned for property %s by %s!",
|
||||
this, fieldNamingStrategy.getClass()));
|
||||
}
|
||||
|
||||
return fieldName;
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -38,10 +38,11 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
|
||||
* @param propertyDescriptor
|
||||
* @param owner
|
||||
* @param simpleTypeHolder
|
||||
* @param fieldNamingStrategy
|
||||
*/
|
||||
public CachingMongoPersistentProperty(Field field, PropertyDescriptor propertyDescriptor,
|
||||
MongoPersistentEntity<?> owner, SimpleTypeHolder simpleTypeHolder) {
|
||||
super(field, propertyDescriptor, owner, simpleTypeHolder);
|
||||
MongoPersistentEntity<?> owner, SimpleTypeHolder simpleTypeHolder, FieldNamingStrategy fieldNamingStrategy) {
|
||||
super(field, propertyDescriptor, owner, simpleTypeHolder, fieldNamingStrategy);
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -0,0 +1,46 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
/**
|
||||
* {@link FieldNamingStrategy} that abbreviates field names by using the very first letter of the camel case parts of
|
||||
* the {@link MongoPersistentProperty}'s name.
|
||||
*
|
||||
* @since 1.3
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class CamelCaseAbbreviatingFieldNamingStrategy implements FieldNamingStrategy {
|
||||
|
||||
private static final String CAMEL_CASE_PATTERN = "(?<!(^|[A-Z]))(?=[A-Z])|(?<!^)(?=[A-Z][a-z])";
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.FieldNamingStrategy#getFieldName(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty)
|
||||
*/
|
||||
public String getFieldName(MongoPersistentProperty property) {
|
||||
|
||||
String[] parts = property.getName().split(CAMEL_CASE_PATTERN);
|
||||
StringBuilder builder = new StringBuilder();
|
||||
|
||||
for (String part : parts) {
|
||||
builder.append(part.substring(0, 1).toLowerCase(Locale.US));
|
||||
}
|
||||
|
||||
return builder.toString();
|
||||
}
|
||||
}
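As an illustration of what the strategy above produces, here is a minimal sketch; the `Person` class and the surrounding boilerplate are hypothetical and only added for demonstration:

```java
import org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;

public class FieldAbbreviationSketch {

	// Hypothetical domain type used only for illustration.
	static class Person {
		String firstName;
		String emailAddress;
	}

	public static void main(String[] args) {

		MongoMappingContext context = new MongoMappingContext();
		MongoPersistentProperty property = context.getPersistentEntity(Person.class)
				.getPersistentProperty("firstName");

		// Splits "firstName" into "first" and "Name" and keeps the first letters: "fn".
		String fieldName = new CamelCaseAbbreviatingFieldNamingStrategy().getFieldName(property);
		System.out.println(fieldName); // fn
	}
}
```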
@@ -0,0 +1,36 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
/**
|
||||
* SPI interface to determine how to name document fields in case the field name is not manually defined.
|
||||
*
|
||||
* @see DocumentField
|
||||
* @see PropertyNameFieldNamingStrategy
|
||||
* @see CamelCaseAbbreviatingFieldNamingStrategy
|
||||
* @since 1.3
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public interface FieldNamingStrategy {
|
||||
|
||||
/**
|
||||
* Returns the field name to be used for the given {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
String getFieldName(MongoPersistentProperty property);
|
||||
}
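Below is a minimal sketch of a custom strategy implementing this SPI; the snake_case naming and the class name are made up for illustration:

```java
import java.util.Locale;

import org.springframework.data.mongodb.core.mapping.FieldNamingStrategy;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;

/**
 * Hypothetical strategy mapping camel-case property names to snake_case field names,
 * e.g. "firstName" -> "first_name".
 */
public class SnakeCaseFieldNamingStrategy implements FieldNamingStrategy {

	public String getFieldName(MongoPersistentProperty property) {

		// Insert an underscore before every upper-case letter that is not at the start,
		// then lower-case the whole name.
		return property.getName()
				.replaceAll("(?<!^)(?=[A-Z])", "_")
				.toLowerCase(Locale.US);
	}
}
```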
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -37,6 +37,9 @@ import org.springframework.data.util.TypeInformation;
|
||||
public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersistentEntity<?>, MongoPersistentProperty>
|
||||
implements ApplicationContextAware {
|
||||
|
||||
private static final FieldNamingStrategy DEFAULT_NAMING_STRATEGY = PropertyNameFieldNamingStrategy.INSTANCE;
|
||||
|
||||
private FieldNamingStrategy fieldNamingStrategy = DEFAULT_NAMING_STRATEGY;
|
||||
private ApplicationContext context;
|
||||
|
||||
/**
|
||||
@@ -46,6 +49,17 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
|
||||
setSimpleTypeHolder(MongoSimpleTypes.HOLDER);
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link FieldNamingStrategy} to be used to determine the field name if no manual mapping is applied.
|
||||
* Defaults to a strategy using the plain property name.
|
||||
*
|
||||
* @param fieldNamingStrategy the {@link FieldNamingStrategy} to be used to determine the field name if no manual
|
||||
* mapping is applied.
|
||||
*/
|
||||
public void setFieldNamingStrategy(FieldNamingStrategy fieldNamingStrategy) {
|
||||
this.fieldNamingStrategy = fieldNamingStrategy == null ? DEFAULT_NAMING_STRATEGY : fieldNamingStrategy;
|
||||
}
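A short configuration sketch for the setter above, assuming Java-based Spring configuration; the bean method name is arbitrary:

```java
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

@Configuration
public class MappingConfig {

	// Expose a MongoMappingContext that abbreviates field names unless an explicit
	// mapping (e.g. @Field("...")) is present on the property.
	@Bean
	public MongoMappingContext mongoMappingContext() {

		MongoMappingContext context = new MongoMappingContext();
		context.setFieldNamingStrategy(new CamelCaseAbbreviatingFieldNamingStrategy());
		return context;
	}
}
```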
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.context.AbstractMappingContext#shouldCreatePersistentEntityFor(org.springframework.data.util.TypeInformation)
|
||||
@@ -62,7 +76,7 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
|
||||
@Override
|
||||
public MongoPersistentProperty createPersistentProperty(Field field, PropertyDescriptor descriptor,
|
||||
BasicMongoPersistentEntity<?> owner, SimpleTypeHolder simpleTypeHolder) {
|
||||
return new CachingMongoPersistentProperty(field, descriptor, owner, simpleTypeHolder);
|
||||
return new CachingMongoPersistentProperty(field, descriptor, owner, simpleTypeHolder, fieldNamingStrategy);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -87,8 +101,6 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
|
||||
*/
|
||||
@Override
|
||||
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
|
||||
|
||||
this.context = applicationContext;
|
||||
super.setApplicationContext(applicationContext);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,6 +16,8 @@
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
|
||||
/**
|
||||
@@ -23,6 +25,7 @@ import org.springframework.data.mapping.PersistentProperty;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public interface MongoPersistentProperty extends PersistentProperty<MongoPersistentProperty> {
|
||||
|
||||
@@ -48,6 +51,14 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
*/
|
||||
boolean isDbReference();
|
||||
|
||||
/**
|
||||
* Returns whether the property is explicitly marked as an identifier property of the owning {@link PersistentEntity}.
|
||||
* A property is an explicit id property if it is annotated with {@link Id}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean isExplicitIdProperty();
|
||||
|
||||
/**
|
||||
* Returns the {@link DBRef} if the property is a reference.
|
||||
*
|
||||
|
||||
@@ -0,0 +1,35 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
/**
|
||||
* {@link FieldNamingStrategy} simply using the {@link MongoPersistentProperty}'s name.
|
||||
*
|
||||
* @since 1.3
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public enum PropertyNameFieldNamingStrategy implements FieldNamingStrategy {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.FieldNamingStrategy#getFieldName(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty)
|
||||
*/
|
||||
public String getFieldName(MongoPersistentProperty property) {
|
||||
return property.getName();
|
||||
}
|
||||
}
|
||||
@@ -1,167 +0,0 @@
|
||||
/*
|
||||
* Copyright 2012-2012 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.beans.PropertyDescriptor;
|
||||
import java.lang.reflect.Field;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.context.AbstractMappingContext;
|
||||
import org.springframework.data.mapping.model.AbstractPersistentProperty;
|
||||
import org.springframework.data.mapping.model.BasicPersistentEntity;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.mongodb.MongoCollectionUtils;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
|
||||
/**
|
||||
* @deprecated use {@link MongoMappingContext} instead.
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@Deprecated
|
||||
public class SimpleMongoMappingContext extends
|
||||
AbstractMappingContext<SimpleMongoMappingContext.SimpleMongoPersistentEntity<?>, MongoPersistentProperty> {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.context.AbstractMappingContext#createPersistentEntity(org.springframework.data.util.TypeInformation)
|
||||
*/
|
||||
@Override
|
||||
protected <T> SimpleMongoPersistentEntity<T> createPersistentEntity(TypeInformation<T> typeInformation) {
|
||||
return new SimpleMongoPersistentEntity<T>(typeInformation);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.context.AbstractMappingContext#createPersistentProperty(java.lang.reflect.Field, java.beans.PropertyDescriptor, org.springframework.data.mapping.model.MutablePersistentEntity, org.springframework.data.mapping.model.SimpleTypeHolder)
|
||||
*/
|
||||
@Override
|
||||
protected SimplePersistentProperty createPersistentProperty(Field field, PropertyDescriptor descriptor,
|
||||
SimpleMongoPersistentEntity<?> owner, SimpleTypeHolder simpleTypeHolder) {
|
||||
return new SimplePersistentProperty(field, descriptor, owner, simpleTypeHolder);
|
||||
}
|
||||
|
||||
static class SimplePersistentProperty extends AbstractPersistentProperty<MongoPersistentProperty> implements
|
||||
MongoPersistentProperty {
|
||||
|
||||
private static final List<String> ID_FIELD_NAMES = Arrays.asList("id", "_id");
|
||||
|
||||
/**
|
||||
* Creates a new {@link SimplePersistentProperty}.
|
||||
*
|
||||
* @param field
|
||||
* @param propertyDescriptor
|
||||
* @param information
|
||||
*/
|
||||
public SimplePersistentProperty(Field field, PropertyDescriptor propertyDescriptor, MongoPersistentEntity<?> owner,
|
||||
SimpleTypeHolder simpleTypeHolder) {
|
||||
super(field, propertyDescriptor, owner, simpleTypeHolder);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.BasicPersistentProperty#isIdProperty()
|
||||
*/
|
||||
public boolean isIdProperty() {
|
||||
return ID_FIELD_NAMES.contains(field.getName());
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.core.mapping.MongoPersistentProperty#getKey()
|
||||
*/
|
||||
public String getFieldName() {
|
||||
return isIdProperty() ? "_id" : getName();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.core.mapping.MongoPersistentProperty#getFieldOrder()
|
||||
*/
|
||||
public int getFieldOrder() {
|
||||
return Integer.MAX_VALUE;
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.AbstractPersistentProperty#createAssociation()
|
||||
*/
|
||||
@Override
|
||||
protected Association<MongoPersistentProperty> createAssociation() {
|
||||
return new Association<MongoPersistentProperty>(this, null);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.core.mapping.MongoPersistentProperty#isDbReference()
|
||||
*/
|
||||
public boolean isDbReference() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.core.mapping.MongoPersistentProperty#getDBRef()
|
||||
*/
|
||||
public DBRef getDBRef() {
|
||||
return null;
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.core.mapping.MongoPersistentProperty#isVersion()
|
||||
*/
|
||||
public boolean isVersionProperty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#usePropertyAccess()
|
||||
*/
|
||||
public boolean usePropertyAccess() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
static class SimpleMongoPersistentEntity<T> extends BasicPersistentEntity<T, MongoPersistentProperty> implements
|
||||
MongoPersistentEntity<T> {
|
||||
|
||||
/**
|
||||
* @param information
|
||||
*/
|
||||
public SimpleMongoPersistentEntity(TypeInformation<T> information) {
|
||||
super(information);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.core.mapping.MongoPersistentEntity#getCollection()
|
||||
*/
|
||||
public String getCollection() {
|
||||
return MongoCollectionUtils.getPreferredCollectionName(getType());
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.core.mapping.MongoPersistentEntity#getVersionProperty()
|
||||
*/
|
||||
public MongoPersistentProperty getVersionProperty() {
|
||||
return null;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#hasVersionProperty()
|
||||
*/
|
||||
public boolean hasVersionProperty() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,50 @@
|
||||
/*
|
||||
* Copyright 2013 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Base class for delete events.
|
||||
*
|
||||
* @author Martin Baumgartner
|
||||
*/
|
||||
public abstract class AbstractDeleteEvent<T> extends MongoMappingEvent<DBObject> {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
private final Class<T> type;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AbstractDeleteEvent} for the given {@link DBObject} and type.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
*/
|
||||
public AbstractDeleteEvent(DBObject dbo, Class<T> type) {
|
||||
|
||||
super(dbo, dbo);
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the type for which the {@link AbstractDeleteEvent} shall be invoked.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public Class<T> getType() {
|
||||
return type;
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011 by the original author(s).
|
||||
* Copyright 2011-2013 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -27,6 +27,7 @@ import com.mongodb.DBObject;
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Martin Baumgartner
|
||||
*/
|
||||
public abstract class AbstractMongoEventListener<E> implements ApplicationListener<MongoMappingEvent<?>> {
|
||||
|
||||
@@ -45,6 +46,7 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent)
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
public void onApplicationEvent(MongoMappingEvent<?> event) {
|
||||
|
||||
if (event instanceof AfterLoadEvent) {
|
||||
@@ -57,6 +59,22 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
return;
|
||||
}
|
||||
|
||||
if (event instanceof AbstractDeleteEvent) {
|
||||
|
||||
Class<?> eventDomainType = ((AbstractDeleteEvent) event).getType();
|
||||
|
||||
if (eventDomainType != null && domainClass.isAssignableFrom(eventDomainType)) {
|
||||
if (event instanceof BeforeDeleteEvent) {
|
||||
onBeforeDelete(event.getDBObject());
|
||||
}
|
||||
if (event instanceof AfterDeleteEvent) {
|
||||
onAfterDelete(event.getDBObject());
|
||||
}
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
E source = (E) event.getSource();
|
||||
|
||||
@@ -78,31 +96,43 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
|
||||
public void onBeforeConvert(E source) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onBeforeConvert(" + source + ")");
|
||||
LOG.debug("onBeforeConvert({})", source);
|
||||
}
|
||||
}
|
||||
|
||||
public void onBeforeSave(E source, DBObject dbo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onBeforeSave(" + source + ", " + dbo + ")");
|
||||
LOG.debug("onBeforeSave({}, {})", source, dbo);
|
||||
}
|
||||
}
|
||||
|
||||
public void onAfterSave(E source, DBObject dbo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterSave(" + source + ", " + dbo + ")");
|
||||
LOG.debug("onAfterSave({}, {})", source, dbo);
|
||||
}
|
||||
}
|
||||
|
||||
public void onAfterLoad(DBObject dbo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterLoad(" + dbo + ")");
|
||||
LOG.debug("onAfterLoad({})", dbo);
|
||||
}
|
||||
}
|
||||
|
||||
public void onAfterConvert(DBObject dbo, E source) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterConvert(" + dbo + "," + source + ")");
|
||||
LOG.debug("onAfterConvert({}, {})", dbo, source);
|
||||
}
|
||||
}
|
||||
|
||||
public void onAfterDelete(DBObject dbo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterConvert({})", dbo);
|
||||
}
|
||||
}
|
||||
|
||||
public void onBeforeDelete(DBObject dbo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterConvert({})", dbo);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,39 @@
|
||||
/*
|
||||
* Copyright 2013 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Event being thrown after a single document or a set of documents has been deleted. The {@link DBObject} held in the event
* will be the query document <em>after</em> it has been mapped onto the domain type handled.
|
||||
*
|
||||
* @author Martin Baumgartner
|
||||
*/
|
||||
public class AfterDeleteEvent<T> extends AbstractDeleteEvent<T> {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AfterDeleteEvent} for the given {@link DBObject} and type.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
*/
|
||||
public AfterDeleteEvent(DBObject dbo, Class<T> type) {
|
||||
super(dbo, type);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,39 @@
|
||||
/*
|
||||
* Copyright 2013 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Event being thrown before a document is deleted. The {@link DBObject} held in the event will represent the query
|
||||
* document <em>before</em> being mapped based on the domain class handled.
|
||||
*
|
||||
* @author Martin Baumgartner
|
||||
*/
|
||||
public class BeforeDeleteEvent<T> extends AbstractDeleteEvent<T> {
|
||||
|
||||
private static final long serialVersionUID = -2627547705679734497L;
|
||||
|
||||
/**
|
||||
* Creates a new {@link BeforeDeleteEvent} for the given {@link DBObject} and type.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
*/
|
||||
public BeforeDeleteEvent(DBObject dbo, Class<T> type) {
|
||||
super(dbo, type);
|
||||
}
|
||||
}
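To show how the new delete events are consumed, here is a hedged sketch of a listener; it mirrors the pattern of LoggingEventListener and uses `Object` so it reacts to deletes for any domain type:

```java
import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener;

import com.mongodb.DBObject;

/**
 * Sketch of a listener picking up the new delete callbacks. The BeforeDeleteEvent carries
 * the raw query document, the AfterDeleteEvent the query document after mapping onto the
 * handled domain type.
 */
public class DeleteLoggingListener extends AbstractMongoEventListener<Object> {

	@Override
	public void onBeforeDelete(DBObject dbo) {
		System.out.println("About to delete documents matching: " + dbo);
	}

	@Override
	public void onAfterDelete(DBObject dbo) {
		System.out.println("Deleted documents matching: " + dbo);
	}
}
```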
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,15 +17,19 @@ package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.context.ApplicationListener;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* {@link ApplicationListener} for Mongo mapping events logging the events.
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Martin Baumgartner
|
||||
*/
|
||||
public class LoggingEventListener extends AbstractMongoEventListener<Object> {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(LoggingEventListener.class);
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(LoggingEventListener.class);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -33,7 +37,7 @@ public class LoggingEventListener extends AbstractMongoEventListener<Object> {
|
||||
*/
|
||||
@Override
|
||||
public void onBeforeConvert(Object source) {
|
||||
log.info("onBeforeConvert: " + source);
|
||||
LOGGER.info("onBeforeConvert: {}", source);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -42,10 +46,7 @@ public class LoggingEventListener extends AbstractMongoEventListener<Object> {
|
||||
*/
|
||||
@Override
|
||||
public void onBeforeSave(Object source, DBObject dbo) {
|
||||
try {
|
||||
log.info("onBeforeSave: " + source + ", " + dbo);
|
||||
} catch (Throwable ignored) {
|
||||
}
|
||||
LOGGER.info("onBeforeSave: {}, {}", source, dbo);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -54,7 +55,7 @@ public class LoggingEventListener extends AbstractMongoEventListener<Object> {
|
||||
*/
|
||||
@Override
|
||||
public void onAfterSave(Object source, DBObject dbo) {
|
||||
log.info("onAfterSave: " + source + ", " + dbo);
|
||||
LOGGER.info("onAfterSave: {}, {}", source, dbo);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -63,7 +64,7 @@ public class LoggingEventListener extends AbstractMongoEventListener<Object> {
|
||||
*/
|
||||
@Override
|
||||
public void onAfterLoad(DBObject dbo) {
|
||||
log.info("onAfterLoad: " + dbo);
|
||||
LOGGER.info("onAfterLoad: {}", dbo);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -72,6 +73,24 @@ public class LoggingEventListener extends AbstractMongoEventListener<Object> {
|
||||
*/
|
||||
@Override
|
||||
public void onAfterConvert(DBObject dbo, Object source) {
|
||||
log.info("onAfterConvert: " + dbo + ", " + source);
|
||||
LOGGER.info("onAfterConvert: {}, {}", dbo, source);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onAfterDelete(com.mongodb.DBObject)
|
||||
*/
|
||||
@Override
|
||||
public void onAfterDelete(DBObject dbo) {
|
||||
LOGGER.info("onAfterDelete: {}", dbo);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeDelete(com.mongodb.DBObject)
|
||||
*/
|
||||
@Override
|
||||
public void onBeforeDelete(DBObject dbo) {
|
||||
LOGGER.info("onBeforeDelete: {}", dbo);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -39,6 +39,10 @@ import com.mongodb.DBObject;
|
||||
/**
|
||||
* Central class for creating queries. It follows a fluent API style so that you can easily chain together multiple
|
||||
* criteria. Static import of the 'Criteria.where' method will improve readability.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class Criteria implements CriteriaDefinition {
|
||||
|
||||
@@ -396,34 +400,54 @@ public class Criteria implements CriteriaDefinition {
|
||||
|
||||
/**
|
||||
* Creates an 'or' criteria using the $or operator for all of the provided criteria
|
||||
* <p>
|
||||
* Note that MongoDB does not support wrapping an $or operator in a $not operator.
|
||||
* <p>
|
||||
*
|
||||
* @throws IllegalArgumentException if {@link #orOperator(Criteria...)} follows a not() call directly.
|
||||
* @param criteria
|
||||
*/
|
||||
public Criteria orOperator(Criteria... criteria) {
|
||||
BasicDBList bsonList = createCriteriaList(criteria);
|
||||
criteriaChain.add(new Criteria("$or").is(bsonList));
|
||||
return this;
|
||||
return registerCriteriaChainElement(new Criteria("$or").is(bsonList));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a 'nor' criteria using the $nor operator for all of the provided criteria
|
||||
* Creates a 'nor' criteria using the $nor operator for all of the provided criteria.
|
||||
* <p>
|
||||
* Note that MongoDB does not support wrapping a $nor operator in a $not operator.
|
||||
* <p>
|
||||
*
|
||||
* @throws IllegalArgumentException if {@link #norOperator(Criteria...)} follows a not() call directly.
|
||||
* @param criteria
|
||||
*/
|
||||
public Criteria norOperator(Criteria... criteria) {
|
||||
BasicDBList bsonList = createCriteriaList(criteria);
|
||||
criteriaChain.add(new Criteria("$nor").is(bsonList));
|
||||
return this;
|
||||
return registerCriteriaChainElement(new Criteria("$nor").is(bsonList));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an 'and' criteria using the $and operator for all of the provided criteria
|
||||
* Creates an 'and' criteria using the $and operator for all of the provided criteria.
|
||||
* <p>
|
||||
* Note that MongoDB does not support wrapping an $and operator in a $not operator.
|
||||
* <p>
|
||||
*
|
||||
* @throws IllegalArgumentException if {@link #andOperator(Criteria...)} follows a not() call directly.
|
||||
* @param criteria
|
||||
*/
|
||||
public Criteria andOperator(Criteria... criteria) {
|
||||
BasicDBList bsonList = createCriteriaList(criteria);
|
||||
criteriaChain.add(new Criteria("$and").is(bsonList));
|
||||
return registerCriteriaChainElement(new Criteria("$and").is(bsonList));
|
||||
}
|
||||
|
||||
private Criteria registerCriteriaChainElement(Criteria criteria) {
|
||||
|
||||
if (lastOperatorWasNot()) {
|
||||
throw new IllegalArgumentException("operator $not is not allowed around criteria chain element: "
|
||||
+ criteria.getCriteriaObject());
|
||||
} else {
|
||||
criteriaChain.add(criteria);
|
||||
}
|
||||
return this;
|
||||
}
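A small usage sketch of the chained operators and the new guard; the field names and values are arbitrary:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

public class CriteriaChainingSketch {

	public static void main(String[] args) {

		// Combine criteria via $and, nesting an $or for the city alternatives.
		Criteria criteria = new Criteria().andOperator(
				where("age").gte(18),
				new Criteria().orOperator(where("city").is("Berlin"), where("city").is("Cologne")));

		Query query = new Query(criteria);
		System.out.println(query.getQueryObject());

		// Not supported by MongoDB and now rejected eagerly with an IllegalArgumentException:
		// where("age").not().orOperator(where("city").is("Berlin"));
	}
}
```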
|
||||
@@ -468,6 +492,7 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
DBObject queryCriteria = new BasicDBObject();
|
||||
if (isValue != NOT_SET) {
|
||||
queryCriteria.put(this.key, this.isValue);
|
||||
|
||||
@@ -17,16 +17,26 @@ package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @author Patryk Wasik
|
||||
*/
|
||||
public class Field {
|
||||
|
||||
private final Map<String, Integer> criteria = new HashMap<String, Integer>();
|
||||
private final Map<String, Object> slices = new HashMap<String, Object>();
|
||||
private final Map<String, Criteria> elemMatchs = new HashMap<String, Criteria>();
|
||||
private String postionKey;
|
||||
private int positionValue;
|
||||
|
||||
public Field include(String key) {
|
||||
criteria.put(key, Integer.valueOf(1));
|
||||
@@ -48,14 +58,50 @@ public class Field {
|
||||
return this;
|
||||
}
|
||||
|
||||
public Field elemMatch(String key, Criteria elemMatchCriteria) {
|
||||
elemMatchs.put(key, elemMatchCriteria);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The array field must appear in the query. Only one positional {@code $} operator can appear in the projection and
|
||||
* only one array field can appear in the query.
|
||||
*
|
||||
* @param field query array field, must not be {@literal null} or empty.
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
public Field position(String field, int value) {
|
||||
|
||||
Assert.hasText(field, "DocumentField must not be null or empty!");
|
||||
|
||||
postionKey = field;
|
||||
positionValue = value;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
public DBObject getFieldsObject() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
|
||||
for (String k : criteria.keySet()) {
|
||||
dbo.put(k, criteria.get(k));
|
||||
}
|
||||
|
||||
for (String k : slices.keySet()) {
|
||||
dbo.put(k, new BasicDBObject("$slice", slices.get(k)));
|
||||
}
|
||||
|
||||
for (Entry<String, Criteria> entry : elemMatchs.entrySet()) {
|
||||
DBObject dbObject = new BasicDBObject("$elemMatch", entry.getValue().getCriteriaObject());
|
||||
dbo.put(entry.getKey(), dbObject);
|
||||
}
|
||||
|
||||
if (postionKey != null) {
|
||||
dbo.put(postionKey + ".$", positionValue);
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
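A sketch of the new positional projection; the collection layout and field names are assumed for illustration:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.DBObject;

public class PositionProjectionSketch {

	public static void main(String[] args) {

		// The array field has to appear in the query for the positional $ operator to apply.
		Query query = new Query(where("grades").gte(85));

		// Project the first matching element of the "grades" array.
		query.fields().include("name").position("grades", 1);

		DBObject projection = query.getFieldsObject();
		System.out.println(projection); // e.g. { "name" : 1 , "grades.$" : 1 }
	}
}
```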
|
||||
@@ -84,7 +130,15 @@ public class Field {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
if (!this.elemMatchs.equals(that.elemMatchs)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
boolean samePositionKey = this.postionKey == null ? that.postionKey == null : this.postionKey
|
||||
.equals(that.postionKey);
|
||||
boolean samePositionValue = this.positionValue == that.positionValue;
|
||||
|
||||
return samePositionKey && samePositionValue;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -97,7 +151,10 @@ public class Field {
|
||||
int result = 17;
|
||||
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(this.criteria);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(this.elemMatchs);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(this.slices);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(this.postionKey);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(this.positionValue);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.mongodb.core.geo.CustomMetric;
|
||||
import org.springframework.data.mongodb.core.geo.Distance;
|
||||
import org.springframework.data.mongodb.core.geo.Metric;
|
||||
@@ -29,6 +30,7 @@ import com.mongodb.DBObject;
|
||||
* Builder class to build near-queries.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class NearQuery {
|
||||
|
||||
@@ -38,6 +40,7 @@ public class NearQuery {
|
||||
private Metric metric;
|
||||
private boolean spherical;
|
||||
private Integer num;
|
||||
private Integer skip;
|
||||
|
||||
/**
|
||||
* Creates a new {@link NearQuery}.
|
||||
@@ -116,7 +119,7 @@ public class NearQuery {
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the number of results to return.
|
||||
* Configures the maximum number of results to return.
|
||||
*
|
||||
* @param num
|
||||
* @return
|
||||
@@ -126,6 +129,29 @@ public class NearQuery {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the number of results to skip.
|
||||
*
|
||||
* @param skip
|
||||
* @return
|
||||
*/
|
||||
public NearQuery skip(int skip) {
|
||||
this.skip = skip;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link Pageable} to use.
|
||||
*
|
||||
* @param pageable
|
||||
* @return
|
||||
*/
|
||||
public NearQuery with(Pageable pageable) {
|
||||
this.num = pageable.getOffset() + pageable.getPageSize();
|
||||
this.skip = pageable.getOffset();
|
||||
return this;
|
||||
}
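A sketch of the new paging support on NearQuery, assuming kilometer-based distances and arbitrary coordinates:

```java
import org.springframework.data.domain.PageRequest;
import org.springframework.data.mongodb.core.geo.Metrics;
import org.springframework.data.mongodb.core.geo.Point;
import org.springframework.data.mongodb.core.query.NearQuery;

public class NearQuerySketch {

	public static void main(String[] args) {

		// Page 1 (zero-based) with 10 results per page: num = offset + size = 20, skip = 10.
		NearQuery query = NearQuery.near(new Point(12.3, 45.6), Metrics.KILOMETERS)
				.maxDistance(5)
				.with(new PageRequest(1, 10));

		System.out.println(query.getSkip()); // 10
	}
}
```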
|
||||
/**
|
||||
* Sets the max distance results shall have from the configured origin. If a {@link Metric} was set before the given
|
||||
* value will be interpreted as being a value in that metric. E.g.
|
||||
@@ -290,9 +316,18 @@ public class NearQuery {
|
||||
*/
|
||||
public NearQuery query(Query query) {
|
||||
this.query = query;
|
||||
this.skip = query.getSkip();
|
||||
this.num = query.getLimit();
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the number of elements to skip.
|
||||
*/
|
||||
public Integer getSkip() {
|
||||
return skip;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link DBObject} built by the {@link NearQuery}.
|
||||
*
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,11 +15,31 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
|
||||
/**
|
||||
* An enum that specifies the ordering for sort or index specifications
|
||||
*
|
||||
* @author trisberg
|
||||
* @deprecated prefer {@link Direction}
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@Deprecated
|
||||
public enum Order {
|
||||
ASCENDING, DESCENDING
|
||||
|
||||
ASCENDING {
|
||||
@Override
|
||||
public Direction toDirection() {
|
||||
return Direction.ASC;
|
||||
}
|
||||
},
|
||||
|
||||
DESCENDING {
|
||||
@Override
|
||||
public Direction toDirection() {
|
||||
return Direction.DESC;
|
||||
}
|
||||
};
|
||||
|
||||
public abstract Direction toDirection();
|
||||
}
|
||||
|
||||
@@ -19,11 +19,15 @@ import static org.springframework.data.mongodb.core.query.SerializationUtils.*;
|
||||
import static org.springframework.util.ObjectUtils.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Order;
|
||||
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -33,14 +37,16 @@ import com.mongodb.DBObject;
|
||||
/**
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class Query {
|
||||
|
||||
private final static String RESTRICTED_TYPES_KEY = "_$RESTRICTED_TYPES";
|
||||
|
||||
private final Set<Class<?>> restrictedTypes = new HashSet<Class<?>>();
|
||||
private LinkedHashMap<String, Criteria> criteria = new LinkedHashMap<String, Criteria>();
|
||||
private Field fieldSpec;
|
||||
private Sort coreSort;
|
||||
@SuppressWarnings("deprecation")
|
||||
private org.springframework.data.mongodb.core.query.Sort sort;
|
||||
private Sort sort;
|
||||
private int skip;
|
||||
private int limit;
|
||||
private String hint;
|
||||
@@ -55,8 +61,7 @@ public class Query {
|
||||
return new Query(criteria);
|
||||
}
|
||||
|
||||
public Query() {
|
||||
}
|
||||
public Query() {}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Query} using the given {@link Criteria}.
|
||||
@@ -115,21 +120,6 @@ public class Query {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link org.springframework.data.mongodb.core.query.Sort} instance to define ordering properties.
|
||||
*
|
||||
* @deprecated use {@link #with(Sort)} instead
|
||||
* @return
|
||||
*/
|
||||
@Deprecated
|
||||
public org.springframework.data.mongodb.core.query.Sort sort() {
|
||||
if (this.sort == null) {
|
||||
this.sort = new org.springframework.data.mongodb.core.query.Sort();
|
||||
}
|
||||
|
||||
return this.sort;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the given pagination information on the {@link Query} instance. Will transparently set {@code skip} and
|
||||
* {@code limit} as well as applying the {@link Sort} instance defined with the {@link Pageable}.
|
||||
@@ -161,22 +151,62 @@ public class Query {
|
||||
return this;
|
||||
}
|
||||
|
||||
if (this.coreSort == null) {
|
||||
this.coreSort = sort;
|
||||
for (Order order : sort) {
|
||||
if (order.isIgnoreCase()) {
|
||||
throw new IllegalArgumentException(String.format("Gven sort contained an Order for %s with ignore case! "
|
||||
+ "MongoDB does not support sorting ignoreing case currently!", order.getProperty()));
|
||||
}
|
||||
}
|
||||
|
||||
if (this.sort == null) {
|
||||
this.sort = sort;
|
||||
} else {
|
||||
this.coreSort = this.coreSort.and(sort);
|
||||
this.sort = this.sort.and(sort);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
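A sketch of sorting via the Spring Data Commons Sort, including the new ignore-case guard; property names are arbitrary:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.query.Query;

public class SortSketch {

	public static void main(String[] args) {

		// Multiple with(Sort) calls are combined via Sort#and(...).
		Query query = new Query(where("lastname").is("Matthews"))
				.with(new Sort(Direction.ASC, "lastname"))
				.with(new Sort(Direction.DESC, "age"));

		System.out.println(query.getSortObject()); // e.g. { "lastname" : 1 , "age" : -1 }

		// A Sort built from new Sort.Order("lastname").ignoreCase() would be rejected with
		// an IllegalArgumentException, as MongoDB cannot sort ignoring case.
	}
}
```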
|
||||
/**
|
||||
* @return the restrictedTypes
|
||||
*/
|
||||
public Set<Class<?>> getRestrictedTypes() {
|
||||
return restrictedTypes == null ? Collections.<Class<?>> emptySet() : restrictedTypes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Restricts the query to only return documents instances that are exactly of the given types.
|
||||
*
|
||||
* @param type may not be {@literal null}
|
||||
* @param additionalTypes may not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public Query restrict(Class<?> type, Class<?>... additionalTypes) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
Assert.notNull(additionalTypes, "AdditionalTypes must not be null");
|
||||
|
||||
restrictedTypes.add(type);
|
||||
for (Class<?> additionalType : additionalTypes) {
|
||||
restrictedTypes.add(additionalType);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
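A sketch of the new type restriction; the Contact/Person/Company hierarchy is hypothetical:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.query.Query;

public class RestrictSketch {

	// Hypothetical type hierarchy used only for illustration.
	static class Contact {}
	static class Person extends Contact {}
	static class Company extends Contact {}

	public static void main(String[] args) {

		// Only return documents that map exactly to Person or Company, even though the
		// query might be executed against the Contact collection.
		Query query = new Query(where("name").is("Acme")).restrict(Person.class, Company.class);
		System.out.println(query.getQueryObject());
	}
}
```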
|
||||
public DBObject getQueryObject() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
for (String k : criteria.keySet()) {
|
||||
CriteriaDefinition c = criteria.get(k);
|
||||
DBObject cl = c.getCriteriaObject();
|
||||
dbo.putAll(cl);
|
||||
}
|
||||
|
||||
if (!restrictedTypes.isEmpty()) {
|
||||
dbo.put(RESTRICTED_TYPES_KEY, getRestrictedTypes());
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
|
||||
|
||||
@@ -187,25 +217,20 @@ public class Query {
|
||||
return fieldSpec.getFieldsObject();
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public DBObject getSortObject() {
|
||||
|
||||
if (this.coreSort == null && this.sort == null) {
|
||||
if (this.sort == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
|
||||
if (this.coreSort != null) {
|
||||
for (org.springframework.data.domain.Sort.Order order : this.coreSort) {
|
||||
if (this.sort != null) {
|
||||
for (org.springframework.data.domain.Sort.Order order : this.sort) {
|
||||
dbo.put(order.getProperty(), order.isAscending() ? 1 : -1);
|
||||
}
|
||||
}
|
||||
|
||||
if (this.sort != null) {
|
||||
dbo.putAll(this.sort.getSortObject());
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
|
||||
|
||||
@@ -280,4 +305,17 @@ public class Query {
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given key is the one used to hold the type restriction information.
|
||||
*
|
||||
* @deprecated don't call this method as the restricted type handling will undergo some significant changes going
|
||||
* forward.
|
||||
* @param key
|
||||
* @return
|
||||
*/
|
||||
@Deprecated
|
||||
public static boolean isRestrictedTypeKey(String key) {
|
||||
return RESTRICTED_TYPES_KEY.equals(key);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,56 +0,0 @@
|
||||
/*
|
||||
* Copyright 2010-2012 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Helper class to define sorting criterias for a Query instance.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @deprecated use {@link org.springframework.data.domain.Sort} instead. See
|
||||
* {@link Query#with(org.springframework.data.domain.Sort)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public class Sort {
|
||||
|
||||
private Map<String, Order> fieldSpec = new LinkedHashMap<String, Order>();
|
||||
|
||||
public Sort() {
|
||||
}
|
||||
|
||||
public Sort(String key, Order order) {
|
||||
fieldSpec.put(key, order);
|
||||
}
|
||||
|
||||
public Sort on(String key, Order order) {
|
||||
fieldSpec.put(key, order);
|
||||
return this;
|
||||
}
|
||||
|
||||
public DBObject getSortObject() {
|
||||
DBObject dbo = new BasicDBObject();
|
||||
for (String k : fieldSpec.keySet()) {
|
||||
dbo.put(k, fieldSpec.get(k).equals(Order.ASCENDING) ? 1 : -1);
|
||||
}
|
||||
return dbo;
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,7 +15,11 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository;
|
||||
|
||||
import java.lang.annotation.*;
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* Annotation to declare finder queries directly on repository methods. Both attributes allow using a placeholder
|
||||
@@ -43,4 +47,12 @@ public @interface Query {
|
||||
* @return
|
||||
*/
|
||||
String fields() default "";
|
||||
|
||||
/**
|
||||
* Returns whether the query defined should be executed as count projection.
|
||||
*
|
||||
* @since 1.3
|
||||
* @return
|
||||
*/
|
||||
boolean count() default false;
|
||||
}
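A repository sketch using the new count attribute; the Person domain class and PersonRepository are hypothetical and only here to make the example self-contained:

```java
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.Query;

// Hypothetical domain class.
class Person {
	@Id String id;
	String lastname;
}

// With count = true the declared query is executed as a count projection, so the method
// returns the number of matching documents instead of the documents themselves.
public interface PersonRepository extends MongoRepository<Person, String> {

	@Query(value = "{ 'lastname' : ?0 }", count = true)
	long countByLastname(String lastname);
}
```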
|
||||
@@ -53,14 +53,6 @@ public class MongoRepositoryBean<T> extends CdiRepositoryBean<T> {
|
||||
this.operations = operations;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see javax.enterprise.inject.spi.Bean#getScope()
|
||||
*/
|
||||
public Class<? extends Annotation> getScope() {
|
||||
return operations.getScope();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.cdi.CdiRepositoryBean#create(javax.enterprise.context.spi.CreationalContext, java.lang.Class)
|
||||
|
||||
@@ -17,6 +17,8 @@ package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.data.domain.PageImpl;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
@@ -36,9 +38,12 @@ import org.springframework.util.Assert;
|
||||
* Base class for {@link RepositoryQuery} implementations for Mongo.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
|
||||
private static final ConversionService CONVERSION_SERVICE = new DefaultConversionService();
|
||||
|
||||
private final MongoQueryMethod method;
|
||||
private final MongoOperations operations;
|
||||
|
||||
@@ -86,18 +91,22 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
return new CollectionExecution(accessor.getPageable()).execute(query);
|
||||
} else if (method.isPageQuery()) {
|
||||
return new PagedExecution(accessor.getPageable()).execute(query);
|
||||
} else {
|
||||
return new SingleEntityExecution().execute(query);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link Query} instance using the given {@link ParameterAccessor}
|
||||
*
|
||||
* @param accessor must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
protected abstract Query createQuery(ConvertingParameterAccessor accessor);
|
||||
Object result = new SingleEntityExecution(isCountQuery()).execute(query);
|
||||
|
||||
if (result == null) {
|
||||
return result;
|
||||
}
|
||||
|
||||
Class<?> expectedReturnType = method.getReturnType().getType();
|
||||
|
||||
if (expectedReturnType.isAssignableFrom(result.getClass())) {
|
||||
return result;
|
||||
}
|
||||
|
||||
return CONVERSION_SERVICE.convert(result, expectedReturnType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link Query} instance using the given {@link ConvertingParameterAccessor}. Will delegate to
|
||||
@@ -111,6 +120,21 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
return createQuery(accessor);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link Query} instance using the given {@link ParameterAccessor}
|
||||
*
|
||||
* @param accessor must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
protected abstract Query createQuery(ConvertingParameterAccessor accessor);
|
||||
|
||||
/**
|
||||
* Returns whether the query should get a count projection applied.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
protected abstract boolean isCountQuery();
|
||||
|
||||
private abstract class Execution {
|
||||
|
||||
abstract Object execute(Query query);
|
||||
@@ -191,6 +215,12 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
*/
|
||||
class SingleEntityExecution extends Execution {
|
||||
|
||||
private final boolean countProjection;
|
||||
|
||||
private SingleEntityExecution(boolean countProjection) {
|
||||
this.countProjection = countProjection;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.core.query.Query)
|
||||
@@ -199,7 +229,8 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
Object execute(Query query) {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
return operations.findOne(query, metadata.getJavaType());
|
||||
return countProjection ? operations.count(query, metadata.getJavaType()) : operations.findOne(query,
|
||||
metadata.getJavaType());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -257,6 +288,11 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
nearQuery.maxDistance(maxDistance).in(maxDistance.getMetric());
|
||||
}
|
||||
|
||||
Pageable pageable = accessor.getPageable();
|
||||
if (pageable != null) {
|
||||
nearQuery.with(pageable);
|
||||
}
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
return (GeoResults<Object>) operations.geoNear(nearQuery, metadata.getJavaType(), metadata.getCollectionName());
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -23,6 +23,7 @@ import org.springframework.core.MethodParameter;
|
||||
import org.springframework.data.mongodb.core.geo.Distance;
|
||||
import org.springframework.data.mongodb.core.geo.Point;
|
||||
import org.springframework.data.mongodb.repository.Near;
|
||||
import org.springframework.data.mongodb.repository.query.MongoParameters.MongoParameter;
|
||||
import org.springframework.data.repository.query.Parameter;
|
||||
import org.springframework.data.repository.query.Parameters;
|
||||
|
||||
@@ -31,7 +32,7 @@ import org.springframework.data.repository.query.Parameters;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class MongoParameters extends Parameters {
|
||||
public class MongoParameters extends Parameters<MongoParameters, MongoParameter> {
|
||||
|
||||
private final Integer distanceIndex;
|
||||
private Integer nearIndex;
|
||||
@@ -55,6 +56,14 @@ public class MongoParameters extends Parameters {
|
||||
}
|
||||
}
|
||||
|
||||
private MongoParameters(List<MongoParameter> parameters, Integer distanceIndex, Integer nearIndex) {
|
||||
|
||||
super(parameters);
|
||||
|
||||
this.distanceIndex = distanceIndex;
|
||||
this.nearIndex = nearIndex;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private final int getNearIndex(List<Class<?>> parameterTypes) {
|
||||
|
||||
@@ -81,7 +90,7 @@ public class MongoParameters extends Parameters {
|
||||
* @see org.springframework.data.repository.query.Parameters#createParameter(org.springframework.core.MethodParameter)
|
||||
*/
|
||||
@Override
|
||||
protected Parameter createParameter(MethodParameter parameter) {
|
||||
protected MongoParameter createParameter(MethodParameter parameter) {
|
||||
|
||||
MongoParameter mongoParameter = new MongoParameter(parameter);
|
||||
|
||||
@@ -114,6 +123,15 @@ public class MongoParameters extends Parameters {
|
||||
return nearIndex;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.query.Parameters#createFrom(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
protected MongoParameters createFrom(List<MongoParameter> parameters) {
|
||||
return new MongoParameters(parameters, this.distanceIndex, this.nearIndex);
|
||||
}
|
||||
|
||||
/**
|
||||
 * Custom {@link Parameter} implementation adding parameters of type {@link Distance} to the special ones.
 *

@@ -169,68 +169,68 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
			PotentiallyConvertingIterator parameters) {

		switch (type) {
		case AFTER:
		case GREATER_THAN:
			return criteria.gt(parameters.nextConverted(property));
		case GREATER_THAN_EQUAL:
			return criteria.gte(parameters.nextConverted(property));
		case BEFORE:
		case LESS_THAN:
			return criteria.lt(parameters.nextConverted(property));
		case LESS_THAN_EQUAL:
			return criteria.lte(parameters.nextConverted(property));
		case BETWEEN:
			return criteria.gt(parameters.nextConverted(property)).lt(parameters.nextConverted(property));
		case IS_NOT_NULL:
			return criteria.ne(null);
		case IS_NULL:
			return criteria.is(null);
		case NOT_IN:
			return criteria.nin(nextAsArray(parameters, property));
		case IN:
			return criteria.in(nextAsArray(parameters, property));
		case LIKE:
		case STARTING_WITH:
		case ENDING_WITH:
		case CONTAINING:
			String value = parameters.next().toString();
			return criteria.regex(toLikeRegex(value, type));
		case REGEX:
			return criteria.regex(parameters.next().toString());
		case EXISTS:
			return criteria.exists((Boolean) parameters.next());
		case TRUE:
			return criteria.is(true);
		case FALSE:
			return criteria.is(false);
		case NEAR:

			Distance distance = accessor.getMaxDistance();
			Point point = accessor.getGeoNearLocation();
			point = point == null ? nextAs(parameters, Point.class) : point;

			if (distance == null) {
				return criteria.near(point);
			} else {
				if (distance.getMetric() != null) {
					criteria.nearSphere(point);
				} else {
					criteria.near(point);
				}
				criteria.maxDistance(distance.getNormalizedValue());
			}
			return criteria;

		case WITHIN:
			Object parameter = parameters.next();
			return criteria.within((Shape) parameter);
		case SIMPLE_PROPERTY:
			return criteria.is(parameters.nextConverted(property));
		case NEGATING_SIMPLE_PROPERTY:
			return criteria.ne(parameters.nextConverted(property));
		default:
			throw new IllegalArgumentException("Unsupported keyword!");
		}
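
The switch above is where keywords derived from a repository method name are translated into MongoDB `Criteria` calls. As a rough illustration only, not code from this commit range, a finder interface such as the following sketch would exercise several of these branches; the `Person` entity and its properties are assumptions:

```java
import java.util.List;

import org.springframework.data.mongodb.core.geo.Distance;
import org.springframework.data.mongodb.core.geo.Point;
import org.springframework.data.mongodb.repository.MongoRepository;

// Hypothetical repository; Person, age, lastname and location are made-up names.
interface PersonRepository extends MongoRepository<Person, String> {

	List<Person> findByAgeGreaterThan(int age);                    // GREATER_THAN -> criteria.gt(...)

	List<Person> findByAgeBetween(int lower, int upper);           // BETWEEN -> criteria.gt(...).lt(...)

	List<Person> findByLastnameStartingWith(String prefix);        // STARTING_WITH -> criteria.regex(...)

	List<Person> findByLocationNear(Point location, Distance max); // NEAR -> near()/nearSphere() plus maxDistance()
}
```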
/**
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -28,7 +28,6 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.repository.Query;
|
||||
import org.springframework.data.repository.core.RepositoryMetadata;
|
||||
import org.springframework.data.repository.query.Parameters;
|
||||
import org.springframework.data.repository.query.QueryMethod;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
@@ -42,9 +41,8 @@ import org.springframework.util.StringUtils;
|
||||
*/
|
||||
public class MongoQueryMethod extends QueryMethod {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static final List<Class<?>> GEO_NEAR_RESULTS = Arrays
|
||||
.asList(GeoResult.class, GeoResults.class, GeoPage.class);
|
||||
@SuppressWarnings("unchecked") private static final List<Class<?>> GEO_NEAR_RESULTS = Arrays.asList(GeoResult.class,
|
||||
GeoResults.class, GeoPage.class);
|
||||
|
||||
private final Method method;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
@@ -72,7 +70,7 @@ public class MongoQueryMethod extends QueryMethod {
|
||||
* @see org.springframework.data.repository.query.QueryMethod#getParameters(java.lang.reflect.Method)
|
||||
*/
|
||||
@Override
|
||||
protected Parameters createParameters(Method method) {
|
||||
protected MongoParameters createParameters(Method method) {
|
||||
return new MongoParameters(method, isGeoNearQuery(method));
|
||||
}
|
||||
|
||||
@@ -171,7 +169,7 @@ public class MongoQueryMethod extends QueryMethod {
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private Query getQueryAnnotation() {
|
||||
Query getQueryAnnotation() {
|
||||
return method.getAnnotation(Query.class);
|
||||
}
|
||||
|
||||
|
||||
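`MongoQueryMethod` classifies a method as a geo-near query when its return type is one of `GeoResult`, `GeoResults` or `GeoPage`, and `createParameters` now returns the Mongo-specific `MongoParameters`. A minimal sketch of a method that would be picked up this way; the repository and property names are assumptions:

```java
import org.springframework.data.mongodb.core.geo.Distance;
import org.springframework.data.mongodb.core.geo.GeoResults;
import org.springframework.data.mongodb.core.geo.Point;
import org.springframework.data.mongodb.repository.MongoRepository;

// Hypothetical geo-near finder; the GeoResults return type is what marks it as a geo-near query.
interface PersonRepository extends MongoRepository<Person, String> {

	GeoResults<Person> findByLocationNear(Point location, Distance maxDistance);
}
```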
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2002-2010 the original author or authors.
|
||||
* Copyright 2002-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -77,4 +77,13 @@ public class PartTreeMongoQuery extends AbstractMongoQuery {
|
||||
protected Query createCountQuery(ConvertingParameterAccessor accessor) {
|
||||
return new MongoQueryCreator(tree, accessor, context, false).createQuery();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isCountQuery()
|
||||
*/
|
||||
@Override
|
||||
protected boolean isCountQuery() {
|
||||
return tree.isCountProjection();
|
||||
}
|
||||
}
|
||||
|
||||
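With `isCountQuery()` backed by `tree.isCountProjection()`, derived queries can now run as count projections. A minimal sketch of a method the part tree would treat that way; the names are assumptions:

```java
import org.springframework.data.mongodb.repository.MongoRepository;

// Hypothetical derived count query; the "countBy" prefix makes tree.isCountProjection() return true.
interface PersonRepository extends MongoRepository<Person, String> {

	long countByLastname(String lastname);
}
```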
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2012 the original author or authors.
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,10 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Order;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
import com.mongodb.DBCursor;
|
||||
|
||||
@@ -27,6 +24,7 @@ import com.mongodb.DBCursor;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@Deprecated
|
||||
public abstract class QueryUtils {
|
||||
|
||||
private QueryUtils() {
|
||||
@@ -34,51 +32,13 @@ public abstract class QueryUtils {
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies the given {@link Pageable} to the given {@link Query}. Will do nothing if {@link Pageable} is
|
||||
* {@literal null}.
|
||||
* Turns an {@link Order} into an {@link org.springframework.data.mongodb.core.query.Order}.
|
||||
*
|
||||
* @deprecated use {@link Query#with(Pageable)}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param pageable
|
||||
* @deprecated use {@link Order} directly.
|
||||
* @param order
|
||||
* @return
|
||||
*/
|
||||
@Deprecated
|
||||
public static Query applyPagination(Query query, Pageable pageable) {
|
||||
|
||||
if (pageable == null) {
|
||||
return query;
|
||||
}
|
||||
|
||||
query.limit(pageable.getPageSize());
|
||||
query.skip(pageable.getOffset());
|
||||
|
||||
return query.with(pageable.getSort());
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies the given {@link Sort} to the {@link Query}. Will do nothing if {@link Sort} is {@literal null}.
|
||||
*
|
||||
* @deprecated use {@link Query#with(Pageable)}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param sort
|
||||
* @return
|
||||
*/
|
||||
@Deprecated
|
||||
public static Query applySorting(Query query, Sort sort) {
|
||||
|
||||
if (sort == null) {
|
||||
return query;
|
||||
}
|
||||
|
||||
org.springframework.data.mongodb.core.query.Sort bSort = query.sort();
|
||||
|
||||
for (Order order : sort) {
|
||||
bSort.on(order.getProperty(), toOrder(order));
|
||||
}
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
public static org.springframework.data.mongodb.core.query.Order toOrder(Order order) {
|
||||
return order.isAscending() ? org.springframework.data.mongodb.core.query.Order.ASCENDING
|
||||
: org.springframework.data.mongodb.core.query.Order.DESCENDING;
|
||||
|
||||
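`QueryUtils` is now deprecated in favour of the fluent methods on `Query` itself, as the updated Javadoc notes. A small sketch of the replacement style; the criteria, page size and property names are assumptions:

```java
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class PaginationExample {

	// Instead of QueryUtils.applyPagination(query, pageable) and QueryUtils.applySorting(query, sort).
	static Query pagedQuery() {

		Query query = new Query(Criteria.where("lastname").is("Matthews"));
		query.with(new PageRequest(0, 20));                    // applies limit, skip and the request's sort
		query.with(new Sort(Sort.Direction.ASC, "firstname")); // applies additional sort criteria

		return query;
	}
}
```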
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -38,17 +38,21 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
|
||||
private final String query;
|
||||
private final String fieldSpec;
|
||||
private final boolean isCountQuery;
|
||||
|
||||
/**
|
||||
* Creates a new {@link StringBasedMongoQuery}.
|
||||
*
|
||||
* @param method
|
||||
* @param template
|
||||
* @param method must not be {@literal null}.
|
||||
* @param template must not be {@literal null}.
|
||||
*/
|
||||
public StringBasedMongoQuery(String query, MongoQueryMethod method, MongoOperations mongoOperations) {
|
||||
|
||||
super(method, mongoOperations);
|
||||
|
||||
this.query = query;
|
||||
this.fieldSpec = method.getFieldSpecification();
|
||||
this.isCountQuery = method.hasAnnotatedQuery() ? method.getQueryAnnotation().count() : false;
|
||||
}
|
||||
|
||||
public StringBasedMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations) {
|
||||
@@ -82,6 +86,15 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
return query;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isCountQuery()
|
||||
*/
|
||||
@Override
|
||||
protected boolean isCountQuery() {
|
||||
return isCountQuery;
|
||||
}
|
||||
|
||||
private String replacePlaceholders(String input, ConvertingParameterAccessor accessor) {
|
||||
|
||||
Matcher matcher = PLACEHOLDER.matcher(input);
|
||||
|
||||
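The constructor now reads the `count` flag from the `@Query` annotation, so a manually declared query can be executed as a count query. A hedged example; the field and method names are assumptions:

```java
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.Query;

// Hypothetical repository; the count attribute is what isCountQuery() picks up.
interface PersonRepository extends MongoRepository<Person, String> {

	@Query(value = "{ 'lastname' : ?0 }", count = true)
	long countByLastnameManually(String lastname);
}
```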
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,12 +22,11 @@ import java.util.Set;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.index.Index;
|
||||
import org.springframework.data.mongodb.core.query.Order;
|
||||
import org.springframework.data.mongodb.repository.query.MongoEntityMetadata;
|
||||
import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery;
|
||||
import org.springframework.data.mongodb.repository.query.QueryUtils;
|
||||
import org.springframework.data.repository.core.support.QueryCreationListener;
|
||||
import org.springframework.data.repository.query.parser.Part;
|
||||
import org.springframework.data.repository.query.parser.Part.Type;
|
||||
@@ -74,14 +73,14 @@ class IndexEnsuringQueryCreationListener implements QueryCreationListener<PartTr
|
||||
return;
|
||||
}
|
||||
String property = part.getProperty().toDotPath();
|
||||
Order order = toOrder(sort, property);
|
||||
Direction order = toDirection(sort, property);
|
||||
index.on(property, order);
|
||||
}
|
||||
|
||||
// Add fixed sorting criteria to index
|
||||
if (sort != null) {
|
||||
for (Sort.Order order : sort) {
|
||||
index.on(order.getProperty(), QueryUtils.toOrder(order));
|
||||
index.on(order.getProperty(), order.getDirection());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -90,13 +89,13 @@ class IndexEnsuringQueryCreationListener implements QueryCreationListener<PartTr
|
||||
LOG.debug(String.format("Created %s!", index));
|
||||
}
|
||||
|
||||
private static Order toOrder(Sort sort, String property) {
|
||||
private static Direction toDirection(Sort sort, String property) {
|
||||
|
||||
if (sort == null) {
|
||||
return Order.DESCENDING;
|
||||
return Direction.DESC;
|
||||
}
|
||||
|
||||
org.springframework.data.domain.Sort.Order order = sort.getOrderFor(property);
|
||||
return order == null ? Order.DESCENDING : order.isAscending() ? Order.ASCENDING : Order.DESCENDING;
|
||||
return order == null ? Direction.DESC : order.isAscending() ? Direction.ASC : Direction.DESC;
|
||||
}
|
||||
}
|
||||
|
||||
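The listener now builds indexes with `org.springframework.data.domain.Sort.Direction` instead of the deprecated `core.query.Order`. Outside the listener the same style looks roughly like this; the entity and property name are assumptions:

```java
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.index.Index;

class IndexExample {

	// Mirrors what the listener now does: Index.on(property, Direction) instead of Order.
	static void ensureLastnameIndex(MongoOperations operations) {
		Index index = new Index().on("lastname", Direction.ASC); // "lastname" is an assumption
		operations.indexOps(Person.class).ensureIndex(index);    // Person is a hypothetical entity
	}
}
```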
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -39,7 +39,6 @@ import com.mysema.query.apt.DefaultConfiguration;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@SuppressWarnings("restriction")
|
||||
@SupportedAnnotationTypes({ "com.mysema.query.annotations.*", "org.springframework.data.mongodb.core.mapping.*" })
|
||||
@SupportedSourceVersion(SourceVersion.RELEASE_6)
|
||||
public class MongoAnnotationProcessor extends AbstractQuerydslProcessor {
|
||||
|
||||
@@ -49,13 +49,14 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
/**
|
||||
 * Creates a new {@link SimpleMongoRepository} for the given {@link MongoEntityInformation} and {@link MongoTemplate}.
|
||||
*
|
||||
* @param metadata
|
||||
* @param template
|
||||
* @param metadata must not be {@literal null}.
|
||||
* @param template must not be {@literal null}.
|
||||
*/
|
||||
public SimpleMongoRepository(MongoEntityInformation<T, ID> metadata, MongoOperations mongoOperations) {
|
||||
|
||||
Assert.notNull(mongoOperations);
|
||||
Assert.notNull(metadata);
|
||||
|
||||
this.entityInformation = metadata;
|
||||
this.mongoOperations = mongoOperations;
|
||||
}
|
||||
@@ -96,7 +97,7 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
*/
|
||||
public T findOne(ID id) {
|
||||
Assert.notNull(id, "The given id must not be null!");
|
||||
return mongoOperations.findById(id, entityInformation.getJavaType());
|
||||
return mongoOperations.findById(id, entityInformation.getJavaType(), entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
private Query getIdQuery(Object id) {
|
||||
@@ -114,11 +115,8 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
public boolean exists(ID id) {
|
||||
|
||||
Assert.notNull(id, "The given id must not be null!");
|
||||
|
||||
final Query idQuery = getIdQuery(id);
|
||||
idQuery.fields();
|
||||
|
||||
return mongoOperations.findOne(idQuery, entityInformation.getJavaType(), entityInformation.getCollectionName()) != null;
|
||||
return mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(),
|
||||
entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -126,7 +124,6 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
* @see org.springframework.data.repository.CrudRepository#count()
|
||||
*/
|
||||
public long count() {
|
||||
|
||||
return mongoOperations.getCollection(entityInformation.getCollectionName()).count();
|
||||
}
|
||||
|
||||
@@ -136,7 +133,7 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
*/
|
||||
public void delete(ID id) {
|
||||
Assert.notNull(id, "The given id must not be null!");
|
||||
mongoOperations.remove(getIdQuery(id), entityInformation.getJavaType());
|
||||
mongoOperations.remove(getIdQuery(id), entityInformation.getJavaType(), entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -166,7 +163,6 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
* @see org.springframework.data.repository.CrudRepository#deleteAll()
|
||||
*/
|
||||
public void deleteAll() {
|
||||
|
||||
mongoOperations.remove(new Query(), entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
@@ -227,7 +223,6 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
* @return
|
||||
*/
|
||||
protected MongoOperations getMongoOperations() {
|
||||
|
||||
return this.mongoOperations;
|
||||
}
|
||||
|
||||
@@ -235,7 +230,6 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
* @return the entityInformation
|
||||
*/
|
||||
protected MongoEntityInformation<T, ID> getEntityInformation() {
|
||||
|
||||
return entityInformation;
|
||||
}
|
||||
}
|
||||
|
||||
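`exists(id)` now delegates to `MongoOperations.exists(...)` against the entity's collection instead of loading a document and checking for `null`. Roughly what happens under the covers; the entity type and collection name are assumptions:

```java
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class ExistsExample {

	// Rough equivalent of SimpleMongoRepository.exists(id) after this change.
	static boolean personExists(MongoOperations operations, String id) {
		Query idQuery = new Query(Criteria.where("_id").is(id));
		return operations.exists(idQuery, Person.class, "person"); // Person and "person" are assumptions
	}
}
```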
@@ -28,6 +28,7 @@ import com.mongodb.DBObject;
|
||||
import com.mysema.query.mongodb.MongodbSerializer;
|
||||
import com.mysema.query.types.Path;
|
||||
import com.mysema.query.types.PathMetadata;
|
||||
import com.mysema.query.types.PathType;
|
||||
|
||||
/**
|
||||
* Custom {@link MongodbSerializer} to take mapping information into account when building keys for constraints.
|
||||
@@ -61,9 +62,14 @@ class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
@Override
|
||||
protected String getKeyForPath(Path<?> expr, PathMetadata<?> metadata) {
|
||||
|
||||
if (!metadata.getPathType().equals(PathType.PROPERTY)) {
|
||||
return super.getKeyForPath(expr, metadata);
|
||||
}
|
||||
|
||||
Path<?> parent = metadata.getParent();
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(parent.getType());
|
||||
MongoPersistentProperty property = entity.getPersistentProperty(metadata.getExpression().toString());
|
||||
MongoPersistentProperty property = entity.getPersistentProperty(metadata.getName());
|
||||
|
||||
return property == null ? super.getKeyForPath(expr, metadata) : property.getFieldName();
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.0.xsd=org/springframework/data/mongodb/config/spring-mongo-1.0.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.1.xsd=org/springframework/data/mongodb/config/spring-mongo-1.1.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.2.xsd=org/springframework/data/mongodb/config/spring-mongo-1.2.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-1.2.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.3.xsd=org/springframework/data/mongodb/config/spring-mongo-1.3.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-1.3.xsd
|
||||
|
||||
@@ -0,0 +1,601 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<xsd:schema xmlns="http://www.springframework.org/schema/data/mongo"
|
||||
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:beans="http://www.springframework.org/schema/beans"
|
||||
xmlns:tool="http://www.springframework.org/schema/tool"
|
||||
xmlns:context="http://www.springframework.org/schema/context"
|
||||
xmlns:repository="http://www.springframework.org/schema/data/repository"
|
||||
targetNamespace="http://www.springframework.org/schema/data/mongo"
|
||||
elementFormDefault="qualified" attributeFormDefault="unqualified">
|
||||
|
||||
<xsd:import namespace="http://www.springframework.org/schema/beans" />
|
||||
<xsd:import namespace="http://www.springframework.org/schema/tool" />
|
||||
<xsd:import namespace="http://www.springframework.org/schema/context" />
|
||||
<xsd:import namespace="http://www.springframework.org/schema/data/repository"
|
||||
schemaLocation="http://www.springframework.org/schema/data/repository/spring-repository.xsd" />
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
<tool:exports type="com.mongodb.Mongo"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:element name="db-factory">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines a MongoDbFactory for connecting to a specific database
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongoDbFactory").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mongo-ref" type="mongoRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The reference to a Mongo instance. If not configured a default com.mongodb.Mongo instance will be created.
|
||||
]]>
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="dbname" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the database to connect to. Default is 'db'.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="port" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The port to connect to MongoDB server. Default is 27017
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="host" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The host to connect to a MongoDB server. Default is localhost
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="username" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The username to use when connecting to a MongoDB server.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="password" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The password to use when connecting to a MongoDB server.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="uri" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The Mongo URI string.]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:attributeGroup name="mongo-repository-attributes">
|
||||
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" default="mongoTemplate">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="create-query-indexes" type="xsd:boolean" default="false">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
Enables creation of indexes for queries that get derived from the method name
|
||||
and thus reference domain class properties. Defaults to false.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:attributeGroup>
|
||||
|
||||
<xsd:element name="repositories">
|
||||
<xsd:complexType>
|
||||
<xsd:complexContent>
|
||||
<xsd:extension base="repository:repositories">
|
||||
<xsd:attributeGroup ref="mongo-repository-attributes"/>
|
||||
<xsd:attributeGroup ref="repository:repository-attributes"/>
|
||||
</xsd:extension>
|
||||
</xsd:complexContent>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:element name="mapping-converter">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[Defines a MongoConverter for getting rich mapping functionality.]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:exports type="org.springframework.data.mongodb.core.convert.MappingMongoConverter" />
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:sequence>
|
||||
<xsd:element name="custom-converters" minOccurs="0">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Top-level element that contains one or more custom converters to be used for mapping
|
||||
domain objects to and from Mongo's DBObject]]>
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:sequence>
|
||||
<xsd:element name="converter" type="customConverterType" minOccurs="0" maxOccurs="unbounded"/>
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="base-package" type="xsd:string" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the MappingMongoConverter instance (by default "mappingConverter").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="base-package" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The base package in which to scan for entities annotated with @Document
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="db-factory-ref" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a DbFactory.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.MongoDbFactory" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="type-mapper-ref" type="typeMapperRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a MongoTypeMapper to be used by this MappingMongoConverter.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mapping-context-ref" type="mappingContextRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mapping.model.MappingContext">
|
||||
The reference to a MappingContext. Will default to 'mappingContext'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="disable-validation" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener">
|
||||
Disables JSR-303 validation on MongoDB documents before they are saved. By default it is set to false.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="xsd:boolean xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="abbreviate-field-names" use="optional" default="false">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy">
|
||||
Enables abbreviating the field names for domain class properties to the
|
||||
first character of their camel case names, e.g. fooBar -> fb.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="xsd:boolean xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:element name="jmx">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines JMX Model MBeans for monitoring a MongoDB server.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="mongo-ref" type="mongoRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the Mongo object that determines what server to monitor. (by default "mongo").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:element name="auditing">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
<tool:exports type="org.springframework.data.mongodb.core.mapping.event.AuditingEventListener" />
|
||||
<tool:exports type="org.springframework.data.auditing.IsNewAwareAuditingHandler" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attributeGroup ref="repository:auditing-attributes" />
|
||||
<xsd:attribute name="mapping-context-ref" type="mappingContextRef" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:simpleType name="typeMapperRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoTypeMapper"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="mappingContextRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mapping.model.MappingContext"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="mongoTemplateRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="mongoRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="writeConcernEnumeration">
|
||||
<xsd:restriction base="xsd:token">
|
||||
<xsd:enumeration value="NONE" />
|
||||
<xsd:enumeration value="NORMAL" />
|
||||
<xsd:enumeration value="SAFE" />
|
||||
<xsd:enumeration value="FSYNC_SAFE" />
|
||||
<xsd:enumeration value="REPLICAS_SAFE" />
|
||||
<xsd:enumeration value="JOURNAL_SAFE" />
|
||||
<xsd:enumeration value="MAJORITY" />
|
||||
</xsd:restriction>
|
||||
</xsd:simpleType>
|
||||
<!-- MLP
|
||||
<xsd:attributeGroup name="writeConcern">
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:simpleType>
|
||||
<xsd:restriction base="xsd:string">
|
||||
<xsd:enumeration value="NONE" />
|
||||
<xsd:enumeration value="NORMAL" />
|
||||
<xsd:enumeration value="SAFE" />
|
||||
<xsd:enumeration value="FSYNC_SAFE" />
|
||||
<xsd:enumeration value="REPLICA_SAFE" />
|
||||
<xsd:enumeration value="JOURNAL_SAFE" />
|
||||
<xsd:enumeration value="MAJORITY" />
|
||||
</xsd:restriction>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
</xsd:attributeGroup>
|
||||
-->
|
||||
<xsd:complexType name="mongoType">
|
||||
<xsd:sequence minOccurs="0" maxOccurs="1">
|
||||
<xsd:element name="options" type="optionsType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The Mongo driver options
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
<tool:exports type="com.mongodb.MongoOptions"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:element>
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
<!-- MLP
|
||||
<xsd:attributeGroup ref="writeConcern" />
|
||||
-->
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongo").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="port" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The port to connect to MongoDB server. Default is 27017
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="host" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The host to connect to a MongoDB server. Default is localhost
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="replica-set" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The comma delimited list of host:port entries to use for replica set/pairs.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
|
||||
<xsd:complexType name="optionsType">
|
||||
<xsd:attribute name="connections-per-host" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The number of connections allowed per host. Will block if run out. Default is 10. System property MONGO.POOLSIZE can override
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="threads-allowed-to-block-for-connection-multiplier" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The multiplier for connectionsPerHost for the number of threads that can block. Default is 5.
If connectionsPerHost is 10 and threadsAllowedToBlockForConnectionMultiplier is 5,
then 50 threads can block; more than that and an exception will be thrown.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="max-wait-time" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The max wait time of a blocking thread for a connection. Default is 120000 ms (2 minutes)
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="connect-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The connect timeout in milliseconds. 0 is default and infinite.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="socket-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The socket timeout. 0 is default and infinite.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="socket-keep-alive" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="auto-connect-retry" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
This controls whether or not on a connect, the system retries automatically. Default is false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="max-auto-connect-retry-time" type="xsd:long">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The maximum amount of time in milliseconds to spend retrying to open connection to the same server. Default is 0, which means to use the default 15s if autoConnectRetry is on.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-number" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
This specifies the number of servers to wait for on the write operation, and exception raising behavior. The 'w' option to the getlasterror command. Defaults to 0.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
This controls timeout for write operations in milliseconds. The 'wtimeout' option to the getlasterror command. Defaults to 0 (indefinite). Greater than zero is number of milliseconds to wait.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-fsync" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
This controls whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="slave-ok" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
This controls if the driver is allowed to read from secondaries or slaves. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
|
||||
<xsd:group name="beanElementGroup">
|
||||
<xsd:choice>
|
||||
<xsd:element ref="beans:bean"/>
|
||||
<xsd:element ref="beans:ref"/>
|
||||
</xsd:choice>
|
||||
</xsd:group>
|
||||
|
||||
<xsd:complexType name="customConverterType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Element defining a custom converter.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:group ref="beanElementGroup" minOccurs="0" maxOccurs="1"/>
|
||||
<xsd:attribute name="ref" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
A reference to a custom converter.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref"/>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
|
||||
<xsd:simpleType name="converterRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoConverter"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:element name="template">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines a MongoTemplate.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongoDbFactory").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="converter-ref" type="converterRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The reference to a MongoConverter instance.
|
||||
]]>
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoConverter"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="db-factory-ref" type="xsd:string"
|
||||
use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a DbFactory.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to
|
||||
type="org.springframework.data.mongodb.MongoDbFactory" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:element name="gridFsTemplate">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines a GridFsTemplate.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongoDbFactory").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="converter-ref" type="converterRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The reference to a MongoConverter instance.
|
||||
]]>
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoConverter"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="db-factory-ref" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a DbFactory.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.MongoDbFactory" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
</xsd:schema>
|
||||
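The schema above wires up a `Mongo` instance, a `MongoDbFactory` and templates in XML. For comparison, the JavaConfig route exercised by the tests later in this diff goes through `AbstractMongoConfiguration`; a minimal sketch, with the database name as an assumption:

```java
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;

import com.mongodb.Mongo;

@Configuration
class ApplicationConfig extends AbstractMongoConfiguration {

	@Override
	public String getDatabaseName() {
		return "database"; // assumption: pick whatever database name you need
	}

	@Override
	public Mongo mongo() throws Exception {
		return new Mongo(); // connects to localhost:27017 by default
	}
}
```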
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012 the original author or authors.
|
||||
* Copyright 2012-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,9 +18,16 @@ package org.springframework.data.mongodb.config;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
|
||||
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoTypeMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
@@ -33,9 +40,12 @@ import com.mongodb.Mongo;
|
||||
* Unit tests for {@link AbstractMongoConfiguration}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class AbstractMongoConfigurationUnitTests {
|
||||
|
||||
@Rule public ExpectedException exception = ExpectedException.none();
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-496
|
||||
*/
|
||||
@@ -68,6 +78,34 @@ public class AbstractMongoConfigurationUnitTests {
|
||||
assertScanningDisabled(" ");
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-569
|
||||
*/
|
||||
@Test
|
||||
public void containsMongoDbFactoryButNoMongoBean() {
|
||||
|
||||
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class);
|
||||
|
||||
assertThat(context.getBean(MongoDbFactory.class), is(notNullValue()));
|
||||
|
||||
exception.expect(NoSuchBeanDefinitionException.class);
|
||||
context.getBean(Mongo.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void returnsUninitializedMappingContext() throws Exception {
|
||||
|
||||
SampleMongoConfiguration configuration = new SampleMongoConfiguration();
|
||||
MongoMappingContext context = configuration.mongoMappingContext();
|
||||
|
||||
assertThat(context.getPersistentEntities(), is(emptyIterable()));
|
||||
context.initialize();
|
||||
assertThat(context.getPersistentEntities(), is(not(emptyIterable())));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-717
|
||||
*/
|
||||
@Test
|
||||
public void lifecycleCallbacksAreInvokedInAppropriateOrder() {
|
||||
|
||||
@@ -79,6 +117,20 @@ public class AbstractMongoConfigurationUnitTests {
|
||||
assertThat(spElContext.getBeanResolver(), is(notNullValue()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-725
|
||||
*/
|
||||
@Test
|
||||
public void shouldBeAbleToConfigureCustomTypeMapperViaJavaConfig() {
|
||||
|
||||
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class);
|
||||
MongoTypeMapper typeMapper = context.getBean(CustomMongoTypeMapper.class);
|
||||
MappingMongoConverter mmc = context.getBean(MappingMongoConverter.class);
|
||||
|
||||
assertThat(mmc, is(notNullValue()));
|
||||
assertThat(mmc.getTypeMapper(), is(typeMapper));
|
||||
}
|
||||
|
||||
private static void assertScanningDisabled(final String value) throws ClassNotFoundException {
|
||||
|
||||
AbstractMongoConfiguration configuration = new SampleMongoConfiguration() {
|
||||
@@ -92,6 +144,7 @@ public class AbstractMongoConfigurationUnitTests {
|
||||
assertThat(configuration.getInitialEntitySet(), hasSize(0));
|
||||
}
|
||||
|
||||
@Configuration
|
||||
static class SampleMongoConfiguration extends AbstractMongoConfiguration {
|
||||
|
||||
@Override
|
||||
@@ -99,11 +152,23 @@ public class AbstractMongoConfigurationUnitTests {
|
||||
return "database";
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Override
|
||||
public Mongo mongo() throws Exception {
|
||||
return new Mongo();
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Override
|
||||
public MappingMongoConverter mappingMongoConverter() throws Exception {
|
||||
MappingMongoConverter mmc = super.mappingMongoConverter();
|
||||
mmc.setTypeMapper(typeMapper());
|
||||
return mmc;
|
||||
}
|
||||
|
||||
@Bean
|
||||
public MongoTypeMapper typeMapper() {
|
||||
return new CustomMongoTypeMapper();
|
||||
}
|
||||
}
|
||||
|
||||
@Document
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper;
|
||||
|
||||
/**
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
class CustomMongoTypeMapper extends DefaultMongoTypeMapper {}
|
||||
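`CustomMongoTypeMapper` is just a marker subclass used by the tests; a more typical reason to register your own type mapper is to control how type information is written. A hedged sketch, with the key name as an assumption:

```java
import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper;

// Stores type information under a "_type" key instead of the default "_class".
class CustomTypeKeyMapper extends DefaultMongoTypeMapper {

	CustomTypeKeyMapper() {
		super("_type");
	}
}
```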
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,7 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.util.Collections;
|
||||
@@ -23,6 +23,7 @@ import java.util.Set;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
|
||||
import org.springframework.beans.factory.xml.XmlBeanDefinitionReader;
|
||||
import org.springframework.core.convert.TypeDescriptor;
|
||||
@@ -30,7 +31,10 @@ import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.convert.converter.GenericConverter;
|
||||
import org.springframework.core.io.ClassPathResource;
|
||||
import org.springframework.data.mongodb.core.convert.CustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoTypeMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.Account;
|
||||
import org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy;
|
||||
import org.springframework.data.mongodb.repository.Person;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
@@ -40,6 +44,7 @@ import com.mongodb.DBObject;
|
||||
* Integration tests for {@link MappingMongoConverterParser}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class MappingMongoConverterParserIntegrationTests {
|
||||
|
||||
@@ -59,6 +64,15 @@ public class MappingMongoConverterParserIntegrationTests {
|
||||
factory.getBean("converter");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void hasCustomTypeMapper() {
|
||||
|
||||
MappingMongoConverter converter = factory.getBean("converter", MappingMongoConverter.class);
|
||||
MongoTypeMapper customMongoTypeMapper = factory.getBean(CustomMongoTypeMapper.class);
|
||||
|
||||
assertThat(converter.getTypeMapper(), is(customMongoTypeMapper));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void scansForConverterAndSetsUpCustomConversionsAccordingly() {
|
||||
|
||||
@@ -67,6 +81,20 @@ public class MappingMongoConverterParserIntegrationTests {
|
||||
assertThat(conversions.hasCustomWriteTarget(Account.class), is(true));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-607
|
||||
*/
|
||||
@Test
|
||||
public void activatesAbbreviatingPropertiesCorrectly() {
|
||||
|
||||
BeanDefinition definition = factory.getBeanDefinition("abbreviatingConverter.mappingContext");
|
||||
Object value = definition.getPropertyValues().getPropertyValue("fieldNamingStrategy").getValue();
|
||||
|
||||
assertThat(value, is(instanceOf(BeanDefinition.class)));
|
||||
BeanDefinition strategy = (BeanDefinition) value;
|
||||
assertThat(strategy.getBeanClassName(), is(CamelCaseAbbreviatingFieldNamingStrategy.class.getName()));
|
||||
}
|
||||
|
||||
@Component
|
||||
public static class SampleConverter implements Converter<Person, DBObject> {
|
||||
public DBObject convert(Person source) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012 the original author or authors.
|
||||
* Copyright 2012-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -33,6 +33,7 @@ import org.springframework.core.io.ClassPathResource;
|
||||
*
|
||||
* @see DATAMONGO-36
|
||||
* @author Maciej Walkowiak
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class MappingMongoConverterParserValidationIntegrationTests {
|
||||
|
||||
@@ -65,4 +66,11 @@ public class MappingMongoConverterParserValidationIntegrationTests {
|
||||
reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-validation-disabled.xml"));
|
||||
factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void validatingEventListenerCreatedWithCustomTypeMapperConfig() {
|
||||
|
||||
reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-custom-typeMapper.xml"));
|
||||
assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER), is(not(nullValue())));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010 the original author or authors.
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -13,7 +13,6 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
@@ -26,12 +25,23 @@ import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.MongoFactoryBean;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.gridfs.GridFsOperations;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoOptions;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
/**
|
||||
* Integration tests for the MongoDB namespace.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Martin Baumgartner
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration
|
||||
public class MongoNamespaceTests {
|
||||
@@ -58,7 +68,7 @@ public class MongoNamespaceTests {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSecondMongoDbFactory() throws Exception {
|
||||
public void testSecondMongoDbFactory() {
|
||||
assertTrue(ctx.containsBean("secondMongoDbFactory"));
|
||||
MongoDbFactory dbf = (MongoDbFactory) ctx.getBean("secondMongoDbFactory");
|
||||
Mongo mongo = (Mongo) getField(dbf, "mongo");
|
||||
@@ -68,6 +78,58 @@ public class MongoNamespaceTests {
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-140
|
||||
*/
|
||||
@Test
|
||||
public void testMongoTemplateFactory() {
|
||||
assertTrue(ctx.containsBean("mongoTemplate"));
|
||||
MongoOperations operations = (MongoOperations) ctx.getBean("mongoTemplate");
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "mongoDbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "mongoConverter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-140
|
||||
*/
|
||||
@Test
|
||||
public void testSecondMongoTemplateFactory() {
|
||||
assertTrue(ctx.containsBean("anotherMongoTemplate"));
|
||||
MongoOperations operations = (MongoOperations) ctx.getBean("anotherMongoTemplate");
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "mongoDbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
WriteConcern writeConcern = (WriteConcern) getField(operations, "writeConcern");
|
||||
assertEquals(WriteConcern.SAFE, writeConcern);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-628
|
||||
*/
|
||||
@Test
|
||||
public void testGridFsTemplateFactory() {
|
||||
assertTrue(ctx.containsBean("gridFsTemplate"));
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("gridFsTemplate");
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "converter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-628
|
||||
*/
|
||||
@Test
|
||||
public void testSecondGridFsTemplateFactory() {
|
||||
assertTrue(ctx.containsBean("antoherGridFsTemplate"));
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("antoherGridFsTemplate");
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "converter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("deprecation")
|
||||
public void testMongoSingletonWithPropertyPlaceHolders() throws Exception {
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.data.annotation.Id;
|
||||
|
||||
public class BaseDoc {
|
||||
@Id String id;
|
||||
String value;
|
||||
}
|
||||
@@ -1,5 +1,5 @@
/*
 * Copyright 2012 the original author or authors.
 * Copyright 2012-2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -20,6 +20,8 @@ import static org.junit.Assert.*;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.*;

import java.util.List;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -28,7 +30,9 @@ import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import org.springframework.transaction.support.TransactionSynchronizationUtils;

import com.mongodb.DB;
import com.mongodb.Mongo;
@@ -37,12 +41,12 @@ import com.mongodb.Mongo;
 * Unit tests for {@link MongoDbUtils}.
 *
 * @author Oliver Gierke
 * @author Randy Watler
 */
@RunWith(MockitoJUnitRunner.class)
public class MongoDbUtilsUnitTests {

    @Mock
    Mongo mongo;
    @Mock Mongo mongo;

    @Before
    public void setUp() throws Exception {
@@ -81,4 +85,94 @@ public class MongoDbUtilsUnitTests {
        assertThat(first, is(notNullValue()));
        assertThat(MongoDbUtils.getDB(mongo, "first"), is(sameInstance(first)));
    }

    /**
     * @see DATAMONGO-737
     */
    @Test
    public void handlesTransactionSynchronizationLifecycle() {

        // ensure transaction synchronization manager has no registered
        // transaction synchronizations or bound resources at start of test
        assertThat(TransactionSynchronizationManager.getSynchronizations().isEmpty(), is(true));
        assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty(), is(true));

        // access database for one mongo instance, (registers transaction
        // synchronization and binds transaction resource)
        MongoDbUtils.getDB(mongo, "first");

        // ensure transaction synchronization manager has registered
        // transaction synchronizations and bound resources
        assertThat(TransactionSynchronizationManager.getSynchronizations().isEmpty(), is(false));
        assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty(), is(false));

        // simulate transaction completion, (unbinds transaction resource)
        try {
            simulateTransactionCompletion();
        } catch (Exception e) {
            fail("Unexpected exception thrown during transaction completion: " + e);
        }

        // ensure transaction synchronization manager has no bound resources
        // at end of test
        assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty(), is(true));
    }

    /**
     * @see DATAMONGO-737
     */
    @Test
    public void handlesTransactionSynchronizationsLifecycle() {

        // ensure transaction synchronization manager has no registered
        // transaction synchronizations or bound resources at start of test
        assertThat(TransactionSynchronizationManager.getSynchronizations().isEmpty(), is(true));
        assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty(), is(true));

        // access multiple databases for one mongo instance, (registers
        // transaction synchronizations and binds transaction resources)
        MongoDbUtils.getDB(mongo, "first");
        MongoDbUtils.getDB(mongo, "second");

        // ensure transaction synchronization manager has registered
        // transaction synchronizations and bound resources
        assertThat(TransactionSynchronizationManager.getSynchronizations().isEmpty(), is(false));
        assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty(), is(false));

        // simulate transaction completion, (unbinds transaction resources)
        try {
            simulateTransactionCompletion();
        } catch (Exception e) {
            fail("Unexpected exception thrown during transaction completion: " + e);
        }

        // ensure transaction synchronization manager has no bound
        // transaction resources at end of test
        assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty(), is(true));
    }

    /**
     * Simulate transaction rollback/commit completion protocol on managed transaction synchronizations which will unbind
     * managed transaction resources. Does not swallow exceptions for testing purposes.
     *
     * @see TransactionSynchronizationUtils#triggerBeforeCompletion()
     * @see TransactionSynchronizationUtils#triggerAfterCompletion(int)
     */
    private void simulateTransactionCompletion() {

        // triggerBeforeCompletion() implementation without swallowed exceptions
        List<TransactionSynchronization> synchronizations = TransactionSynchronizationManager.getSynchronizations();
        for (TransactionSynchronization synchronization : synchronizations) {
            synchronization.beforeCompletion();
        }

        // triggerAfterCompletion() implementation without swallowed exceptions
        List<TransactionSynchronization> remainingSynchronizations = TransactionSynchronizationManager
                .getSynchronizations();
        if (remainingSynchronizations != null) {
            for (TransactionSynchronization remainingSynchronization : remainingSynchronizations) {
                remainingSynchronization.afterCompletion(TransactionSynchronization.STATUS_ROLLED_BACK);
            }
        }
    }
}
@@ -30,6 +30,7 @@ import org.springframework.dao.DataAccessException;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.convert.AbstractMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.convert.MongoTypeMapper;
import org.springframework.data.mongodb.core.geo.Point;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
@@ -45,14 +46,13 @@ import com.mongodb.DBRef;
 * instances of their implementation and thus can see if it correctly implements the {@link MongoOperations} interface.
 *
 * @author Oliver Gierke
 * @author Thomas Darimont
 */
@RunWith(MockitoJUnitRunner.class)
public abstract class MongoOperationsUnitTests {

    @Mock
    CollectionCallback<Object> collectionCallback;
    @Mock
    DbCallback<Object> dbCallback;
    @Mock CollectionCallback<Object> collectionCallback;
    @Mock DbCallback<Object> dbCallback;

    MongoConverter converter;
    Person person;
@@ -86,6 +86,11 @@ public abstract class MongoOperationsUnitTests {
            public DBRef toDBRef(Object object, MongoPersistentProperty referingProperty) {
                return null;
            }

            @Override
            public MongoTypeMapper getTypeMapper() {
                return null;
            }
        };
    }

@@ -22,6 +22,7 @@ import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;
import static org.springframework.data.mongodb.core.query.Update.*;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
@@ -50,9 +51,10 @@ import org.springframework.dao.OptimisticLockingFailureException;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.PersistenceConstructor;
import org.springframework.data.annotation.Version;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mapping.model.MappingException;
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
import org.springframework.data.mongodb.MongoDataIntegrityViolationException;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
@@ -64,7 +66,6 @@ import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Order;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.test.context.ContextConfiguration;
@@ -89,6 +90,7 @@ import com.mongodb.WriteResult;
 * @author Amol Nayak
 * @author Patryk Wasik
 * @author Thomas Darimont
 * @author Komi Innocent
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:infrastructure.xml")
@@ -112,8 +114,8 @@ public class MongoTemplateTests {
        mappingContext.setInitialEntitySet(new HashSet<Class<?>>(Arrays.asList(PersonWith_idPropertyOfTypeObjectId.class,
                PersonWith_idPropertyOfTypeString.class, PersonWithIdPropertyOfTypeObjectId.class,
                PersonWithIdPropertyOfTypeString.class, PersonWithIdPropertyOfTypeInteger.class,
                PersonWithIdPropertyOfPrimitiveInt.class, PersonWithIdPropertyOfTypeLong.class,
                PersonWithIdPropertyOfPrimitiveLong.class)));
                PersonWithIdPropertyOfTypeBigInteger.class, PersonWithIdPropertyOfPrimitiveInt.class,
                PersonWithIdPropertyOfTypeLong.class, PersonWithIdPropertyOfPrimitiveLong.class)));
        mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder());
        mappingContext.initialize();

@@ -142,6 +144,7 @@ public class MongoTemplateTests {
        template.dropCollection(PersonWithIdPropertyOfTypeObjectId.class);
        template.dropCollection(PersonWithIdPropertyOfTypeString.class);
        template.dropCollection(PersonWithIdPropertyOfTypeInteger.class);
        template.dropCollection(PersonWithIdPropertyOfTypeBigInteger.class);
        template.dropCollection(PersonWithIdPropertyOfPrimitiveInt.class);
        template.dropCollection(PersonWithIdPropertyOfTypeLong.class);
        template.dropCollection(PersonWithIdPropertyOfPrimitiveLong.class);
@@ -153,6 +156,10 @@ public class MongoTemplateTests {
        template.dropCollection(TypeWithDate.class);
        template.dropCollection("collection");
        template.dropCollection("personX");
        template.dropCollection(Document.class);
        template.dropCollection(ObjectWith3AliasedFields.class);
        template.dropCollection(ObjectWith3AliasedFieldsAndNestedAddress.class);
        template.dropCollection(BaseDoc.class);
    }

    @Test
@@ -237,7 +244,7 @@ public class MongoTemplateTests {

        MongoTemplate template = new MongoTemplate(factory);
        template.setWriteResultChecking(WriteResultChecking.EXCEPTION);
        template.indexOps(Person.class).ensureIndex(new Index().on("firstName", Order.DESCENDING).unique());
        template.indexOps(Person.class).ensureIndex(new Index().on("firstName", Direction.DESC).unique());

        Person person = new Person(new ObjectId(), "Amol");
        person.setAge(28);
@@ -294,7 +301,7 @@ public class MongoTemplateTests {
        p2.setAge(40);
        template.insert(p2);

        template.indexOps(Person.class).ensureIndex(new Index().on("age", Order.DESCENDING).unique(Duplicates.DROP));
        template.indexOps(Person.class).ensureIndex(new Index().on("age", Direction.DESC).unique(Duplicates.DROP));

        DBCollection coll = template.getCollection(template.getCollectionName(Person.class));
        List<DBObject> indexInfo = coll.getIndexInfo();
@@ -324,7 +331,44 @@ public class MongoTemplateTests {
        List<IndexField> indexFields = ii.getIndexFields();
        IndexField field = indexFields.get(0);

        assertThat(field, is(IndexField.create("age", Order.DESCENDING)));
        assertThat(field, is(IndexField.create("age", Direction.DESC)));
    }

    /**
     * @see DATAMONGO-746
     */
    @Test
    public void testReadIndexInfoForIndicesCreatedViaMongoShellCommands() throws Exception {

        String command = "db." + template.getCollectionName(Person.class)
                + ".ensureIndex({'age':-1}, {'unique':true, 'sparse':true})";
        template.indexOps(Person.class).dropAllIndexes();

        assertThat(template.indexOps(Person.class).getIndexInfo().isEmpty(), is(true));
        factory.getDb().eval(command);

        List<DBObject> indexInfo = template.getCollection(template.getCollectionName(Person.class)).getIndexInfo();
        String indexKey = null;
        boolean unique = false;

        for (DBObject ix : indexInfo) {
            if ("age_-1".equals(ix.get("name"))) {
                indexKey = ix.get("key").toString();
                unique = (Boolean) ix.get("unique");
            }
        }

        assertThat(indexKey, is("{ \"age\" : -1.0}"));
        assertThat(unique, is(true));

        IndexInfo info = template.indexOps(Person.class).getIndexInfo().get(1);
        assertThat(info.isUnique(), is(true));
        assertThat(info.isSparse(), is(true));

        List<IndexField> indexFields = info.getIndexFields();
        IndexField field = indexFields.get(0);

        assertThat(field, is(IndexField.create("age", Direction.DESC)));
    }

    @Test
@@ -474,6 +518,25 @@ public class MongoTemplateTests {
        assertThat(p9q.getId(), is(p9.getId()));
        checkCollectionContents(PersonWithIdPropertyOfTypeInteger.class, 1);

        /*
         * @see DATAMONGO-602
         */
        // BigInteger id - provided
        PersonWithIdPropertyOfTypeBigInteger p9bi = new PersonWithIdPropertyOfTypeBigInteger();
        p9bi.setFirstName("Sven_9bi");
        p9bi.setAge(22);
        p9bi.setId(BigInteger.valueOf(12345));
        // insert
        mongoTemplate.insert(p9bi);
        // also try save
        mongoTemplate.save(p9bi);
        assertThat(p9bi.getId(), notNullValue());
        PersonWithIdPropertyOfTypeBigInteger p9qbi = mongoTemplate.findOne(new Query(where("id").in(p9bi.getId())),
                PersonWithIdPropertyOfTypeBigInteger.class);
        assertThat(p9qbi, notNullValue());
        assertThat(p9qbi.getId(), is(p9bi.getId()));
        checkCollectionContents(PersonWithIdPropertyOfTypeBigInteger.class, 1);

        // int id - provided
        PersonWithIdPropertyOfPrimitiveInt p10 = new PersonWithIdPropertyOfPrimitiveInt();
        p10.setFirstName("Sven_10");
@@ -698,6 +761,47 @@ public class MongoTemplateTests {
        assertThat(results3.size(), is(2));
    }

    /**
     * @see DATAMONGO-602
     */
    @Test
    public void testUsingAnInQueryWithBigIntegerId() throws Exception {

        template.remove(new Query(), PersonWithIdPropertyOfTypeBigInteger.class);

        PersonWithIdPropertyOfTypeBigInteger p1 = new PersonWithIdPropertyOfTypeBigInteger();
        p1.setFirstName("Sven");
        p1.setAge(11);
        p1.setId(new BigInteger("2666666666666666665069473312490162649510603601"));
        template.insert(p1);
        PersonWithIdPropertyOfTypeBigInteger p2 = new PersonWithIdPropertyOfTypeBigInteger();
        p2.setFirstName("Mary");
        p2.setAge(21);
        p2.setId(new BigInteger("2666666666666666665069473312490162649510603602"));
        template.insert(p2);
        PersonWithIdPropertyOfTypeBigInteger p3 = new PersonWithIdPropertyOfTypeBigInteger();
        p3.setFirstName("Ann");
        p3.setAge(31);
        p3.setId(new BigInteger("2666666666666666665069473312490162649510603603"));
        template.insert(p3);
        PersonWithIdPropertyOfTypeBigInteger p4 = new PersonWithIdPropertyOfTypeBigInteger();
        p4.setFirstName("John");
        p4.setAge(41);
        p4.setId(new BigInteger("2666666666666666665069473312490162649510603604"));
        template.insert(p4);

        Query q1 = new Query(Criteria.where("age").in(11, 21, 41));
        List<PersonWithIdPropertyOfTypeBigInteger> results1 = template.find(q1, PersonWithIdPropertyOfTypeBigInteger.class);
        Query q2 = new Query(Criteria.where("firstName").in("Ann", "Mary"));
        List<PersonWithIdPropertyOfTypeBigInteger> results2 = template.find(q2, PersonWithIdPropertyOfTypeBigInteger.class);
        Query q3 = new Query(Criteria.where("id").in(new BigInteger("2666666666666666665069473312490162649510603601"),
                new BigInteger("2666666666666666665069473312490162649510603604")));
        List<PersonWithIdPropertyOfTypeBigInteger> results3 = template.find(q3, PersonWithIdPropertyOfTypeBigInteger.class);
        assertThat(results1.size(), is(3));
        assertThat(results2.size(), is(2));
        assertThat(results3.size(), is(2));
    }

    @Test
    public void testUsingAnInQueryWithPrimitiveIntId() throws Exception {

@@ -950,7 +1054,7 @@ public class MongoTemplateTests {

        // test query with a sort
        Query q2 = new Query(Criteria.where("age").gt(10));
        q2.sort().on("age", Order.DESCENDING);
        q2.with(new Sort(Direction.DESC, "age"));
        PersonWithAList p5 = template.findOne(q2, PersonWithAList.class);
        assertThat(p5.getFirstName(), is("Mark"));
    }
@@ -1612,6 +1716,19 @@ public class MongoTemplateTests {
        assertThat(result.get(0).containsField("first"), is(true));
    }

    @Test
    public void executesExistsCorrectly() {

        Sample sample = new Sample();
        template.save(sample);

        Query query = query(where("id").is(sample.id));

        assertThat(template.exists(query, Sample.class), is(true));
        assertThat(template.exists(query(where("_id").is(sample.id)), template.getCollectionName(Sample.class)), is(true));
        assertThat(template.exists(query, Sample.class, template.getCollectionName(Sample.class)), is(true));
    }

    /**
     * @see DATAMONGO-675
     */
@@ -1668,6 +1785,301 @@ public class MongoTemplateTests {
        assertThat(result.id, is(idValue));
    }

    /**
     * @see DATAMONGO-392
     */
    @Test
    public void updatesShouldRetainTypeInformation() {

        Document doc = new Document();
        doc.id = "4711";
        doc.model = new ModelA().withValue("foo");
        template.insert(doc);

        Query query = new Query(Criteria.where("id").is(doc.id));
        String newModelValue = "bar";
        Update update = Update.update("model", new ModelA().withValue(newModelValue));
        template.updateFirst(query, update, Document.class);

        Document result = template.findOne(query, Document.class);

        assertThat(result, is(notNullValue()));
        assertThat(result.id, is(doc.id));
        assertThat(result.model, is(notNullValue()));
        assertThat(result.model.value(), is(newModelValue));
    }

    /**
     * @see DATAMONGO-702
     */
    @Test
    public void queryShouldSupportRealAndAliasedPropertyNamesForFieldInclusions() {

        ObjectWith3AliasedFields obj = new ObjectWith3AliasedFields();
        obj.id = "4711";
        obj.property1 = "P1";
        obj.property2 = "P2";
        obj.property3 = "P3";

        template.insert(obj);

        Query query = new Query(Criteria.where("id").is(obj.id));
        query.fields() //
                .include("property2") // real property name
                .include("prop3"); // aliased property name

        ObjectWith3AliasedFields result = template.findOne(query, ObjectWith3AliasedFields.class);

        assertThat(result.id, is(obj.id));
        assertThat(result.property1, is(nullValue()));
        assertThat(result.property2, is(obj.property2));
        assertThat(result.property3, is(obj.property3));
    }

    /**
     * @see DATAMONGO-702
     */
    @Test
    public void queryShouldSupportRealAndAliasedPropertyNamesForFieldExclusions() {

        ObjectWith3AliasedFields obj = new ObjectWith3AliasedFields();
        obj.id = "4711";
        obj.property1 = "P1";
        obj.property2 = "P2";
        obj.property3 = "P3";

        template.insert(obj);

        Query query = new Query(Criteria.where("id").is(obj.id));
        query.fields() //
                .exclude("property2") // real property name
                .exclude("prop3"); // aliased property name

        ObjectWith3AliasedFields result = template.findOne(query, ObjectWith3AliasedFields.class);

        assertThat(result.id, is(obj.id));
        assertThat(result.property1, is(obj.property1));
        assertThat(result.property2, is(nullValue()));
        assertThat(result.property3, is(nullValue()));
    }

    /**
     * @see DATAMONGO-702
     */
    @Test
    public void findMultipleWithQueryShouldSupportRealAndAliasedPropertyNamesForFieldExclusions() {

        ObjectWith3AliasedFields obj0 = new ObjectWith3AliasedFields();
        obj0.id = "4711";
        obj0.property1 = "P10";
        obj0.property2 = "P20";
        obj0.property3 = "P30";
        ObjectWith3AliasedFields obj1 = new ObjectWith3AliasedFields();
        obj1.id = "4712";
        obj1.property1 = "P11";
        obj1.property2 = "P21";
        obj1.property3 = "P31";

        template.insert(obj0);
        template.insert(obj1);

        Query query = new Query(Criteria.where("id").in(obj0.id, obj1.id));
        query.fields() //
                .exclude("property2") // real property name
                .exclude("prop3"); // aliased property name

        List<ObjectWith3AliasedFields> results = template.find(query, ObjectWith3AliasedFields.class);

        assertThat(results, is(notNullValue()));
        assertThat(results.size(), is(2));

        ObjectWith3AliasedFields result0 = results.get(0);
        assertThat(result0, is(notNullValue()));
        assertThat(result0.id, is(obj0.id));
        assertThat(result0.property1, is(obj0.property1));
        assertThat(result0.property2, is(nullValue()));
        assertThat(result0.property3, is(nullValue()));

        ObjectWith3AliasedFields result1 = results.get(1);
        assertThat(result1, is(notNullValue()));
        assertThat(result1.id, is(obj1.id));
        assertThat(result1.property1, is(obj1.property1));
        assertThat(result1.property2, is(nullValue()));
        assertThat(result1.property3, is(nullValue()));
    }

    /**
     * @see DATAMONGO-702
     */
    @Test
    public void queryShouldSupportNestedPropertyNamesForFieldInclusions() {

        ObjectWith3AliasedFieldsAndNestedAddress obj = new ObjectWith3AliasedFieldsAndNestedAddress();
        obj.id = "4711";
        obj.property1 = "P1";
        obj.property2 = "P2";
        obj.property3 = "P3";
        Address address = new Address();
        String stateValue = "WA";
        address.state = stateValue;
        address.city = "Washington";
        obj.address = address;

        template.insert(obj);

        Query query = new Query(Criteria.where("id").is(obj.id));
        query.fields() //
                .include("property2") // real property name
                .include("address.state"); // aliased property name

        ObjectWith3AliasedFieldsAndNestedAddress result = template.findOne(query,
                ObjectWith3AliasedFieldsAndNestedAddress.class);

        assertThat(result.id, is(obj.id));
        assertThat(result.property1, is(nullValue()));
        assertThat(result.property2, is(obj.property2));
        assertThat(result.property3, is(nullValue()));
        assertThat(result.address, is(notNullValue()));
        assertThat(result.address.city, is(nullValue()));
        assertThat(result.address.state, is(stateValue));
    }

    /**
     * @see DATAMONGO-709
     */
    @Test
    public void aQueryRestrictedWithOneRestrictedResultTypeShouldReturnOnlyInstancesOfTheRestrictedType() {

        BaseDoc doc0 = new BaseDoc();
        doc0.value = "foo";
        SpecialDoc doc1 = new SpecialDoc();
        doc1.value = "foo";
        doc1.specialValue = "specialfoo";
        VerySpecialDoc doc2 = new VerySpecialDoc();
        doc2.value = "foo";
        doc2.specialValue = "specialfoo";
        doc2.verySpecialValue = 4711;

        String collectionName = template.getCollectionName(BaseDoc.class);
        template.insert(doc0, collectionName);
        template.insert(doc1, collectionName);
        template.insert(doc2, collectionName);

        Query query = Query.query(where("value").is("foo")).restrict(SpecialDoc.class);
        List<BaseDoc> result = template.find(query, BaseDoc.class);

        assertThat(result, is(notNullValue()));
        assertThat(result.size(), is(1));
        assertThat(result.get(0), is(instanceOf(SpecialDoc.class)));
    }

    /**
     * @see DATAMONGO-709
     */
    @Test
    public void aQueryRestrictedWithMultipleRestrictedResultTypesShouldReturnOnlyInstancesOfTheRestrictedTypes() {

        BaseDoc doc0 = new BaseDoc();
        doc0.value = "foo";
        SpecialDoc doc1 = new SpecialDoc();
        doc1.value = "foo";
        doc1.specialValue = "specialfoo";
        VerySpecialDoc doc2 = new VerySpecialDoc();
        doc2.value = "foo";
        doc2.specialValue = "specialfoo";
        doc2.verySpecialValue = 4711;

        String collectionName = template.getCollectionName(BaseDoc.class);
        template.insert(doc0, collectionName);
        template.insert(doc1, collectionName);
        template.insert(doc2, collectionName);

        Query query = Query.query(where("value").is("foo")).restrict(BaseDoc.class, VerySpecialDoc.class);
        List<BaseDoc> result = template.find(query, BaseDoc.class);

        assertThat(result, is(notNullValue()));
        assertThat(result.size(), is(2));
        assertThat(result.get(0).getClass(), is((Object) BaseDoc.class));
        assertThat(result.get(1).getClass(), is((Object) VerySpecialDoc.class));
    }

    /**
     * @see DATAMONGO-709
     */
    @Test
    public void aQueryWithNoRestrictedResultTypesShouldReturnAllInstancesWithinTheGivenCollection() {

        BaseDoc doc0 = new BaseDoc();
        doc0.value = "foo";
        SpecialDoc doc1 = new SpecialDoc();
        doc1.value = "foo";
        doc1.specialValue = "specialfoo";
        VerySpecialDoc doc2 = new VerySpecialDoc();
        doc2.value = "foo";
        doc2.specialValue = "specialfoo";
        doc2.verySpecialValue = 4711;

        String collectionName = template.getCollectionName(BaseDoc.class);
        template.insert(doc0, collectionName);
        template.insert(doc1, collectionName);
        template.insert(doc2, collectionName);

        Query query = Query.query(where("value").is("foo"));
        List<BaseDoc> result = template.find(query, BaseDoc.class);

        assertThat(result, is(notNullValue()));
        assertThat(result.size(), is(3));
        assertThat(result.get(0).getClass(), is((Object) BaseDoc.class));
        assertThat(result.get(1).getClass(), is((Object) SpecialDoc.class));
        assertThat(result.get(2).getClass(), is((Object) VerySpecialDoc.class));
    }

    /**
     * @see DATAMONGO-771
     */
    @Test
    public void allowInsertWithPlainJsonString() {

        String id = "4711";
        String value = "bubu";
        String json = String.format("{_id:%s, field: '%s'}", id, value);

        template.insert(json, "sample");
        List<Sample> result = template.findAll(Sample.class);

        assertThat(result.size(), is(1));
        assertThat(result.get(0).id, is(id));
        assertThat(result.get(0).field, is(value));
    }

    static interface Model {
        String value();

        Model withValue(String value);
    }

    static class ModelA implements Model {

        private String value;

        @Override
        public String value() {
            return this.value;
        }

        @Override
        public Model withValue(String value) {
            this.value = value;
            return this;
        }
    }

    static class Document {

        @Id public String id;
        public Model model;
    }

    static class MyId {

        String first;
@@ -1679,7 +2091,7 @@ public class MongoTemplateTests {
        @Id MyId id;
    }

    public static class Sample {
    static class Sample {

        @Id String id;
        String field;
@@ -1753,4 +2165,16 @@ public class MongoTemplateTests {
        @Id String id;
        Date date;
    }

    static class ObjectWith3AliasedFields {

        @Id String id;
        @Field("prop1") String property1;
        @Field("prop2") String property2;
        @Field("prop3") String property3;
    }

    static class ObjectWith3AliasedFieldsAndNestedAddress extends ObjectWith3AliasedFields {
        @Field("adr") Address address;
    }
}
@@ -0,0 +1,57 @@
/*
 * Copyright 2010-2011 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import java.math.BigInteger;

public class PersonWithIdPropertyOfTypeBigInteger {

    private BigInteger id;

    private String firstName;

    private int age;

    public BigInteger getId() {
        return id;
    }

    public void setId(BigInteger id) {
        this.id = id;
    }

    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    public int getAge() {
        return age;
    }

    public void setAge(int age) {
        this.age = age;
    }

    @Override
    public String toString() {
        return "PersonWithIdPropertyOfTypeInteger [id=" + id + ", firstName=" + firstName + ", age=" + age + "]";
    }

}
@@ -0,0 +1,5 @@
package org.springframework.data.mongodb.core;

public class SpecialDoc extends BaseDoc {
    String specialValue;
}
@@ -0,0 +1,95 @@
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate.UnwrapAndReadDbObjectCallback;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

import com.mongodb.BasicDBObject;

/**
 * Unit tests for {@link UnwrapAndReadDbObjectCallback}.
 *
 * @author Oliver Gierke
 */
@RunWith(MockitoJUnitRunner.class)
public class UnwrapAndReadDbObjectCallbackUnitTests {

    @Mock MongoDbFactory factory;

    UnwrapAndReadDbObjectCallback<Target> callback;

    @Before
    public void setUp() {

        MongoTemplate template = new MongoTemplate(factory);
        MappingMongoConverter converter = new MappingMongoConverter(factory, new MongoMappingContext());

        this.callback = template.new UnwrapAndReadDbObjectCallback<Target>(converter, Target.class);
    }

    @Test
    public void usesFirstLevelValues() {

        Target target = callback.doWith(new BasicDBObject("foo", "bar"));

        assertThat(target.id, is(nullValue()));
        assertThat(target.foo, is("bar"));
    }

    @Test
    public void unwrapsUnderscoreIdIfBasicDBObject() {

        Target target = callback.doWith(new BasicDBObject("_id", new BasicDBObject("foo", "bar")));

        assertThat(target.id, is(nullValue()));
        assertThat(target.foo, is("bar"));
    }

    @Test
    public void firstLevelPropertiesTrumpNestedOnes() {

        Target target = callback.doWith(new BasicDBObject("_id", new BasicDBObject("foo", "bar")).append("foo", "foobar"));

        assertThat(target.id, is(nullValue()));
        assertThat(target.foo, is("foobar"));
    }

    @Test
    public void keepsUnderscoreIdIfScalarValue() {

        Target target = callback.doWith(new BasicDBObject("_id", "bar").append("foo", "foo"));

        assertThat(target.id, is("bar"));
        assertThat(target.foo, is("foo"));
    }

    static class Target {

        String id;
        String foo;
    }
}
Some files were not shown because too many files have changed in this diff.