Compare commits
145 Commits
| SHA1 |
|---|
| 13823690fc |
| fc0187eb8e |
| 07bf5a4f7e |
| 274adf320d |
| aed983462f |
| 0f5179eb26 |
| df40adb618 |
| 339d8e6f14 |
| 292e645387 |
| df7c007e9e |
| 9f3465f63c |
| 2e7c166d38 |
| ac7e3a708a |
| fa50f74729 |
| 2aba831389 |
| 33c665fb98 |
| 55f83fa0f2 |
| 13f2472e2f |
| 5de9994093 |
| 4d24742181 |
| 4af876152c |
| 872e706d2a |
| 51c66723e9 |
| ccfab5d91a |
| c94c3f3791 |
| 4924ff54b6 |
| edd9b58d1c |
| dc6cbde842 |
| aaaf7e57de |
| 76538aeff1 |
| 1181a21e51 |
| c7f07d2386 |
| 054ad6fc12 |
| 1b4ecac732 |
| 1ef03bf707 |
| a1febdb541 |
| 79fdf44d24 |
| d8bb644b30 |
| 73db7b06b9 |
| 8483f36f48 |
| b426bdd64a |
| 3349454a51 |
| c04343a764 |
| 05311ea118 |
| a18633f847 |
| 29e2f87ee1 |
| 5c5accd12d |
| 08e534adcd |
| 914acfea16 |
| a6728f851b |
| 81a7515cb7 |
| 3f4087dc13 |
| 0430962861 |
| 8bb62a65db |
| d6defbcb56 |
| ed47850f71 |
| ca1cfdf659 |
| 95d31d5bb7 |
| a7c3ef2aa8 |
| 3320e49c0b |
| 286efca52d |
| 92926befc9 |
| 703f24ae1c |
| febe703954 |
| 440d16ebc6 |
| 11c2e90736 |
| 816a567f29 |
| e35486759b |
| b80c81f861 |
| 005d21c0b6 |
| 3c8b7a54d6 |
| b2d59f2539 |
| 1cc2830e95 |
| 9a4d6f6fb7 |
| e96db8b69b |
| 13ce75779f |
| cc785ecf4f |
| 41fe1809da |
| fd5d39f4d9 |
| 71d97ff53a |
| f2f3faef08 |
| b1a488098b |
| fd18a8b82c |
| 761d2748c5 |
| e2f3966763 |
| 2b3e5461c2 |
| ca4db459c4 |
| e0d0a5dc31 |
| 18761a39c4 |
| ad25751dbb |
| 879ca6d149 |
| 8a40cf421c |
| fbbb7b6bf7 |
| 0596403081 |
| d9eb18df4d |
| 132e4a9839 |
| 4acf8caac1 |
| 9de3c88e6b |
| 6115c9562b |
| 963a222616 |
| 7a2de49ac1 |
| 8380806d7a |
| e60479e47a |
| c7e65cbc40 |
| b8e02efb04 |
| c7f20fb836 |
| 28a0202ef4 |
| 164e947045 |
| 7e65c0c87d |
| 9c1f753f17 |
| 0f821eb52d |
| e3aadd63ab |
| aa06d520df |
| 4fa1d4ba97 |
| ba9f11b345 |
| 4777dd2e5e |
| 64b4591b72 |
| bac5961fd8 |
| b3cac862d8 |
| b8ab2ad539 |
| 618d5bd5e9 |
| 096c3278b3 |
| c9d9976c22 |
| 65497f93d4 |
| af8a53bef6 |
| 916b856e97 |
| 7848da63f2 |
| f359a1d31a |
| 72d645feae |
| 72fe382bba |
| d25e840cf5 |
| df1775572a |
| 86670cd49f |
| 31a4bf906e |
| 599291e8b7 |
| f5a04fb9fb |
| 88558b67c3 |
| d52cb255e0 |
| 6c214cbc37 |
| 01012c1448 |
| 4c7befb910 |
| b62669ec8f |
| 0fb74caf9b |
| 9623dac01f |
| 695b27968c |
@@ -11,7 +11,7 @@ For a comprehensive treatment of all the Spring Data MongoDB features, please re
* the [User Guide](http://docs.spring.io/spring-data/mongodb/docs/current/reference/html/)
* the [JavaDocs](http://docs.spring.io/spring-data/mongodb/docs/current/api/) have extensive comments in them as well.
* the home page of [Spring Data MongoDB](http://projects.spring.io/spring-data-mongodb) contains links to articles and other resources.
* for more detailed questions, use the [forum](http://forum.spring.io/forum/spring-projects/data/nosql).
* for more detailed questions, use [Spring Data Mongodb on Stackoverflow](http://stackoverflow.com/questions/tagged/spring-data-mongodb).
If you are new to Spring as well as to Spring Data, look for information about [Spring projects](http://projects.spring.io/).

@@ -26,7 +26,7 @@ Add the Maven dependency:
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.4.1.RELEASE</version>
<version>1.5.0.RELEASE</version>
</dependency>
```

@@ -36,7 +36,7 @@ If you'd rather like the latest snapshots of the upcoming major version, use our
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.5.0.BUILD-SNAPSHOT</version>
<version>1.6.0.BUILD-SNAPSHOT</version>
</dependency>
<repository>

@@ -139,7 +139,7 @@ public class MyService {
Here are some ways for you to get involved in the community:
* Get involved with the Spring community on the Spring Community Forums. Please help out on the [forum](http://forum.spring.io/forum/spring-projects/data/nosql) by responding to questions and joining the debate.
* Get involved with the Spring community on Stackoverflow and help out on the [spring-data-mongodb](http://stackoverflow.com/questions/tagged/spring-data-mongodb) tag by responding to questions and joining the debate.
* Create [JIRA](https://jira.springframework.org/browse/DATADOC) tickets for bugs and new features and comment and vote on the ones that you are interested in.
* Github is for social coding: if you want to write code, we encourage contributions through pull requests from [forks of this repository](http://help.github.com/forking/). If you want to contribute code this way, please reference a JIRA ticket as well covering the specific issue you are addressing.
* Watch for upcoming articles on Spring by [subscribing](http://spring.io/blog) to spring.io.
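The dependency coordinates above are all that is needed to start using the template API. A minimal, hedged usage sketch to go with them (the Person class, its fields, and the "database" name are assumptions for illustration, not part of this change; the usual imports from org.springframework.data.mongodb.core, org.springframework.data.mongodb.core.query and com.mongodb are omitted):

```java
// Connect to a local MongoDB instance and work against the "database" database (assumed name).
MongoOperations mongoOps = new MongoTemplate(new MongoClient(), "database");

// Insert a document and read it back by a field value.
mongoOps.insert(new Person("Joe", 34));
Person joe = mongoOps.findOne(new Query(Criteria.where("name").is("Joe")), Person.class);

// Clean up the collection used by the example.
mongoOps.dropCollection(Person.class);
```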
46 pom.xml

@@ -1,11 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.5.0.M1</version>
<version>1.5.7.BUILD-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Spring Data MongoDB</name>

@@ -15,8 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>1.4.0.M1</version>
<relativePath>../spring-data-build/parent/pom.xml</relativePath>
<version>1.4.7.BUILD-SNAPSHOT</version>
</parent>
<modules>

@@ -29,11 +28,11 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>1.8.0.M1</springdata.commons>
<mongo>2.11.4</mongo>
<mongo-osgi>${mongo}</mongo-osgi>
<springdata.commons>1.8.7.BUILD-SNAPSHOT</springdata.commons>
<mongo>2.12.5</mongo>
<mongo.osgi>2.12.5</mongo.osgi>
</properties>
<developers>
<developer>
<id>ogierke</id>

@@ -105,11 +104,28 @@
<profiles>
<profile>
<id>mongo-next</id>
<properties>
<mongo>2.12.0-rc0</mongo>
<mongo-osgi>2.12.0</mongo-osgi>
<mongo>2.13.0</mongo>
</properties>
</profile>
<profile>
<id>mongo-3-next</id>
<properties>
<mongo>3.0.0-SNAPSHOT</mongo>
</properties>
<repositories>
<repository>
<id>mongo-snapshots</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
</repository>
</repositories>
</profile>
</profiles>

@@ -121,14 +137,14 @@
<version>${mongo}</version>
</dependency>
</dependencies>
<repositories>
<repository>
<id>spring-libs-milestone</id>
<url>http://repo.spring.io/libs-milestone/</url>
<id>spring-libs-snapshot</id>
<url>https://repo.spring.io/libs-snapshot</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>spring-plugins-release</id>
7 spring-data-mongodb-cross-store/aop.xml (new file)

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<aspectj>
<aspects>
<aspect name="org.springframework.beans.factory.aspectj.AnnotationBeanConfigurerAspect" />
<aspect name="org.springframework.data.mongodb.crossstore.MongoDocumentBacking" />
</aspects>
</aspectj>
@@ -2,22 +2,22 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.5.0.M1</version>
<version>1.5.7.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>spring-data-mongodb-cross-store</artifactId>
<name>Spring Data MongoDB - Cross-Store Support</name>
<properties>
<jpa>1.0.0.Final</jpa>
<hibernate>3.6.10.Final</hibernate>
</properties>
<dependencies>
<!-- Spring -->

@@ -48,7 +48,7 @@
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.5.0.M1</version>
<version>1.5.7.BUILD-SNAPSHOT</version>
</dependency>
<dependency>

@@ -56,17 +56,13 @@
<artifactId>aspectjrt</artifactId>
<version>${aspectj}</version>
</dependency>
<dependency>
<groupId>cglib</groupId>
<artifactId>cglib</artifactId>
<version>2.2</version>
</dependency>
<!-- JPA -->
<dependency>
<groupId>org.hibernate.javax.persistence</groupId>
<artifactId>hibernate-jpa-2.0-api</artifactId>
<version>${jpa}</version>
<optional>true</optional>
</dependency>
<!-- For Tests -->

@@ -102,7 +98,7 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>aspectj-maven-plugin</artifactId>
<version>1.4</version>
<version>1.6</version>
<dependencies>
<dependency>
<groupId>org.aspectj</groupId>

@@ -131,8 +127,10 @@
<artifactId>spring-aspects</artifactId>
</aspectLibrary>
</aspectLibraries>
<complianceLevel>${source.level}</complianceLevel>
<source>${source.level}</source>
<target>${source.level}</target>
<xmlConfigured>aop.xml</xmlConfigured>
</configuration>
</plugin>
</plugins>
@@ -2,12 +2,12 @@ Bundle-SymbolicName: org.springframework.data.mongodb.crossstore
Bundle-Name: Spring Data MongoDB Cross Store Support
Bundle-Vendor: Pivotal Software, Inc.
Bundle-ManifestVersion: 2
Import-Package:
Import-Package:
sun.reflect;version="0";resolution:=optional
Export-Template:
org.springframework.data.mongodb.crossstore.*;version="${project.version}"
Import-Template:
com.mongodb.*;version="${mongo-osgi:[=.=.=,+1.0.0)}",
Import-Template:
com.mongodb.*;version="${mongo.osgi:[=.=.=,+1.0.0)}",
javax.persistence.*;version="${jpa:[=.=.=,+1.0.0)}",
org.aspectj.*;version="${aspectj:[1.0.0, 2.0.0)}",
org.bson.*;version="0",
@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.5.0.M1</version>
<version>1.5.7.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.5.0.M1</version>
<version>1.5.7.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,5 +5,5 @@ Bundle-ManifestVersion: 2
Import-Package:
sun.reflect;version="0";resolution:=optional
Import-Template:
com.mongodb.*;version="${mongo-osgi:[=.=.=,+1.0.0)}",
com.mongodb.*;version="${mongo.osgi:[=.=.=,+1.0.0)}",
org.apache.log4j.*;version="${log4j:[=.=.=,+1.0.0)}"
@@ -2,7 +2,7 @@
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
|
||||
<artifactId>spring-data-mongodb</artifactId>
|
||||
|
||||
<name>Spring Data MongoDB - Core</name>
|
||||
@@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.5.0.M1</version>
|
||||
<version>1.5.7.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
|
||||
|
||||
<!-- Spring -->
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
@@ -77,7 +77,7 @@
|
||||
<version>1.0</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
|
||||
<!-- CDI -->
|
||||
<dependency>
|
||||
<groupId>javax.enterprise</groupId>
|
||||
@@ -86,21 +86,21 @@
|
||||
<scope>provided</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.el</groupId>
|
||||
<artifactId>el-api</artifactId>
|
||||
<version>${cdi}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.openwebbeans.test</groupId>
|
||||
<artifactId>cditest-owb</artifactId>
|
||||
<version>${webbeans}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.servlet</groupId>
|
||||
<artifactId>servlet-api</artifactId>
|
||||
@@ -115,7 +115,7 @@
|
||||
<version>${validation}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>org.objenesis</groupId>
|
||||
<artifactId>objenesis</artifactId>
|
||||
@@ -129,23 +129,23 @@
|
||||
<version>4.2.0.Final</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
<version>${jodatime}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>jul-to-slf4j</artifactId>
|
||||
<version>${slf4j}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
|
||||
</dependencies>
|
||||
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
|
||||
@@ -189,9 +189,14 @@
|
||||
<systemPropertyVariables>
|
||||
<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
|
||||
</systemPropertyVariables>
|
||||
<properties>
|
||||
<property>
|
||||
<name>listener</name>
|
||||
<value>org.springframework.data.mongodb.test.util.CleanMongoDBJunitRunListener</value>
|
||||
</property>
|
||||
</properties>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
</project>
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -37,7 +37,9 @@ import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.FieldNamingStrategy;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.PropertyNameFieldNamingStrategy;
import org.springframework.data.support.CachingIsNewStrategyFactory;
import org.springframework.data.support.IsNewStrategyFactory;
import org.springframework.util.ClassUtils;

@@ -51,6 +53,7 @@ import com.mongodb.Mongo;
* @author Mark Pollack
* @author Oliver Gierke
* @author Thomas Darimont
* @author Ryan Tenney
*/
@Configuration
public abstract class AbstractMongoConfiguration {

@@ -144,10 +147,7 @@ public abstract class AbstractMongoConfiguration {
MongoMappingContext mappingContext = new MongoMappingContext();
mappingContext.setInitialEntitySet(getInitialEntitySet());
mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());
if (abbreviateFieldNames()) {
mappingContext.setFieldNamingStrategy(new CamelCaseAbbreviatingFieldNamingStrategy());
}
mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
return mappingContext;
}

@@ -232,4 +232,15 @@ public abstract class AbstractMongoConfiguration {
protected boolean abbreviateFieldNames() {
return false;
}
/**
* Configures a {@link FieldNamingStrategy} on the {@link MongoMappingContext} instance created.
*
* @return
* @since 1.5
*/
protected FieldNamingStrategy fieldNamingStrategy() {
return abbreviateFieldNames() ? new CamelCaseAbbreviatingFieldNamingStrategy()
: PropertyNameFieldNamingStrategy.INSTANCE;
}
}
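The new fieldNamingStrategy() hook shown above lets a Java configuration class swap the strategy used to map property names to document field names, instead of only toggling abbreviateFieldNames(). A minimal sketch of overriding it (the AbbreviatedFieldsConfig class name and the "example" database name are assumptions, not part of this change):

```java
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
import org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mongodb.core.mapping.FieldNamingStrategy;

import com.mongodb.Mongo;
import com.mongodb.MongoClient;

@Configuration
public class AbbreviatedFieldsConfig extends AbstractMongoConfiguration {

  @Override
  protected String getDatabaseName() {
    return "example"; // hypothetical database name
  }

  @Override
  public Mongo mongo() throws Exception {
    return new MongoClient(); // connects to localhost:27017 by default
  }

  @Override
  protected FieldNamingStrategy fieldNamingStrategy() {
    // Shorten camel-case property names when writing documents;
    // this is the same strategy abbreviateFieldNames() used to enable.
    return new CamelCaseAbbreviatingFieldNamingStrategy();
  }
}
```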
@@ -30,6 +30,7 @@ import org.springframework.beans.factory.config.BeanDefinitionHolder;
|
||||
import org.springframework.beans.factory.config.RuntimeBeanReference;
|
||||
import org.springframework.beans.factory.parsing.BeanComponentDefinition;
|
||||
import org.springframework.beans.factory.parsing.CompositeComponentDefinition;
|
||||
import org.springframework.beans.factory.parsing.ReaderContext;
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
@@ -71,6 +72,7 @@ import org.w3c.dom.Element;
|
||||
* @author Oliver Gierke
|
||||
* @author Maciej Walkowiak
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
|
||||
@@ -83,8 +85,11 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
*/
|
||||
public BeanDefinition parse(Element element, ParserContext parserContext) {
|
||||
|
||||
BeanDefinitionRegistry registry = parserContext.getRegistry();
|
||||
if (parserContext.isNested()) {
|
||||
parserContext.getReaderContext().error("Mongo Converter must not be defined as nested bean.", element);
|
||||
}
|
||||
|
||||
BeanDefinitionRegistry registry = parserContext.getRegistry();
|
||||
String id = element.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE);
|
||||
id = StringUtils.hasText(id) ? id : DEFAULT_CONVERTER_BEAN_NAME;
|
||||
|
||||
@@ -209,11 +214,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
mappingContextBuilder.addPropertyValue("simpleTypeHolder", simpleTypesDefinition);
|
||||
}
|
||||
|
||||
String abbreviateFieldNames = element.getAttribute("abbreviate-field-names");
|
||||
if ("true".equals(abbreviateFieldNames)) {
|
||||
mappingContextBuilder.addPropertyValue("fieldNamingStrategy", new RootBeanDefinition(
|
||||
CamelCaseAbbreviatingFieldNamingStrategy.class));
|
||||
}
|
||||
parseFieldNamingStrategy(element, parserContext.getReaderContext(), mappingContextBuilder);
|
||||
|
||||
ctxRef = converterId == null || DEFAULT_CONVERTER_BEAN_NAME.equals(converterId) ? MAPPING_CONTEXT_BEAN_NAME
|
||||
: converterId + "." + MAPPING_CONTEXT_BEAN_NAME;
|
||||
@@ -222,6 +223,34 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
return ctxRef;
|
||||
}
|
||||
|
||||
private static void parseFieldNamingStrategy(Element element, ReaderContext context, BeanDefinitionBuilder builder) {
|
||||
|
||||
String abbreviateFieldNames = element.getAttribute("abbreviate-field-names");
|
||||
String fieldNamingStrategy = element.getAttribute("field-naming-strategy-ref");
|
||||
|
||||
boolean fieldNamingStrategyReferenced = StringUtils.hasText(fieldNamingStrategy);
|
||||
boolean abbreviationActivated = StringUtils.hasText(abbreviateFieldNames)
|
||||
&& Boolean.parseBoolean(abbreviateFieldNames);
|
||||
|
||||
if (fieldNamingStrategyReferenced && abbreviationActivated) {
|
||||
context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured!",
|
||||
element);
|
||||
return;
|
||||
}
|
||||
|
||||
Object value = null;
|
||||
|
||||
if ("true".equals(abbreviateFieldNames)) {
|
||||
value = new RootBeanDefinition(CamelCaseAbbreviatingFieldNamingStrategy.class);
|
||||
} else if (fieldNamingStrategyReferenced) {
|
||||
value = new RuntimeBeanReference(fieldNamingStrategy);
|
||||
}
|
||||
|
||||
if (value != null) {
|
||||
builder.addPropertyValue("fieldNamingStrategy", value);
|
||||
}
|
||||
}
|
||||
|
||||
private BeanDefinition getCustomConversions(Element element, ParserContext parserContext) {
|
||||
|
||||
List<Element> customConvertersElements = DomUtils.getChildElementsByTagName(element, "custom-converters");
|
||||
|
||||
@@ -25,7 +25,7 @@ import com.mongodb.DBCursor;
interface CursorPreparer {
/**
* Prepare the given cursor (apply limits, skips and so on). Returns th eprepared cursor.
* Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
*
* @param cursor
*/
@@ -18,6 +18,8 @@ package org.springframework.data.mongodb.core;
import static org.springframework.data.domain.Sort.Direction.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.springframework.dao.DataAccessException;

@@ -41,6 +43,7 @@ public class DefaultIndexOperations implements IndexOperations {
private static final Double ONE = Double.valueOf(1);
private static final Double MINUS_ONE = Double.valueOf(-1);
private static final Collection<String> TWO_D_IDENTIFIERS = Arrays.asList("2d", "2dsphere");
private final MongoOperations mongoOperations;
private final String collectionName;

@@ -140,7 +143,7 @@ public class DefaultIndexOperations implements IndexOperations {
Object value = keyDbObject.get(key);
if ("2d".equals(value)) {
if (TWO_D_IDENTIFIERS.contains(value)) {
indexFields.add(IndexField.geo(key));
} else {
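The TWO_D_IDENTIFIERS change above makes "2dsphere" index keys show up as geo fields when index metadata is read back. A hedged sketch of reading that metadata (the mongoTemplate variable and the "venues" collection name are assumptions; imports from org.springframework.data.mongodb.core.index are omitted):

```java
// Iterate over the index metadata of an existing collection.
for (IndexInfo indexInfo : mongoTemplate.indexOps("venues").getIndexInfo()) {
  for (IndexField field : indexInfo.getIndexFields()) {
    // With this change, a "2dsphere" key is reported as a geo field just like "2d".
    System.out.println(field.getKey() + " geo=" + field.isGeo());
  }
}
```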
@@ -49,7 +49,7 @@ public class MongoAction {
* @param collectionName the collection name, must not be {@literal null} or empty.
* @param entityType the POJO that is being operated against
* @param document the converted DBObject from the POJO or Spring Update object
* @param query the converted DBOjbect from the Spring Query object
* @param query the converted DBObject from the Spring Query object
*/
public MongoAction(WriteConcern defaultWriteConcern, MongoActionOperation mongoActionOperation,
String collectionName, Class<?> entityType, DBObject document, DBObject query) {
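The constructor documented above is what a WriteConcernResolver receives when deciding the write concern for each operation. A hedged sketch of such a resolver (the AuditLog entity is hypothetical; imports from org.springframework.data.mongodb.core and com.mongodb are omitted):

```java
WriteConcernResolver resolver = new WriteConcernResolver() {

  public WriteConcern resolve(MongoAction action) {
    // Relax the write concern for fire-and-forget audit entries, keep the default otherwise.
    return AuditLog.class.equals(action.getEntityType())
        ? WriteConcern.UNACKNOWLEDGED : action.getDefaultWriteConcern();
  }
};
```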
@@ -23,11 +23,15 @@ import org.springframework.dao.InvalidDataAccessResourceUsageException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
import com.mongodb.MongoCursorNotFoundException;
import com.mongodb.MongoException;
import com.mongodb.MongoException.CursorNotFound;
import com.mongodb.MongoException.DuplicateKey;
import com.mongodb.MongoException.Network;
import com.mongodb.MongoInternalException;
import com.mongodb.MongoServerSelectionException;
import com.mongodb.MongoSocketException;
import com.mongodb.MongoTimeoutException;
/**
* Simple {@link PersistenceExceptionTranslator} for Mongo. Convert the given runtime exception to an appropriate

@@ -47,21 +51,23 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
// Check for well-known MongoException subclasses.
// All other MongoExceptions
if (ex instanceof DuplicateKey) {
if (ex instanceof DuplicateKey || ex instanceof DuplicateKeyException) {
return new DuplicateKeyException(ex.getMessage(), ex);
}
if (ex instanceof Network) {
if (ex instanceof Network || ex instanceof MongoSocketException) {
return new DataAccessResourceFailureException(ex.getMessage(), ex);
}
if (ex instanceof CursorNotFound) {
if (ex instanceof CursorNotFound || ex instanceof MongoCursorNotFoundException) {
return new DataAccessResourceFailureException(ex.getMessage(), ex);
}
// Driver 2.12 throws this to indicate connection problems. String comparison to avoid hard dependency
if (ex.getClass().getName().equals("com.mongodb.MongoServerSelectionException")) {
if (ex instanceof MongoServerSelectionException) {
return new DataAccessResourceFailureException(ex.getMessage(), ex);
}
if (ex instanceof MongoTimeoutException) {
return new DataAccessResourceFailureException(ex.getMessage(), ex);
}

@@ -69,6 +75,7 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
return new InvalidDataAccessResourceUsageException(ex.getMessage(), ex);
}
// All other MongoExceptions
if (ex instanceof MongoException) {
int code = ((MongoException) ex).getCode();
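The branches above map driver exceptions onto Spring's DataAccessException hierarchy. A small sketch of what that translation looks like from calling code (a sketch only; it assumes the driver's MongoTimeoutException exposes a message-only constructor, and the message text is arbitrary):

```java
MongoExceptionTranslator translator = new MongoExceptionTranslator();

// With the additions above, a driver timeout now translates to a resource failure.
DataAccessException translated =
    translator.translateExceptionIfPossible(new MongoTimeoutException("server selection timed out"));
// translated is a DataAccessResourceFailureException carrying the original exception as its cause.
```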
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2014 the original author or authors.
* Copyright 2011-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -416,7 +416,9 @@ public interface MongoOperations {
/**
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Will consider entity mapping
* information to determine the collection the query is ran against.
* information to determine the collection the query is ran against. Note, that MongoDB limits the number of results
* by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of
* results.
*
* @param near must not be {@literal null}.
* @param entityClass must not be {@literal null}.

@@ -425,7 +427,9 @@
<T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass);
/**
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}.
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the
* number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
* particular number of results.
*
* @param near must not be {@literal null}.
* @param entityClass must not be {@literal null}.
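Because MongoDB caps geoNear results by default, the updated Javadoc above recommends putting an explicit limit on the NearQuery. A hedged sketch of a call with such a limit (the Venue entity, the coordinates, and the mongoOperations variable are assumptions; imports from the query and geo packages are omitted):

```java
// Search within 2 km of a point and explicitly cap the result size, as the Javadoc suggests.
NearQuery nearQuery = NearQuery.near(new Point(-73.99171, 40.738868))
    .maxDistance(new Distance(2, Metrics.KILOMETERS))
    .num(100);

GeoResults<Venue> results = mongoOperations.geoNear(nearQuery, Venue.class);
```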
@@ -115,6 +115,7 @@ import com.mongodb.WriteConcern;
|
||||
import com.mongodb.WriteResult;
|
||||
import com.mongodb.util.JSON;
|
||||
import com.mongodb.util.JSONParseException;
|
||||
|
||||
/**
|
||||
* Primary implementation of {@link MongoOperations}.
|
||||
*
|
||||
@@ -334,7 +335,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
CommandResult result = execute(new DbCallback<CommandResult>() {
|
||||
public CommandResult doInDB(DB db) throws MongoException, DataAccessException {
|
||||
return db.command(command, options);
|
||||
return readPreference != null ? db.command(command, readPreference) : db.command(command);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -354,7 +355,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
public void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch) {
|
||||
executeQuery(query, collectionName, dch, new QueryCursorPreparer(query));
|
||||
executeQuery(query, collectionName, dch, new QueryCursorPreparer(query, null));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -532,7 +533,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass,
|
||||
new QueryCursorPreparer(query));
|
||||
new QueryCursorPreparer(query, entityClass));
|
||||
}
|
||||
|
||||
public <T> T findById(Object id, Class<T> entityClass) {
|
||||
@@ -565,7 +566,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
BasicDBObject command = new BasicDBObject("geoNear", collection);
|
||||
command.putAll(near.toDBObject());
|
||||
|
||||
CommandResult commandResult = executeCommand(command);
|
||||
CommandResult commandResult = executeCommand(command, getDb().getOptions());
|
||||
List<Object> results = (List<Object>) commandResult.get("results");
|
||||
results = results == null ? Collections.emptyList() : results;
|
||||
|
||||
@@ -614,8 +615,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
public <T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
String collectionName) {
|
||||
return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(), query.getSortObject(),
|
||||
entityClass, update, options);
|
||||
return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(),
|
||||
getMappedSortObject(query, entityClass), entityClass, update, options);
|
||||
}
|
||||
|
||||
// Find methods that take a Query to express the query and that return a single object that is also removed from the
|
||||
@@ -626,8 +627,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
public <T> T findAndRemove(Query query, Class<T> entityClass, String collectionName) {
|
||||
return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(), query.getSortObject(),
|
||||
entityClass);
|
||||
|
||||
return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(),
|
||||
getMappedSortObject(query, entityClass), entityClass);
|
||||
}
|
||||
|
||||
public long count(Query query, Class<?> entityClass) {
|
||||
@@ -761,27 +763,33 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
protected <T> void doInsertAll(Collection<? extends T> listToSave, MongoWriter<T> writer) {
|
||||
Map<String, List<T>> objs = new HashMap<String, List<T>>();
|
||||
|
||||
for (T o : listToSave) {
|
||||
Map<String, List<T>> elementsByCollection = new HashMap<String, List<T>>();
|
||||
|
||||
for (T element : listToSave) {
|
||||
|
||||
if (element == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(element.getClass());
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(o.getClass());
|
||||
if (entity == null) {
|
||||
throw new InvalidDataAccessApiUsageException("No Persitent Entity information found for the class "
|
||||
+ o.getClass().getName());
|
||||
throw new InvalidDataAccessApiUsageException("No PersistentEntity information found for " + element.getClass());
|
||||
}
|
||||
|
||||
String collection = entity.getCollection();
|
||||
List<T> collectionElements = elementsByCollection.get(collection);
|
||||
|
||||
List<T> objList = objs.get(collection);
|
||||
if (null == objList) {
|
||||
objList = new ArrayList<T>();
|
||||
objs.put(collection, objList);
|
||||
if (null == collectionElements) {
|
||||
collectionElements = new ArrayList<T>();
|
||||
elementsByCollection.put(collection, collectionElements);
|
||||
}
|
||||
objList.add(o);
|
||||
|
||||
collectionElements.add(element);
|
||||
}
|
||||
|
||||
for (Map.Entry<String, List<T>> entry : objs.entrySet()) {
|
||||
for (Map.Entry<String, List<T>> entry : elementsByCollection.entrySet()) {
|
||||
doInsertBatch(entry.getKey(), entry.getValue(), this.mongoConverter);
|
||||
}
|
||||
}
|
||||
@@ -1005,8 +1013,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Calling update using query: " + queryObj + " and update: " + updateObj + " in collection: "
|
||||
+ collectionName);
|
||||
LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s",
|
||||
serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName));
|
||||
}
|
||||
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName,
|
||||
@@ -1068,17 +1076,22 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns {@link Entry} containing the {@link MongoPersistentProperty} defining the {@literal id} as
|
||||
* {@link Entry#getKey()} and the {@link Id}s property value as its {@link Entry#getValue()}.
|
||||
* Returns {@link Entry} containing the field name of the id property as {@link Entry#getKey()} and the {@link Id}s
|
||||
* property value as its {@link Entry#getValue()}.
|
||||
*
|
||||
* @param object
|
||||
* @return
|
||||
*/
|
||||
private Map.Entry<MongoPersistentProperty, Object> extractIdPropertyAndValue(Object object) {
|
||||
private Entry<String, Object> extractIdPropertyAndValue(Object object) {
|
||||
|
||||
Assert.notNull(object, "Id cannot be extracted from 'null'.");
|
||||
|
||||
Class<?> objectType = object.getClass();
|
||||
|
||||
if (object instanceof DBObject) {
|
||||
return Collections.singletonMap(ID_FIELD, ((DBObject) object).get(ID_FIELD)).entrySet().iterator().next();
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(objectType);
|
||||
MongoPersistentProperty idProp = entity == null ? null : entity.getIdProperty();
|
||||
|
||||
@@ -1088,7 +1101,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
Object idValue = BeanWrapper.create(object, mongoConverter.getConversionService())
|
||||
.getProperty(idProp, Object.class);
|
||||
return Collections.singletonMap(idProp, idValue).entrySet().iterator().next();
|
||||
return Collections.singletonMap(idProp.getFieldName(), idValue).entrySet().iterator().next();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1099,8 +1112,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
*/
|
||||
private Query getIdQueryFor(Object object) {
|
||||
|
||||
Map.Entry<MongoPersistentProperty, Object> id = extractIdPropertyAndValue(object);
|
||||
return new Query(where(id.getKey().getFieldName()).is(id.getValue()));
|
||||
Entry<String, Object> id = extractIdPropertyAndValue(object);
|
||||
return new Query(where(id.getKey()).is(id.getValue()));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1114,7 +1127,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
Assert.notEmpty(objects, "Cannot create Query for empty collection.");
|
||||
|
||||
Iterator<?> it = objects.iterator();
|
||||
Map.Entry<MongoPersistentProperty, Object> firstEntry = extractIdPropertyAndValue(it.next());
|
||||
Entry<String, Object> firstEntry = extractIdPropertyAndValue(it.next());
|
||||
|
||||
ArrayList<Object> ids = new ArrayList<Object>(objects.size());
|
||||
ids.add(firstEntry.getValue());
|
||||
@@ -1123,7 +1136,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
ids.add(extractIdPropertyAndValue(it.next()).getValue());
|
||||
}
|
||||
|
||||
return new Query(where(firstEntry.getKey().getFieldName()).in(ids));
|
||||
return new Query(where(firstEntry.getKey()).in(ids));
|
||||
}
|
||||
|
||||
private void assertUpdateableIdIfNotSet(Object entity) {
|
||||
@@ -1180,7 +1193,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Remove using query: {} in collection: {}.", new Object[] { dboq, collection.getName() });
|
||||
LOGGER.debug("Remove using query: {} in collection: {}.", new Object[] { serializeToJsonSafely(dboq),
|
||||
collection.getName() });
|
||||
}
|
||||
|
||||
WriteResult wr = writeConcernToUse == null ? collection.remove(dboq) : collection.remove(dboq,
|
||||
@@ -1408,7 +1422,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command));
|
||||
}
|
||||
|
||||
CommandResult commandResult = executeCommand(command);
|
||||
CommandResult commandResult = executeCommand(command, getDb().getOptions());
|
||||
handleCommandError(commandResult, command);
|
||||
|
||||
// map results
|
||||
@@ -1453,13 +1467,13 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
"Can not use skip or field specification with map reduce operations");
|
||||
}
|
||||
if (query.getQueryObject() != null) {
|
||||
copyMapReduceOptions.put("query", query.getQueryObject());
|
||||
copyMapReduceOptions.put("query", queryMapper.getMappedObject(query.getQueryObject(), null));
|
||||
}
|
||||
if (query.getLimit() > 0) {
|
||||
copyMapReduceOptions.put("limit", query.getLimit());
|
||||
}
|
||||
if (query.getSortObject() != null) {
|
||||
copyMapReduceOptions.put("sort", query.getSortObject());
|
||||
copyMapReduceOptions.put("sort", queryMapper.getMappedObject(query.getSortObject(), null));
|
||||
}
|
||||
}
|
||||
return copyMapReduceOptions;
|
||||
@@ -1599,7 +1613,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), mappedFields, entityClass, collectionName));
|
||||
serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName));
|
||||
}
|
||||
|
||||
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback,
|
||||
@@ -1637,8 +1651,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
Class<T> entityClass) {
|
||||
EntityReader<? super T, DBObject> readerToUse = this.mongoConverter;
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findAndRemove using query: " + query + " fields: " + fields + " sort: " + sort + " for class: "
|
||||
+ entityClass + " in collection: " + collectionName);
|
||||
LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), fields, sort, entityClass, collectionName));
|
||||
}
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
return executeFindOneInternal(new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort),
|
||||
@@ -1662,8 +1676,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DBObject mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findAndModify using query: " + mappedQuery + " fields: " + fields + " sort: " + sort
|
||||
+ " for class: " + entityClass + " and update: " + mappedUpdate + " in collection: " + collectionName);
|
||||
LOGGER.debug(String.format("findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s "
|
||||
+ "in collection: %s", serializeToJsonSafely(mappedQuery), fields, sort, entityClass,
|
||||
serializeToJsonSafely(mappedUpdate), collectionName));
|
||||
}
|
||||
|
||||
return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options),
|
||||
@@ -1941,6 +1956,16 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return converter;
|
||||
}
|
||||
|
||||
private DBObject getMappedSortObject(Query query, Class<?> type) {
|
||||
|
||||
if (query == null || query.getSortObject() == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(type);
|
||||
return queryMapper.getMappedObject(query.getSortObject(), entity);
|
||||
}
|
||||
|
||||
// Callback implementations
|
||||
|
||||
/**
|
||||
@@ -1963,13 +1988,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
public DBObject doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
if (fields == null) {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findOne using query: " + query + " in db.collection: " + collection.getFullName());
|
||||
LOGGER.debug(String.format("findOne using query: %s in db.collection: %s", serializeToJsonSafely(query),
|
||||
collection.getFullName()));
|
||||
}
|
||||
return collection.findOne(query);
|
||||
} else {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findOne using query: " + query + " fields: " + fields + " in db.collection: "
|
||||
+ collection.getFullName());
|
||||
LOGGER.debug(String.format("findOne using query: %s fields: %s in db.collection: %s",
|
||||
serializeToJsonSafely(query), fields, collection.getFullName()));
|
||||
}
|
||||
return collection.findOne(query, fields);
|
||||
}
|
||||
@@ -2134,9 +2160,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
class QueryCursorPreparer implements CursorPreparer {
|
||||
|
||||
private final Query query;
|
||||
private final Class<?> type;
|
||||
|
||||
public QueryCursorPreparer(Query query, Class<?> type) {
|
||||
|
||||
public QueryCursorPreparer(Query query) {
|
||||
this.query = query;
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -2164,7 +2193,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
cursorToUse = cursorToUse.limit(query.getLimit());
|
||||
}
|
||||
if (query.getSortObject() != null) {
|
||||
cursorToUse = cursorToUse.sort(query.getSortObject());
|
||||
DBObject sortDbo = type != null ? getMappedSortObject(query, type) : query.getSortObject();
|
||||
cursorToUse = cursorToUse.sort(sortDbo);
|
||||
}
|
||||
if (StringUtils.hasText(query.getHint())) {
|
||||
cursorToUse = cursorToUse.hint(query.getHint());
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -248,7 +248,7 @@ public class Aggregation {
|
||||
|
||||
if (operation instanceof FieldsExposingAggregationOperation) {
|
||||
FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation;
|
||||
context = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields());
|
||||
context = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), rootContext);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -88,7 +88,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} instance for the given fields in either sythetic or non-synthetic way.
|
||||
* Creates a new {@link ExposedFields} instance for the given fields in either synthetic or non-synthetic way.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @param synthetic
|
||||
@@ -107,7 +107,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} with the given orignals and synthetics.
|
||||
* Creates a new {@link ExposedFields} with the given originals and synthetics.
|
||||
*
|
||||
* @param originals must not be {@literal null}.
|
||||
* @param synthetic must not be {@literal null}.
|
||||
@@ -268,14 +268,21 @@ public final class ExposedFields implements Iterable<ExposedField> {
|
||||
return field.isAliased();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the synthetic
|
||||
*/
|
||||
public boolean isSynthetic() {
|
||||
return synthetic;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the field can be referred to using the given name.
|
||||
*
|
||||
* @param input
|
||||
* @param name
|
||||
* @return
|
||||
*/
|
||||
public boolean canBeReferredToBy(String input) {
|
||||
return getTarget().equals(input);
|
||||
public boolean canBeReferredToBy(String name) {
|
||||
return getName().equals(name) || getTarget().equals(name);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -340,6 +347,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
|
||||
public FieldReference(ExposedField field) {
|
||||
|
||||
Assert.notNull(field, "ExposedField must not be null!");
|
||||
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
@@ -355,7 +363,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the referenve value for the given field reference. Will return 1 for a synthetic, unaliased field or the
|
||||
* Returns the reference value for the given field reference. Will return 1 for a synthetic, unaliased field or the
|
||||
* raw rendering of the reference otherwise.
|
||||
*
|
||||
* @return
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -32,16 +32,22 @@ import com.mongodb.DBObject;
|
||||
class ExposedFieldsAggregationOperationContext implements AggregationOperationContext {
|
||||
|
||||
private final ExposedFields exposedFields;
|
||||
private final AggregationOperationContext rootContext;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFieldsAggregationOperationContext} from the given {@link ExposedFields}.
|
||||
* Creates a new {@link ExposedFieldsAggregationOperationContext} from the given {@link ExposedFields}. Uses the given
|
||||
* {@link AggregationOperationContext} to perform a mapping to mongo types if necessary.
|
||||
*
|
||||
* @param exposedFields must not be {@literal null}.
|
||||
* @param rootContext must not be {@literal null}.
|
||||
*/
|
||||
public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields) {
|
||||
public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields, AggregationOperationContext rootContext) {
|
||||
|
||||
Assert.notNull(exposedFields, "ExposedFields must not be null!");
|
||||
Assert.notNull(rootContext, "RootContext must not be null!");
|
||||
|
||||
this.exposedFields = exposedFields;
|
||||
this.rootContext = rootContext;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -50,7 +56,7 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
*/
|
||||
@Override
|
||||
public DBObject getMappedObject(DBObject dbObject) {
|
||||
return dbObject;
|
||||
return rootContext.getMappedObject(dbObject);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -59,7 +65,7 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
return getReference(field.getTarget());
|
||||
return getReference(field, field.getTarget());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -68,11 +74,42 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(String name) {
|
||||
return getReference(null, name);
|
||||
}
|
||||
|
||||
ExposedField field = exposedFields.getField(name);
|
||||
/**
|
||||
* Returns a {@link FieldReference} to the given {@link Field} with the given {@code name}.
|
||||
*
|
||||
* @param field may be {@literal null}
|
||||
* @param name must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
private FieldReference getReference(Field field, String name) {
|
||||
|
||||
if (field != null) {
|
||||
return new FieldReference(field);
|
||||
Assert.notNull(name, "Name must not be null!");
|
||||
|
||||
ExposedField exposedField = exposedFields.getField(name);
|
||||
|
||||
if (exposedField != null) {
|
||||
|
||||
if (field != null) {
|
||||
// we return a FieldReference to the given field directly to make sure that we reference the proper alias here.
|
||||
return new FieldReference(new ExposedField(field, exposedField.isSynthetic()));
|
||||
}
|
||||
|
||||
return new FieldReference(exposedField);
|
||||
}
|
||||
|
||||
if (name.contains(".")) {
|
||||
|
||||
// for nested field references we only check that the root field exists.
|
||||
ExposedField rootField = exposedFields.getField(name.split("\\.")[0]);
|
||||
|
||||
if (rootField != null) {
|
||||
|
||||
// We have to synthetic to true, in order to render the field-name as is.
|
||||
return new FieldReference(new ExposedField(name, true));
|
||||
}
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Invalid reference '%s'!", name));
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -28,7 +28,7 @@ import com.mongodb.DBObject;
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
class LimitOperation implements AggregationOperation {
|
||||
public class LimitOperation implements AggregationOperation {
|
||||
|
||||
private final long maxElements;
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -16,6 +16,7 @@
package org.springframework.data.mongodb.core.aggregation;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.util.Assert;
import com.mongodb.BasicDBObject;

@@ -32,17 +33,29 @@
*/
public class MatchOperation implements AggregationOperation {
private final Criteria criteria;
private final CriteriaDefinition criteriaDefinition;
/**
* Creates a new {@link MatchOperation} for the given {@link Criteria}.
*
* @param criteria must not be {@literal null}.
* @deprecated Use {@link MatchOperation#MatchOperation(CriteriaDefinition)} instead. This constructor is scheduled
* for removal in the next versions.
*/
@Deprecated
public MatchOperation(Criteria criteria) {
this((CriteriaDefinition) criteria);
}
Assert.notNull(criteria, "Criteria must not be null!");
this.criteria = criteria;
/**
* Creates a new {@link MatchOperation} for the given {@link CriteriaDefinition}.
*
* @param criteriaDefinition must not be {@literal null}.
*/
public MatchOperation(CriteriaDefinition criteriaDefinition) {
Assert.notNull(criteriaDefinition, "Criteria must not be null!");
this.criteriaDefinition = criteriaDefinition;
}
/*

@@ -51,6 +64,6 @@ public class MatchOperation implements AggregationOperation {
*/
@Override
public DBObject toDBObject(AggregationOperationContext context) {
return new BasicDBObject("$match", context.getMappedObject(criteria.getCriteriaObject()));
return new BasicDBObject("$match", context.getMappedObject(criteriaDefinition.getCriteriaObject()));
}
}
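In application code a MatchOperation is normally obtained through the Aggregation.match(...) factory rather than instantiated directly. A hedged sketch of a pipeline that uses it (the Order entity, its field names, and the mongoOperations variable are assumptions; imports from the aggregation and query packages are omitted):

```java
// Filter with $match, then group and sort the remaining documents.
Aggregation aggregation = Aggregation.newAggregation(
    Aggregation.match(Criteria.where("status").is("SHIPPED")),
    Aggregation.group("customerId").count().as("orders"),
    Aggregation.sort(Sort.Direction.DESC, "orders"));

AggregationResults<DBObject> results =
    mongoOperations.aggregate(aggregation, Order.class, DBObject.class);
```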
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,7 +20,7 @@ import java.math.BigInteger;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.support.ConversionServiceFactory;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.core.convert.support.GenericConversionService;
|
||||
import org.springframework.data.convert.EntityInstantiators;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToObjectIdConverter;
|
||||
@@ -46,10 +46,8 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali
|
||||
*
|
||||
* @param conversionService
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public AbstractMongoConverter(GenericConversionService conversionService) {
|
||||
this.conversionService = conversionService == null ? ConversionServiceFactory.createDefaultConversionService()
|
||||
: conversionService;
|
||||
this.conversionService = conversionService == null ? new DefaultConversionService() : conversionService;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,14 +17,15 @@ package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -69,10 +70,13 @@ public class CustomConversions {
|
||||
private final Set<ConvertiblePair> writingPairs;
|
||||
private final Set<Class<?>> customSimpleTypes;
|
||||
private final SimpleTypeHolder simpleTypeHolder;
|
||||
private final ConcurrentMap<ConvertiblePair, CacheValue> customReadTargetTypes;
|
||||
|
||||
private final List<Object> converters;
|
||||
|
||||
private final Map<ConvertiblePair, CacheValue> customReadTargetTypes;
|
||||
private final Map<ConvertiblePair, CacheValue> customWriteTargetTypes;
|
||||
private final Map<Class<?>, CacheValue> rawWriteTargetTypes;
|
||||
|
||||
/**
|
||||
* Creates an empty {@link CustomConversions} object.
|
||||
*/
|
||||
@@ -92,7 +96,9 @@ public class CustomConversions {
|
||||
this.readingPairs = new LinkedHashSet<ConvertiblePair>();
|
||||
this.writingPairs = new LinkedHashSet<ConvertiblePair>();
|
||||
this.customSimpleTypes = new HashSet<Class<?>>();
|
||||
this.customReadTargetTypes = new ConcurrentHashMap<GenericConverter.ConvertiblePair, CacheValue>();
|
||||
this.customReadTargetTypes = new ConcurrentHashMap<ConvertiblePair, CacheValue>();
|
||||
this.customWriteTargetTypes = new ConcurrentHashMap<ConvertiblePair, CacheValue>();
|
||||
this.rawWriteTargetTypes = new ConcurrentHashMap<Class<?>, CacheValue>();
|
||||
|
||||
List<Object> toRegister = new ArrayList<Object>();
|
||||
|
||||
@@ -235,70 +241,103 @@ public class CustomConversions {
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public Class<?> getCustomWriteTarget(Class<?> sourceType) {
|
||||
return getCustomWriteTarget(sourceType, null);
|
||||
public Class<?> getCustomWriteTarget(final Class<?> sourceType) {
|
||||
|
||||
return getOrCreateAndCache(sourceType, rawWriteTargetTypes, new Producer() {
|
||||
|
||||
@Override
|
||||
public Class<?> get() {
|
||||
return getCustomTarget(sourceType, null, writingPairs);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the target type we can write an onject of the given source type to. The returned type might be a subclass
|
||||
* oth the given expected type though. If {@code expectedTargetType} is {@literal null} we will simply return the
|
||||
* first target type matching or {@literal null} if no conversion can be found.
|
||||
* Returns the target type we can readTargetWriteLocl an inject of the given source type to. The returned type might
|
||||
* be a subclass of the given expected type though. If {@code expectedTargetType} is {@literal null} we will simply
|
||||
* return the first target type matching or {@literal null} if no conversion can be found.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @param requestedTargetType
|
||||
* @return
|
||||
*/
|
||||
public Class<?> getCustomWriteTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
public Class<?> getCustomWriteTarget(final Class<?> sourceType, final Class<?> requestedTargetType) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
if (requestedTargetType == null) {
|
||||
return getCustomWriteTarget(sourceType);
|
||||
}
|
||||
|
||||
return getCustomTarget(sourceType, requestedTargetType, writingPairs);
|
||||
return getOrCreateAndCache(new ConvertiblePair(sourceType, requestedTargetType), customWriteTargetTypes,
|
||||
new Producer() {
|
||||
|
||||
@Override
|
||||
public Class<?> get() {
|
||||
return getCustomTarget(sourceType, requestedTargetType, writingPairs);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether we have a custom conversion registered to write into a Mongo native type. The returned type might
|
||||
* be a subclass of the given expected type though.
|
||||
* Returns whether we have a custom conversion registered to readTargetWriteLocl into a Mongo native type. The
|
||||
* returned type might be a subclass of the given expected type though.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public boolean hasCustomWriteTarget(Class<?> sourceType) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
return hasCustomWriteTarget(sourceType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether we have a custom conversion registered to write an object of the given source type into an object
|
||||
* of the given Mongo native target type.
|
||||
* Returns whether we have a custom conversion registered to readTargetWriteLocl an object of the given source type
|
||||
* into an object of the given Mongo native target type.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}.
|
||||
* @param requestedTargetType
|
||||
* @return
|
||||
*/
|
||||
public boolean hasCustomWriteTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
return getCustomWriteTarget(sourceType, requestedTargetType) != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether we have a custom conversion registered to read the given source into the given target type.
|
||||
* Returns whether we have a custom conversion registered to readTargetReadLock the given source into the given target
|
||||
* type.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @param requestedTargetType must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public boolean hasCustomReadTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
Assert.notNull(requestedTargetType);
|
||||
|
||||
return getCustomReadTarget(sourceType, requestedTargetType) != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Inspects the given {@link ConvertiblePair} for ones that have a source compatible type as source. Additionally
|
||||
* Returns the actual target type for the given {@code sourceType} and {@code requestedTargetType}. Note that the
|
||||
* returned {@link Class} could be an assignable type to the given {@code requestedTargetType}.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}.
|
||||
* @param requestedTargetType can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private Class<?> getCustomReadTarget(final Class<?> sourceType, final Class<?> requestedTargetType) {
|
||||
|
||||
if (requestedTargetType == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return getOrCreateAndCache(new ConvertiblePair(sourceType, requestedTargetType), customReadTargetTypes,
|
||||
new Producer() {
|
||||
|
||||
@Override
|
||||
public Class<?> get() {
|
||||
return getCustomTarget(sourceType, requestedTargetType, readingPairs);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Inspects the given {@link ConvertiblePair}s for ones that have a source compatible type as source. Additionally
|
||||
* checks assignability of the target type if one is given.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}.
|
||||
@@ -307,11 +346,15 @@ public class CustomConversions {
|
||||
* @return
|
||||
*/
|
||||
private static Class<?> getCustomTarget(Class<?> sourceType, Class<?> requestedTargetType,
|
||||
Iterable<ConvertiblePair> pairs) {
|
||||
Collection<ConvertiblePair> pairs) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
Assert.notNull(pairs);
|
||||
|
||||
if (requestedTargetType != null && pairs.contains(new ConvertiblePair(sourceType, requestedTargetType))) {
|
||||
return requestedTargetType;
|
||||
}
|
||||
|
||||
for (ConvertiblePair typePair : pairs) {
|
||||
if (typePair.getSourceType().isAssignableFrom(sourceType)) {
|
||||
Class<?> targetType = typePair.getTargetType();
|
||||
@@ -325,32 +368,31 @@ public class CustomConversions {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the actual target type for the given {@code sourceType} and {@code requestedTargetType}. Note that the
|
||||
* returned {@link Class} could be an assignable type to the given {@code requestedTargetType}.
|
||||
* Will try to find a value for the given key in the given cache or produce one using the given {@link Producer} and
|
||||
* store it in the cache.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}.
|
||||
* @param requestedTargetType can be {@literal null}.
|
||||
* @param key the key to lookup a potentially existing value, must not be {@literal null}.
|
||||
* @param cache the cache to find the value in, must not be {@literal null}.
|
||||
* @param producer the {@link Producer} to create values to cache, must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private Class<?> getCustomReadTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
private static <T> Class<?> getOrCreateAndCache(T key, Map<T, CacheValue> cache, Producer producer) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
CacheValue cacheValue = cache.get(key);
|
||||
|
||||
if (requestedTargetType == null) {
|
||||
return null;
|
||||
if (cacheValue != null) {
|
||||
return cacheValue.getType();
|
||||
}
|
||||
|
||||
ConvertiblePair lookupKey = new ConvertiblePair(sourceType, requestedTargetType);
|
||||
CacheValue readTargetTypeValue = customReadTargetTypes.get(lookupKey);
|
||||
Class<?> type = producer.get();
|
||||
cache.put(key, CacheValue.of(type));
|
||||
|
||||
if (readTargetTypeValue != null) {
|
||||
return readTargetTypeValue.getType();
|
||||
}
|
||||
return type;
|
||||
}
|
||||
|
||||
readTargetTypeValue = CacheValue.of(getCustomTarget(sourceType, requestedTargetType, readingPairs));
|
||||
CacheValue cacheValue = customReadTargetTypes.putIfAbsent(lookupKey, readTargetTypeValue);
|
||||
private interface Producer {
|
||||
|
||||
return cacheValue != null ? cacheValue.getType() : readTargetTypeValue.getType();
|
||||
Class<?> get();
|
||||
}
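A hedged sketch of how the new per-type caches behave from a caller's perspective; the Email type and its converter are placeholders, not part of this change:

// Sketch only: repeated lookups for the same source type now hit the caches filled by
// getOrCreateAndCache(...) instead of re-scanning all registered ConvertiblePairs.
@WritingConverter
class EmailToStringConverter implements Converter<Email, String> {
  public String convert(Email source) { return source.toString(); }
}

CustomConversions conversions = new CustomConversions(Arrays.asList(new EmailToStringConverter()));
conversions.hasCustomWriteTarget(Email.class, String.class); // true; result cached per ConvertiblePair
conversions.getCustomWriteTarget(Email.class);               // String.class; cached per raw source type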
|
||||
|
||||
@WritingConverter
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -34,7 +34,7 @@ import com.mongodb.DBObject;
|
||||
*/
|
||||
class DBObjectAccessor {
|
||||
|
||||
private final DBObject dbObject;
|
||||
private final BasicDBObject dbObject;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DBObjectAccessor} for the given {@link DBObject}.
|
||||
@@ -46,7 +46,7 @@ class DBObjectAccessor {
|
||||
Assert.notNull(dbObject, "DBObject must not be null!");
|
||||
Assert.isInstanceOf(BasicDBObject.class, dbObject, "Given DBObject must be a BasicDBObject!");
|
||||
|
||||
this.dbObject = dbObject;
|
||||
this.dbObject = (BasicDBObject) dbObject;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -62,6 +62,11 @@ class DBObjectAccessor {
|
||||
Assert.notNull(prop, "MongoPersistentProperty must not be null!");
|
||||
String fieldName = prop.getFieldName();
|
||||
|
||||
if (!fieldName.contains(".")) {
|
||||
dbObject.put(fieldName, value);
|
||||
return;
|
||||
}
|
||||
|
||||
Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
|
||||
DBObject dbObject = this.dbObject;
|
||||
|
||||
@@ -87,12 +92,16 @@ class DBObjectAccessor {
|
||||
* @param property must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public Object get(MongoPersistentProperty property) {
|
||||
|
||||
String fieldName = property.getFieldName();
|
||||
|
||||
if (!fieldName.contains(".")) {
|
||||
return this.dbObject.get(fieldName);
|
||||
}
|
||||
|
||||
Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
|
||||
Map<Object, Object> source = this.dbObject.toMap();
|
||||
Map<String, Object> source = this.dbObject;
|
||||
Object result = null;
|
||||
|
||||
while (source != null && parts.hasNext()) {
|
||||
@@ -108,14 +117,14 @@ class DBObjectAccessor {
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private Map<Object, Object> getAsMap(Object source) {
|
||||
private Map<String, Object> getAsMap(Object source) {
|
||||
|
||||
if (source instanceof BasicDBObject) {
|
||||
return ((DBObject) source).toMap();
|
||||
return (BasicDBObject) source;
|
||||
}
|
||||
|
||||
if (source instanceof Map) {
|
||||
return (Map<Object, Object>) source;
|
||||
return (Map<String, Object>) source;
|
||||
}
|
||||
|
||||
return null;
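The effect of the dotted-path handling can be sketched with BasicDBObject directly, since DBObjectAccessor itself is package-private; the field names are illustrative:

// A property mapped to the field name "address.city" is materialized as nested documents ...
BasicDBObject root = new BasicDBObject("address", new BasicDBObject("city", "Dresden"));

// ... and read back by walking the path one segment at a time, which get(MongoPersistentProperty)
// now does against the BasicDBObject itself instead of a copied Map.
Object address = root.get("address");
Object city = address instanceof BasicDBObject ? ((BasicDBObject) address).get("city") : null; // "Dresden"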
|
||||
|
||||
@@ -28,11 +28,11 @@ import org.aopalliance.intercept.MethodInvocation;
|
||||
import org.objenesis.Objenesis;
|
||||
import org.objenesis.ObjenesisStd;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.beans.BeanUtils;
|
||||
import org.springframework.cglib.proxy.Callback;
|
||||
import org.springframework.cglib.proxy.Enhancer;
|
||||
import org.springframework.cglib.proxy.Factory;
|
||||
import org.springframework.cglib.proxy.MethodProxy;
|
||||
import org.springframework.core.SpringVersion;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.LazyLoadingException;
|
||||
@@ -57,7 +57,6 @@ import com.mongodb.DBRef;
|
||||
*/
|
||||
public class DefaultDbRefResolver implements DbRefResolver {
|
||||
|
||||
private static final boolean IS_SPRING_4_OR_BETTER = SpringVersion.getVersion().startsWith("4");
|
||||
private static final boolean OBJENESIS_PRESENT = ClassUtils.isPresent("org.objenesis.Objenesis", null);
|
||||
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
@@ -138,7 +137,7 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
proxyFactory.setProxyTargetClass(true);
|
||||
proxyFactory.setTargetClass(propertyType);
|
||||
|
||||
if (IS_SPRING_4_OR_BETTER || !OBJENESIS_PRESENT) {
|
||||
if (!OBJENESIS_PRESENT) {
|
||||
proxyFactory.addAdvice(interceptor);
|
||||
return proxyFactory.getProxy();
|
||||
}
|
||||
@@ -374,13 +373,27 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
}
|
||||
|
||||
/**
|
||||
* Static class to accomodate optional dependency on Objenesis.
|
||||
* Static class to accommodate optional dependency on Objenesis.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @since 1.4
|
||||
*/
|
||||
private static class ObjenesisProxyEnhancer {
|
||||
|
||||
private static final Objenesis OBJENESIS = new ObjenesisStd(true);
|
||||
private static final boolean IS_SPRING_4_OR_BETTER = ClassUtils.isPresent(
|
||||
"org.springframework.core.DefaultParameterNameDiscoverer", null);
|
||||
|
||||
private static final InstanceCreatorStrategy INSTANCE_CREATOR;
|
||||
|
||||
static {
|
||||
|
||||
if (IS_SPRING_4_OR_BETTER) {
|
||||
INSTANCE_CREATOR = new Spring4ObjenesisInstanceCreatorStrategy();
|
||||
} else {
|
||||
INSTANCE_CREATOR = new DefaultObjenesisInstanceCreatorStrategy();
|
||||
}
|
||||
}
|
||||
|
||||
public static Object enhanceAndGet(ProxyFactory proxyFactory, Class<?> type,
|
||||
org.springframework.cglib.proxy.MethodInterceptor interceptor) {
|
||||
@@ -390,9 +403,77 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class);
|
||||
enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class });
|
||||
|
||||
Factory factory = (Factory) OBJENESIS.newInstance(enhancer.createClass());
|
||||
Factory factory = (Factory) INSTANCE_CREATOR.newInstance(enhancer.createClass());
|
||||
factory.setCallbacks(new Callback[] { interceptor });
|
||||
return factory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Strategy for constructing new instances of a given {@link Class}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
interface InstanceCreatorStrategy {
|
||||
Object newInstance(Class<?> clazz);
|
||||
}
|
||||
|
||||
/**
|
||||
* An {@link InstanceCreatorStrategy} that uses Objenesis from the classpath.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class DefaultObjenesisInstanceCreatorStrategy implements InstanceCreatorStrategy {
|
||||
|
||||
private static final Objenesis OBJENESIS = new ObjenesisStd(true);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.ObjenesisProxyEnhancer.InstanceCreatorStrategy#newInstance(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Object newInstance(Class<?> clazz) {
|
||||
return OBJENESIS.newInstance(clazz);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* An {@link InstanceCreatorStrategy} that uses a repackaged version of Objenesis from Spring 4.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class Spring4ObjenesisInstanceCreatorStrategy implements InstanceCreatorStrategy {
|
||||
|
||||
private static final String SPRING4_OBJENESIS_CLASS_NAME = "org.springframework.objenesis.ObjenesisStd";
|
||||
private static final Object OBJENESIS;
|
||||
private static final Method NEW_INSTANCE_METHOD;
|
||||
|
||||
static {
|
||||
|
||||
try {
|
||||
Class<?> objenesisClass = ClassUtils.forName(SPRING4_OBJENESIS_CLASS_NAME,
|
||||
ObjenesisProxyEnhancer.class.getClassLoader());
|
||||
|
||||
OBJENESIS = BeanUtils.instantiateClass(objenesisClass.getConstructor(boolean.class), true);
|
||||
NEW_INSTANCE_METHOD = objenesisClass.getMethod("newInstance", Class.class);
|
||||
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Could not setup Objenesis infrastructure with Spring 4 ", e);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.ObjenesisProxyEnhancer.InstanceCreatorStrategy#newInstance(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Object newInstance(Class<?> clazz) {
|
||||
|
||||
try {
|
||||
return NEW_INSTANCE_METHOD.invoke(OBJENESIS, clazz);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Could not created instance for " + clazz, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
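For context, a hedged sketch of why Objenesis is used here at all; Account is a placeholder type:

// Objenesis creates instances without invoking any constructor, which is exactly what the
// lazy-loading proxy enhancer needs for entity classes lacking a usable default constructor.
Objenesis objenesis = new ObjenesisStd(true);
Object proxy = objenesis.newInstance(Account.class); // no Account() constructor is called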
|
||||
|
||||
@@ -56,6 +56,7 @@ import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.expression.spel.standard.SpelExpressionParser;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
@@ -75,6 +76,8 @@ import com.mongodb.DBRef;
|
||||
*/
|
||||
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
|
||||
|
||||
private static final String INCOMPATIBLE_TYPES = "Cannot convert %1$s of type %2$s into an instance of %3$s! Implement a custom Converter<%2$s, %3$s> and register it with the CustomConversions. Parent object was: %4$s";
|
||||
|
||||
protected static final Logger LOGGER = LoggerFactory.getLogger(MappingMongoConverter.class);
|
||||
|
||||
protected final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
@@ -213,6 +216,10 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return (S) readMap(typeToUse, dbo, parent);
|
||||
}
|
||||
|
||||
if (dbo instanceof BasicDBList) {
|
||||
throw new MappingException(String.format(INCOMPATIBLE_TYPES, dbo, BasicDBList.class, typeToUse.getType(), parent));
|
||||
}
|
||||
|
||||
// Retrieve persistent entity info
|
||||
MongoPersistentEntity<S> persistentEntity = (MongoPersistentEntity<S>) mappingContext
|
||||
.getPersistentEntity(typeToUse);
|
||||
@@ -253,8 +260,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return;
|
||||
}
|
||||
|
||||
Object obj = getValueInternal(prop, dbo, evaluator, result);
|
||||
wrapper.setProperty(prop, obj);
|
||||
wrapper.setProperty(prop, getValueInternal(prop, dbo, evaluator, result));
|
||||
}
|
||||
});
|
||||
|
||||
@@ -262,19 +268,21 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
entity.doWithAssociations(new AssociationHandler<MongoPersistentProperty>() {
|
||||
public void doWithAssociation(Association<MongoPersistentProperty> association) {
|
||||
|
||||
MongoPersistentProperty property = association.getInverse();
|
||||
final MongoPersistentProperty property = association.getInverse();
|
||||
Object value = dbo.get(property.getFieldName());
|
||||
|
||||
if (value == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
Object value = dbo.get(property.getName());
|
||||
DBRef dbref = value instanceof DBRef ? (DBRef) value : null;
|
||||
Object obj = dbRefResolver.resolveDbRef(property, dbref, new DbRefResolverCallback() {
|
||||
wrapper.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, new DbRefResolverCallback() {
|
||||
|
||||
@Override
|
||||
public Object resolve(MongoPersistentProperty property) {
|
||||
return getValueInternal(property, dbo, evaluator, parent);
|
||||
}
|
||||
});
|
||||
|
||||
wrapper.setProperty(property, obj);
|
||||
}));
|
||||
}
|
||||
});
|
||||
|
||||
@@ -294,6 +302,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Assert.isTrue(annotation != null, "The referenced property has to be mapped with @DBRef!");
|
||||
}
|
||||
|
||||
// @see DATAMONGO-913
|
||||
if (object instanceof LazyLoadingProxy) {
|
||||
return ((LazyLoadingProxy) object).toDBRef();
|
||||
}
|
||||
|
||||
return createDBRef(object, referingProperty);
|
||||
}
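A hedged domain sketch of the new LazyLoadingProxy guard; Book, Publisher and the template instance are assumptions:

class Book {
  @Id String id;
  @DBRef(lazy = true) Publisher publisher;
}

// Re-saving a loaded Book without touching 'publisher' leaves the property as a LazyLoadingProxy;
// the guard now short-circuits to proxy.toDBRef() instead of trying to convert the proxy itself.
Book book = template.findOne(query(where("id").is("42")), Book.class);
template.save(book);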
|
||||
|
||||
@@ -309,14 +322,17 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return;
|
||||
}
|
||||
|
||||
boolean handledByCustomConverter = conversions.getCustomWriteTarget(obj.getClass(), DBObject.class) != null;
|
||||
TypeInformation<? extends Object> type = ClassTypeInformation.from(obj.getClass());
|
||||
Class<?> entityType = obj.getClass();
|
||||
boolean handledByCustomConverter = conversions.getCustomWriteTarget(entityType, DBObject.class) != null;
|
||||
TypeInformation<? extends Object> type = ClassTypeInformation.from(entityType);
|
||||
|
||||
if (!handledByCustomConverter && !(dbo instanceof BasicDBList)) {
|
||||
typeMapper.writeType(type, dbo);
|
||||
}
|
||||
|
||||
writeInternal(obj, dbo, type);
|
||||
Object target = obj instanceof LazyLoadingProxy ? ((LazyLoadingProxy) obj).initialize() : obj;
|
||||
|
||||
writeInternal(target, dbo, type);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -332,7 +348,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return;
|
||||
}
|
||||
|
||||
Class<?> customTarget = conversions.getCustomWriteTarget(obj.getClass(), DBObject.class);
|
||||
Class<?> entityType = obj.getClass();
|
||||
Class<?> customTarget = conversions.getCustomWriteTarget(entityType, DBObject.class);
|
||||
|
||||
if (customTarget != null) {
|
||||
DBObject result = conversionService.convert(obj, DBObject.class);
|
||||
@@ -340,17 +357,17 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return;
|
||||
}
|
||||
|
||||
if (Map.class.isAssignableFrom(obj.getClass())) {
|
||||
if (Map.class.isAssignableFrom(entityType)) {
|
||||
writeMapInternal((Map<Object, Object>) obj, dbo, ClassTypeInformation.MAP);
|
||||
return;
|
||||
}
|
||||
|
||||
if (Collection.class.isAssignableFrom(obj.getClass())) {
|
||||
if (Collection.class.isAssignableFrom(entityType)) {
|
||||
writeCollectionInternal((Collection<?>) obj, ClassTypeInformation.LIST, (BasicDBList) dbo);
|
||||
return;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(obj.getClass());
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityType);
|
||||
writeInternal(obj, dbo, entity);
|
||||
addCustomTypeKeyIfNecessary(typeHint, obj, dbo);
|
||||
}
|
||||
@@ -554,7 +571,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
if (conversions.isSimpleType(key.getClass())) {
|
||||
|
||||
String simpleKey = potentiallyEscapeMapKey(key.toString());
|
||||
String simpleKey = prepareMapKey(key.toString());
|
||||
dbObject.put(simpleKey, value != null ? createDBRef(value, property) : null);
|
||||
|
||||
} else {
|
||||
@@ -606,12 +623,13 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
protected DBObject writeMapInternal(Map<Object, Object> obj, DBObject dbo, TypeInformation<?> propertyType) {
|
||||
|
||||
for (Map.Entry<Object, Object> entry : obj.entrySet()) {
|
||||
|
||||
Object key = entry.getKey();
|
||||
Object val = entry.getValue();
|
||||
|
||||
if (conversions.isSimpleType(key.getClass())) {
|
||||
// Don't use conversion service here as removal of ObjectToString converter results in some primitive types not
|
||||
// being convertible
|
||||
String simpleKey = potentiallyEscapeMapKey(key.toString());
|
||||
|
||||
String simpleKey = prepareMapKey(key);
|
||||
if (val == null || conversions.isSimpleType(val.getClass())) {
|
||||
writeSimpleInternal(val, dbo, simpleKey);
|
||||
} else if (val instanceof Collection || val.getClass().isArray()) {
|
||||
@@ -632,6 +650,21 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return dbo;
|
||||
}
|
||||
|
||||
/**
* Prepares the given {@link Map} key to be converted into a {@link String}. Will invoke potentially registered custom
* conversions and escape dots from the result as they're not supported as {@link Map} key in MongoDB.
*
* @param key must not be {@literal null}.
* @return
*/
private String prepareMapKey(Object key) {

Assert.notNull(key, "Map key must not be null!");

String convertedKey = potentiallyConvertMapKey(key);
return potentiallyEscapeMapKey(convertedKey);
}
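A hedged sketch of map key preparation with a dot replacement configured; the collaborators passed to the converter are assumed to already exist:

MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
converter.setMapKeyDotReplacement("_");

// Writing an entity holding this map yields { "settings" : { "com_example" : "on" } };
// prepareMapKey(...) additionally applies a registered key-to-String converter before escaping.
Map<String, String> settings = Collections.singletonMap("com.example", "on");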
|
||||
|
||||
/**
|
||||
* Potentially replaces dots in the given map key with the configured map key replacement if configured or aborts
|
||||
* conversion if none is configured.
|
||||
@@ -654,6 +687,22 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return source.replaceAll("\\.", mapKeyDotReplacement);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link String} representation of the given {@link Map} key
|
||||
*
|
||||
* @param key
|
||||
* @return
|
||||
*/
|
||||
private String potentiallyConvertMapKey(Object key) {
|
||||
|
||||
if (key instanceof String) {
|
||||
return (String) key;
|
||||
}
|
||||
|
||||
return conversions.hasCustomWriteTarget(key.getClass(), String.class) ? (String) getPotentiallyConvertedSimpleWrite(key)
|
||||
: key.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Translates the map key replacements in the given key just read with a dot in case a map key replacement has been
|
||||
* configured.
|
||||
@@ -677,10 +726,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
TypeInformation<?> actualType = type != null ? type.getActualType() : null;
|
||||
Class<?> reference = actualType == null ? Object.class : actualType.getType();
|
||||
Class<?> valueType = ClassUtils.getUserClass(value.getClass());
|
||||
|
||||
boolean notTheSameClass = !value.getClass().equals(reference);
|
||||
boolean notTheSameClass = !valueType.equals(reference);
|
||||
if (notTheSameClass) {
|
||||
typeMapper.writeType(value.getClass(), dbObject);
|
||||
typeMapper.writeType(valueType, dbObject);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -733,7 +783,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
private Object getPotentiallyConvertedSimpleRead(Object value, Class<?> target) {
|
||||
|
||||
if (value == null || target == null) {
|
||||
if (value == null || target == null || target.isAssignableFrom(value.getClass())) {
|
||||
return value;
|
||||
}
|
||||
|
||||
@@ -745,7 +795,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return Enum.valueOf((Class<Enum>) target, value.toString());
|
||||
}
|
||||
|
||||
return target.isAssignableFrom(value.getClass()) ? value : conversionService.convert(value, target);
|
||||
return conversionService.convert(value, target);
|
||||
}
|
||||
|
||||
protected DBRef createDBRef(Object target, MongoPersistentProperty property) {
|
||||
@@ -786,11 +836,10 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
idMapper.convertId(id));
|
||||
}
|
||||
|
||||
protected Object getValueInternal(MongoPersistentProperty prop, DBObject dbo, SpELExpressionEvaluator eval,
|
||||
protected Object getValueInternal(MongoPersistentProperty prop, DBObject dbo, SpELExpressionEvaluator evaluator,
|
||||
Object parent) {
|
||||
|
||||
MongoDbPropertyValueProvider provider = new MongoDbPropertyValueProvider(dbo, spELContext, parent);
|
||||
return provider.getPropertyValue(prop);
|
||||
return new MongoDbPropertyValueProvider(dbo, evaluator, parent).getPropertyValue(prop);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -919,7 +968,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return getPotentiallyConvertedSimpleWrite(obj);
|
||||
}
|
||||
|
||||
TypeInformation<?> typeHint = typeInformation == null ? null : ClassTypeInformation.OBJECT;
|
||||
TypeInformation<?> typeHint = typeInformation;
|
||||
|
||||
if (obj instanceof BasicDBList) {
|
||||
return maybeConvertList((BasicDBList) obj, typeHint);
|
||||
@@ -1007,17 +1056,27 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return dbObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link PropertyValueProvider} to evaluate a SpEL expression if present on the property or simply accesses the field
|
||||
* of the configured source {@link DBObject}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private class MongoDbPropertyValueProvider implements PropertyValueProvider<MongoPersistentProperty> {
|
||||
|
||||
private final DBObjectAccessor source;
|
||||
private final SpELExpressionEvaluator evaluator;
|
||||
private final Object parent;
|
||||
|
||||
public MongoDbPropertyValueProvider(DBObject source, SpELContext factory, Object parent) {
|
||||
this(source, new DefaultSpELExpressionEvaluator(source, factory), parent);
|
||||
}
|
||||
|
||||
public MongoDbPropertyValueProvider(DBObject source, DefaultSpELExpressionEvaluator evaluator, Object parent) {
|
||||
/**
|
||||
* Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and
|
||||
* parent object.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param evaluator must not be {@literal null}.
|
||||
* @param parent can be {@literal null}.
|
||||
*/
|
||||
public MongoDbPropertyValueProvider(DBObject source, SpELExpressionEvaluator evaluator, Object parent) {
|
||||
|
||||
Assert.notNull(source);
|
||||
Assert.notNull(evaluator);
|
||||
|
||||
@@ -250,14 +250,31 @@ public class QueryMapper {
|
||||
* type of the given value is compatible with the type of the given document field in order to deal with potential
|
||||
* query field exclusions, since MongoDB uses the {@code int} {@literal 0} as an indicator for an excluded field.
|
||||
*
|
||||
* @param documentField
|
||||
* @param documentField must not be {@literal null}.
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
protected boolean isAssociationConversionNecessary(Field documentField, Object value) {
|
||||
return documentField.isAssociation() && value != null
|
||||
&& (documentField.getProperty().getActualType().isAssignableFrom(value.getClass()) //
|
||||
|| documentField.getPropertyEntity().getIdProperty().getActualType().isAssignableFrom(value.getClass()));
|
||||
|
||||
Assert.notNull(documentField, "Document field must not be null!");
|
||||
|
||||
if (value == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!documentField.isAssociation()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Class<? extends Object> type = value.getClass();
|
||||
MongoPersistentProperty property = documentField.getProperty();
|
||||
|
||||
if (property.getActualType().isAssignableFrom(type)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = documentField.getPropertyEntity();
|
||||
return entity.hasIdProperty() && entity.getIdProperty().getActualType().isAssignableFrom(type);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -289,7 +306,7 @@ public class QueryMapper {
|
||||
* @return the converted mongo type or null if source is null
|
||||
*/
|
||||
protected Object delegateConvertToMongoType(Object source, MongoPersistentEntity<?> entity) {
|
||||
return converter.convertToMongoType(source);
|
||||
return converter.convertToMongoType(source, entity == null ? null : entity.getTypeInformation());
|
||||
}
|
||||
|
||||
protected Object convertAssociation(Object source, Field field) {
|
||||
@@ -380,13 +397,20 @@
 */
public Object convertId(Object id) {

try {
return conversionService.convert(id, ObjectId.class);
} catch (ConversionException e) {
// Ignore
if (id == null) {
return null;
}

return delegateConvertToMongoType(id, null);
if (id instanceof String) {
return ObjectId.isValid(id.toString()) ? conversionService.convert(id, ObjectId.class) : id;
}

try {
return conversionService.canConvert(id.getClass(), ObjectId.class) ? conversionService
.convert(id, ObjectId.class) : delegateConvertToMongoType(id, null);
} catch (ConversionException o_O) {
return delegateConvertToMongoType(id, null);
}
}
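Assuming a QueryMapper instance named queryMapper, the new id handling can be sketched as follows; the literal values are illustrative:

queryMapper.convertId("53e5b4a6e4b0a1c9d8f4a123"); // valid 24-char hex, converted to ObjectId
queryMapper.convertId("user-42");                  // not a valid ObjectId, returned as the plain String
queryMapper.convertId(1234L);                      // falls back to canConvert(..) or delegateConvertToMongoType(..)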
|
||||
|
||||
/**
|
||||
@@ -594,6 +618,21 @@ public class QueryMapper {
|
||||
*/
|
||||
public MetadataBackedField(String name, MongoPersistentEntity<?> entity,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context) {
|
||||
this(name, entity, context, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MetadataBackedField} with the given name, {@link MongoPersistentEntity} and
|
||||
* {@link MappingContext} with the given {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @param entity must not be {@literal null}.
|
||||
* @param context must not be {@literal null}.
|
||||
* @param property may be {@literal null}.
|
||||
*/
|
||||
public MetadataBackedField(String name, MongoPersistentEntity<?> entity,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context,
|
||||
MongoPersistentProperty property) {
|
||||
|
||||
super(name);
|
||||
|
||||
@@ -603,7 +642,7 @@ public class QueryMapper {
|
||||
this.mappingContext = context;
|
||||
|
||||
this.path = getPath(name);
|
||||
this.property = path == null ? null : path.getLeafProperty();
|
||||
this.property = path == null ? property : path.getLeafProperty();
|
||||
this.association = findAssociation();
|
||||
}
|
||||
|
||||
@@ -613,7 +652,7 @@ public class QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
public MetadataBackedField with(String name) {
|
||||
return new MetadataBackedField(name, entity, mappingContext);
|
||||
return new MetadataBackedField(name, entity, mappingContext, property);
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -25,6 +25,7 @@ import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update.Modifier;
|
||||
import org.springframework.data.mongodb.core.query.Update.Modifiers;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
@@ -79,10 +80,19 @@ public class UpdateMapper extends QueryMapper {
|
||||
return createMapEntry(field, convertSimpleOrDBObject(rawValue, field.getPropertyEntity()));
|
||||
}
|
||||
|
||||
if (!isUpdateModifier(rawValue)) {
|
||||
return super.getMappedObjectForField(field, getMappedValue(field, rawValue));
|
||||
if (isQuery(rawValue)) {
|
||||
return createMapEntry(field,
|
||||
super.getMappedObject(((Query) rawValue).getQueryObject(), field.getPropertyEntity()));
|
||||
}
|
||||
|
||||
if (isUpdateModifier(rawValue)) {
|
||||
return getMappedUpdateModifier(field, rawValue);
|
||||
}
|
||||
|
||||
return super.getMappedObjectForField(field, getMappedValue(field, rawValue));
|
||||
}
|
||||
|
||||
private Entry<String, Object> getMappedUpdateModifier(Field field, Object rawValue) {
|
||||
Object value = null;
|
||||
|
||||
if (rawValue instanceof Modifier) {
|
||||
@@ -99,7 +109,6 @@ public class UpdateMapper extends QueryMapper {
|
||||
|
||||
value = modificationOperations;
|
||||
} else {
|
||||
|
||||
throw new IllegalArgumentException(String.format("Unable to map value of type '%s'!", rawValue.getClass()));
|
||||
}
|
||||
|
||||
@@ -119,6 +128,10 @@ public class UpdateMapper extends QueryMapper {
|
||||
return value instanceof Modifier || value instanceof Modifiers;
|
||||
}
|
||||
|
||||
private boolean isQuery(Object value) {
|
||||
return value instanceof Query;
|
||||
}
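A hedged sketch of an update carrying a Query as its value, which is the case the new isQuery(..) branch maps; Order, the field names and the template instance are assumptions:

Update update = new Update().pull("items", query(where("qty").gt(10)));
template.updateMulti(query(where("status").is("OPEN")), update, Order.class);
// resulting modifier: { "$pull" : { "items" : { "qty" : { "$gt" : 10 } } } }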
|
||||
|
||||
private DBObject getMappedValue(Modifier modifier) {
|
||||
|
||||
Object value = converter.convertToMongoType(modifier.getValue(), ClassTypeInformation.OBJECT);
|
||||
|
||||
@@ -36,11 +36,12 @@ public @interface CompoundIndex {
|
||||
|
||||
/**
|
||||
* The actual index definition in JSON format. The keys of the JSON document are the fields to be indexed, the values
|
||||
* define the index direction (1 for ascending, -1 for descending).
|
||||
* define the index direction (1 for ascending, -1 for descending). <br />
|
||||
* If left empty on nested document, the whole document will be indexed.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String def();
|
||||
String def() default "";
|
||||
|
||||
/**
|
||||
* It does not actually make sense to use that attribute as the direction has to be defined in the {@link #def()}
|
||||
@@ -72,12 +73,57 @@ public @interface CompoundIndex {
|
||||
boolean dropDups() default false;
|
||||
|
||||
/**
|
||||
* The name of the index to be created.
|
||||
* The name of the index to be created. <br />
|
||||
* <br />
|
||||
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
|
||||
* provided name will be prefixed with the path leading to the entity. <br />
|
||||
* <br />
|
||||
* The structure below
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* @Document
|
||||
* class Root {
|
||||
* Hybrid hybrid;
|
||||
* Nested nested;
|
||||
* }
|
||||
*
|
||||
* @Document
|
||||
* @CompoundIndex(name = "compound_index", def = "{'h1': 1, 'h2': 1}")
|
||||
* class Hybrid {
|
||||
* String h1, h2;
|
||||
* }
|
||||
*
|
||||
* @CompoundIndex(name = "compound_index", def = "{'n1': 1, 'n2': 1}")
|
||||
* class Nested {
|
||||
* String n1, n2;
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* resolves in the following index structures
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* db.root.ensureIndex( { hybrid.h1: 1, hybrid.h2: 1 } , { name: "hybrid.compound_index" } )
|
||||
* db.root.ensureIndex( { nested.n1: 1, nested.n2: 1 } , { name: "nested.compound_index" } )
|
||||
* db.hybrid.ensureIndex( { h1: 1, h2: 1 } , { name: "compound_index" } )
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String name() default "";
|
||||
|
||||
/**
|
||||
* If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults
|
||||
* to {@literal false}.
|
||||
*
|
||||
* @return
|
||||
* @since 1.5
|
||||
*/
|
||||
boolean useGeneratedName() default false;
|
||||
|
||||
/**
|
||||
* The collection the index will be created in. Will default to the collection the annotated domain class will be
|
||||
* stored in.
|
||||
@@ -97,8 +143,11 @@ public @interface CompoundIndex {
|
||||
/**
|
||||
* Configures the number of seconds after which the collection should expire. Defaults to -1 for no expiry.
|
||||
*
|
||||
* @deprecated TTL cannot be defined for {@link CompoundIndex} having more than one field as key. Will be removed in
|
||||
* 1.6.
|
||||
* @see http://docs.mongodb.org/manual/tutorial/expire-data/
|
||||
* @return
|
||||
*/
|
||||
@Deprecated
|
||||
int expireAfterSeconds() default -1;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,56 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Index definition to span multiple keys.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.5
|
||||
*/
|
||||
public class CompoundIndexDefinition extends Index {

private DBObject keys;

/**
* Creates a new {@link CompoundIndexDefinition} for the given keys.
*
* @param keys must not be {@literal null}.
*/
public CompoundIndexDefinition(DBObject keys) {

Assert.notNull(keys, "Keys must not be null!");
this.keys = keys;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.index.Index#getIndexKeys()
*/
@Override
public DBObject getIndexKeys() {

BasicDBObject dbo = new BasicDBObject();
dbo.putAll(this.keys);
dbo.putAll(super.getIndexKeys());
return dbo;
}
}
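A short usage sketch of the new definition type; Person is a placeholder entity and the template instance is assumed:

DBObject keys = new BasicDBObject("lastname", 1).append("firstname", 1);
CompoundIndexDefinition definition = new CompoundIndexDefinition(keys);

template.indexOps(Person.class).ensureIndex(definition.named("name_idx"));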
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -25,18 +25,62 @@ import java.lang.annotation.Target;
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Laurent Canet
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@Target(ElementType.FIELD)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface GeoSpatialIndexed {
|
||||
|
||||
/**
|
||||
* Name of the property in the document that contains the [x, y] or radial coordinates to index.
|
||||
* Index name. <br />
|
||||
* <br />
|
||||
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
|
||||
* provided name will be prefixed with the path leading to the entity. <br />
|
||||
* <br />
|
||||
* The structure below
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* @Document
|
||||
* class Root {
|
||||
* Hybrid hybrid;
|
||||
* Nested nested;
|
||||
* }
|
||||
*
|
||||
* @Document
|
||||
* class Hybrid {
|
||||
* @GeoSpatialIndexed(name="index") Point h1;
|
||||
* }
|
||||
*
|
||||
* class Nested {
|
||||
* @GeoSpatialIndexed(name="index") Point n1;
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* resolves in the following index structures
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* db.root.ensureIndex( { hybrid.h1: "2d" } , { name: "hybrid.index" } )
|
||||
* db.root.ensureIndex( { nested.n1: "2d" } , { name: "nested.index" } )
|
||||
* db.hybrid.ensureIndex( { h1: "2d" } , { name: "index" } )
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String name() default "";
|
||||
|
||||
/**
|
||||
* If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults
|
||||
* to {@literal false}.
|
||||
*
|
||||
* @return
|
||||
* @since 1.5
|
||||
*/
|
||||
boolean useGeneratedName() default false;
|
||||
|
||||
/**
|
||||
* Name of the collection in which to create the index.
|
||||
*
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -27,6 +27,7 @@ import com.mongodb.DBObject;
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Laurent Canet
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class GeospatialIndex implements IndexDefinition {
|
||||
|
||||
@@ -57,8 +58,6 @@ public class GeospatialIndex implements IndexDefinition {
|
||||
*/
|
||||
public GeospatialIndex named(String name) {
|
||||
|
||||
Assert.hasText(name, "Name must have text!");
|
||||
|
||||
this.name = name;
|
||||
return this;
|
||||
}
|
||||
@@ -151,12 +150,12 @@ public class GeospatialIndex implements IndexDefinition {
|
||||
|
||||
public DBObject getIndexOptions() {
|
||||
|
||||
if (name == null && min == null && max == null && bucketSize == null) {
|
||||
if (!StringUtils.hasText(name) && min == null && max == null && bucketSize == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
if (name != null) {
|
||||
if (StringUtils.hasText(name)) {
|
||||
dbo.put("name", name);
|
||||
}
|
||||
|
||||
@@ -186,6 +185,7 @@ public class GeospatialIndex implements IndexDefinition {
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,13 +17,20 @@ package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.query.Order;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public class Index implements IndexDefinition {
|
||||
|
||||
@@ -41,6 +48,10 @@ public class Index implements IndexDefinition {
|
||||
|
||||
private boolean sparse = false;
|
||||
|
||||
private boolean background = false;
|
||||
|
||||
private long expire = -1;
|
||||
|
||||
public Index() {}
|
||||
|
||||
public Index(String key, Direction direction) {
|
||||
@@ -104,6 +115,44 @@ public class Index implements IndexDefinition {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
* Build the index in background (non blocking).
*
* @return
* @since 1.5
*/
public Index background() {

this.background = true;
return this;
}

/**
* Specifies TTL in seconds.
*
* @param value
* @return
* @since 1.5
*/
public Index expire(long value) {
return expire(value, TimeUnit.SECONDS);
}

/**
* Specifies TTL with given {@link TimeUnit}.
*
* @param value
* @param unit
* @return
* @since 1.5
*/
public Index expire(long value, TimeUnit unit) {

Assert.notNull(unit, "TimeUnit for expiration must not be null.");
this.expire = unit.toSeconds(value);
return this;
}
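A usage sketch of the new builder methods; the field name and the Session entity are assumptions:

Index ttlIndex = new Index()
    .on("lastAccessed", Direction.ASC)
    .expire(30, TimeUnit.MINUTES) // stored as expireAfterSeconds: 1800
    .background();

template.indexOps(Session.class).ensureIndex(ttlIndex);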
|
||||
|
||||
/**
|
||||
* @see http://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping
|
||||
* @param duplicates
|
||||
@@ -125,11 +174,9 @@ public class Index implements IndexDefinition {
|
||||
}
|
||||
|
||||
public DBObject getIndexOptions() {
|
||||
if (name == null && !unique) {
|
||||
return null;
|
||||
}
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
if (name != null) {
|
||||
if (StringUtils.hasText(name)) {
|
||||
dbo.put("name", name);
|
||||
}
|
||||
if (unique) {
|
||||
@@ -141,6 +188,13 @@ public class Index implements IndexDefinition {
|
||||
if (sparse) {
|
||||
dbo.put("sparse", true);
|
||||
}
|
||||
if (background) {
|
||||
dbo.put("background", true);
|
||||
}
|
||||
if (expire >= 0) {
|
||||
dbo.put("expireAfterSeconds", expire);
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2014 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,11 +20,11 @@ import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public interface IndexDefinition {
|
||||
|
||||
DBObject getIndexKeys();
|
||||
|
||||
DBObject getIndexOptions();
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
|
||||
|
||||
/**
|
||||
* {@link IndexResolver} finds those {@link IndexDefinition}s to be created for a given class.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.5
|
||||
*/
|
||||
interface IndexResolver {
|
||||
|
||||
/**
|
||||
* Find and create {@link IndexDefinition}s for properties of given {@code type}. {@link IndexDefinition}s are created
|
||||
* for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}.
|
||||
*
|
||||
* @param type
|
||||
* @return Empty {@link Iterable} in case no {@link IndexDefinition} could be resolved for type.
|
||||
*/
|
||||
Iterable<? extends IndexDefinitionHolder> resolveIndexForClass(Class<?> type);
|
||||
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -27,6 +27,7 @@ import java.lang.annotation.Target;
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Johno Crawford
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@Target(ElementType.FIELD)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@@ -57,12 +58,55 @@ public @interface Indexed {
|
||||
boolean dropDups() default false;
|
||||
|
||||
/**
|
||||
* Index name.
|
||||
* Index name. <br />
|
||||
* <br />
|
||||
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
|
||||
* provided name will be prefixed with the path leading to the entity. <br />
|
||||
* <br />
|
||||
* The structure below
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* @Document
|
||||
* class Root {
|
||||
* Hybrid hybrid;
|
||||
* Nested nested;
|
||||
* }
|
||||
*
|
||||
* @Document
|
||||
* class Hybrid {
|
||||
* @Indexed(name="index") String h1;
|
||||
* }
|
||||
*
|
||||
* class Nested {
|
||||
* @Indexed(name="index") String n1;
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* resolves in the following index structures
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* db.root.ensureIndex( { hybrid.h1: 1 } , { name: "hybrid.index" } )
|
||||
* db.root.ensureIndex( { nested.n1: 1 } , { name: "nested.index" } )
|
||||
* db.hybrid.ensureIndex( { h1: 1} , { name: "index" } )
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String name() default "";
|
||||
|
||||
/**
|
||||
* If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults
|
||||
* to {@literal false}.
|
||||
*
|
||||
* @return
|
||||
* @since 1.5
|
||||
*/
|
||||
boolean useGeneratedName() default false;
|
||||
|
||||
/**
|
||||
* Collection name for the index to be created on.
|
||||
*
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -60,4 +60,10 @@ public class MongoMappingEventPublisher implements ApplicationEventPublisher {
|
||||
indexCreator.onApplicationEvent((MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>) event);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.context.ApplicationEventPublisher#publishEvent(java.lang.Object)
|
||||
*/
|
||||
public void publishEvent(Object event) {}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,19 +22,14 @@ import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.context.ApplicationListener;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PropertyHandler;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.MappingContextEvent;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.util.JSON;
|
||||
|
||||
/**
|
||||
* Component that inspects {@link MongoPersistentEntity} instances contained in the given {@link MongoMappingContext}
|
||||
@@ -45,30 +40,46 @@ import com.mongodb.util.JSON;
|
||||
* @author Philipp Schneider
|
||||
* @author Johno Crawford
|
||||
* @author Laurent Canet
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MongoPersistentEntityIndexCreator implements
|
||||
ApplicationListener<MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>> {
|
||||
public class MongoPersistentEntityIndexCreator implements ApplicationListener<MappingContextEvent<?, ?>> {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class);
|
||||
|
||||
private final Map<Class<?>, Boolean> classesSeen = new ConcurrentHashMap<Class<?>, Boolean>();
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
private final MongoMappingContext mappingContext;
|
||||
private final IndexResolver indexResolver;
|
||||
|
||||
/**
|
||||
* Creats a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* {@link MongoDbFactory}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}
|
||||
* @param mongoDbFactory must not be {@literal null}
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
*/
|
||||
public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, MongoDbFactory mongoDbFactory) {
|
||||
this(mappingContext, mongoDbFactory, new MongoPersistentEntityIndexResolver(mappingContext));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* {@link MongoDbFactory}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
* @param indexResolver must not be {@literal null}.
|
||||
*/
|
||||
public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, MongoDbFactory mongoDbFactory,
|
||||
IndexResolver indexResolver) {
|
||||
|
||||
Assert.notNull(mongoDbFactory);
|
||||
Assert.notNull(mappingContext);
|
||||
Assert.notNull(indexResolver);
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.mappingContext = mappingContext;
|
||||
this.indexResolver = indexResolver;
|
||||
|
||||
for (MongoPersistentEntity<?> entity : mappingContext.getPersistentEntities()) {
|
||||
checkForIndexes(entity);
|
||||
@@ -79,7 +90,7 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent)
|
||||
*/
|
||||
public void onApplicationEvent(MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty> event) {
|
||||
public void onApplicationEvent(MappingContextEvent<?, ?> event) {
|
||||
|
||||
if (!event.wasEmittedBy(mappingContext)) {
|
||||
return;
|
||||
@@ -89,91 +100,40 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
|
||||
// Double check type as Spring infrastructure does not consider nested generics
|
||||
if (entity instanceof MongoPersistentEntity) {
|
||||
checkForIndexes(event.getPersistentEntity());
|
||||
checkForIndexes((MongoPersistentEntity<?>) entity);
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkForIndexes(final MongoPersistentEntity<?> entity) {
|
||||
final Class<?> type = entity.getType();
|
||||
private void checkForIndexes(final MongoPersistentEntity<?> entity) {
|
||||
|
||||
Class<?> type = entity.getType();
|
||||
|
||||
if (!classesSeen.containsKey(type)) {
|
||||
|
||||
this.classesSeen.put(type, Boolean.TRUE);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Analyzing class " + type + " for index information.");
|
||||
}
|
||||
|
||||
// Make sure indexes get created
|
||||
if (type.isAnnotationPresent(CompoundIndexes.class)) {
|
||||
CompoundIndexes indexes = type.getAnnotation(CompoundIndexes.class);
|
||||
for (CompoundIndex index : indexes.value()) {
|
||||
|
||||
String indexColl = StringUtils.hasText(index.collection()) ? index.collection() : entity.getCollection();
|
||||
DBObject definition = (DBObject) JSON.parse(index.def());
|
||||
|
||||
ensureIndex(indexColl, index.name(), definition, index.unique(), index.dropDups(), index.sparse(),
|
||||
index.background(), index.expireAfterSeconds());
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Created compound index " + index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
public void doWithPersistentProperty(MongoPersistentProperty property) {
|
||||
|
||||
if (property.isAnnotationPresent(Indexed.class)) {
|
||||
|
||||
Indexed index = property.findAnnotation(Indexed.class);
|
||||
String name = index.name();
|
||||
|
||||
if (!StringUtils.hasText(name)) {
|
||||
name = property.getFieldName();
|
||||
} else {
|
||||
if (!name.equals(property.getName()) && index.unique() && !index.sparse()) {
|
||||
// Names don't match, and sparse is not true. This situation will generate an error on the server.
|
||||
if (LOGGER.isWarnEnabled()) {
|
||||
LOGGER.warn("The index name " + name + " doesn't match this property name: " + property.getName()
|
||||
+ ". Setting sparse=true on this index will prevent errors when inserting documents.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
String collection = StringUtils.hasText(index.collection()) ? index.collection() : entity.getCollection();
|
||||
int direction = index.direction() == IndexDirection.ASCENDING ? 1 : -1;
|
||||
DBObject definition = new BasicDBObject(property.getFieldName(), direction);
|
||||
|
||||
ensureIndex(collection, name, definition, index.unique(), index.dropDups(), index.sparse(),
|
||||
index.background(), index.expireAfterSeconds());
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Created property index " + index);
|
||||
}
|
||||
|
||||
} else if (property.isAnnotationPresent(GeoSpatialIndexed.class)) {
|
||||
|
||||
GeoSpatialIndexed index = property.findAnnotation(GeoSpatialIndexed.class);
|
||||
|
||||
GeospatialIndex indexObject = new GeospatialIndex(property.getFieldName());
|
||||
indexObject.withMin(index.min()).withMax(index.max());
|
||||
indexObject.named(StringUtils.hasText(index.name()) ? index.name() : property.getName());
|
||||
indexObject.typed(index.type()).withBucketSize(index.bucketSize())
|
||||
.withAdditionalField(index.additionalField());
|
||||
|
||||
String collection = StringUtils.hasText(index.collection()) ? index.collection() : entity.getCollection();
|
||||
mongoDbFactory.getDb().getCollection(collection)
|
||||
.ensureIndex(indexObject.getIndexKeys(), indexObject.getIndexOptions());
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Created %s for entity %s in collection %s! ", indexObject, entity.getType(),
|
||||
collection));
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
classesSeen.put(type, true);
|
||||
checkForAndCreateIndexes(entity);
|
||||
}
|
||||
}
|
||||
|
||||
private void checkForAndCreateIndexes(MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (entity.findAnnotation(Document.class) != null) {
|
||||
for (IndexDefinitionHolder indexToCreate : indexResolver.resolveIndexForClass(entity.getType())) {
|
||||
createIndex(indexToCreate);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void createIndex(IndexDefinitionHolder indexDefinition) {
|
||||
mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()).createIndex(indexDefinition.getIndexKeys(),
|
||||
indexDefinition.getIndexOptions());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the current index creator was registered for the given {@link MappingContext}.
|
||||
*
|
||||
@@ -183,33 +143,4 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
public boolean isIndexCreatorFor(MappingContext<?, ?> context) {
|
||||
return this.mappingContext.equals(context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers the actual index creation.
|
||||
*
|
||||
* @param collection the collection to create the index in
|
||||
* @param name the name of the index about to be created
|
||||
* @param indexDefinition the index definition
|
||||
* @param unique whether it shall be a unique index
|
||||
* @param dropDups whether to drop duplicates
|
||||
* @param sparse sparse or not
|
||||
* @param background whether the index will be created in the background
|
||||
* @param expireAfterSeconds the time to live for documents in the collection
|
||||
*/
|
||||
protected void ensureIndex(String collection, String name, DBObject indexDefinition, boolean unique,
|
||||
boolean dropDups, boolean sparse, boolean background, int expireAfterSeconds) {
|
||||
|
||||
DBObject opts = new BasicDBObject();
|
||||
opts.put("name", name);
|
||||
opts.put("dropDups", dropDups);
|
||||
opts.put("sparse", sparse);
|
||||
opts.put("unique", unique);
|
||||
opts.put("background", background);
|
||||
|
||||
if (expireAfterSeconds != -1) {
|
||||
opts.put("expireAfterSeconds", expireAfterSeconds);
|
||||
}
|
||||
|
||||
mongoDbFactory.getDb().getCollection(collection).ensureIndex(indexDefinition, opts);
|
||||
}
|
||||
}
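For orientation, a minimal sketch of the mapping annotations this creator reacts to (the Person type and its fields are illustrative, not part of this changeset):

import org.springframework.data.mongodb.core.index.CompoundIndex;
import org.springframework.data.mongodb.core.index.CompoundIndexes;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexed;
import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
@CompoundIndexes({ @CompoundIndex(name = "age_name_idx", def = "{'age': 1, 'lastname': -1}") })
class Person {

    @Indexed(unique = true, sparse = true) // turned into a single-field index on 'email'
    String email;

    @GeoSpatialIndexed // turned into a geospatial index on 'location'
    double[] location;
}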
@@ -0,0 +1,569 @@
|
||||
/*
|
||||
* Copyright 2014-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.mapping.PropertyHandler;
|
||||
import org.springframework.data.mongodb.core.index.Index.Duplicates;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.util.JSON;
|
||||
|
||||
/**
|
||||
* {@link IndexResolver} implementation inspecting {@link MongoPersistentEntity} for {@link MongoPersistentEntity} to be
|
||||
* indexed. <br />
|
||||
* All {@link MongoPersistentProperty} of the {@link MongoPersistentEntity} are inspected for potential indexes by
|
||||
* scanning related annotations.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.5
|
||||
*/
|
||||
public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexResolver.class);
|
||||
|
||||
private final MongoMappingContext mappingContext;
|
||||
|
||||
/**
|
||||
* Create new {@link MongoPersistentEntityIndexResolver}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
*/
|
||||
public MongoPersistentEntityIndexResolver(MongoMappingContext mappingContext) {
|
||||
|
||||
Assert.notNull(mappingContext, "Mapping context must not be null in order to resolve index definitions");
|
||||
this.mappingContext = mappingContext;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexResolver#resolveIndexForClass(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public List<IndexDefinitionHolder> resolveIndexForClass(Class<?> type) {
|
||||
return resolveIndexForEntity(mappingContext.getPersistentEntity(type));
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve the {@link IndexDefinition}s for given {@literal root} entity by traversing {@link MongoPersistentProperty}
|
||||
* scanning for index annotations {@link Indexed}, {@link CompoundIndex} and {@link GeospatialIndex}. The given
|
||||
* {@literal root} has therefore to be annotated with {@link Document}.
|
||||
*
|
||||
* @param root must not be null.
|
||||
* @return List of {@link IndexDefinitionHolder}. Will never be {@code null}.
|
||||
* @throws IllegalArgumentException in case of missing {@link Document} annotation marking root entities.
|
||||
*/
|
||||
public List<IndexDefinitionHolder> resolveIndexForEntity(final MongoPersistentEntity<?> root) {
|
||||
|
||||
Assert.notNull(root, "Index cannot be resolved for given 'null' entity.");
|
||||
Document document = root.findAnnotation(Document.class);
|
||||
Assert.notNull(document, "Given entity is not collection root.");
|
||||
|
||||
final List<IndexDefinitionHolder> indexInformation = new ArrayList<MongoPersistentEntityIndexResolver.IndexDefinitionHolder>();
|
||||
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", root.getCollection(), root));
|
||||
|
||||
final CycleGuard guard = new CycleGuard();
|
||||
|
||||
root.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
|
||||
@Override
|
||||
public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
try {
|
||||
if (persistentProperty.isEntity()) {
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getActualType(),
|
||||
persistentProperty.getFieldName(), root.getCollection(), guard));
|
||||
}
|
||||
|
||||
IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(
|
||||
persistentProperty.getFieldName(), root.getCollection(), persistentProperty);
|
||||
if (indexDefinitionHolder != null) {
|
||||
indexInformation.add(indexDefinitionHolder);
|
||||
}
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.warn(e.getMessage());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return indexInformation;
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively resolve and inspect properties of given {@literal type} for indexes to be created.
|
||||
*
|
||||
* @param type
|
||||
* @param path The {@literal "dot"} path.
|
||||
* @param collection
|
||||
* @return List of {@link IndexDefinitionHolder} representing indexes for given type and its referenced property
|
||||
* types. Will never be {@code null}.
|
||||
*/
|
||||
private List<IndexDefinitionHolder> resolveIndexForClass(final Class<?> type, final String path,
|
||||
final String collection, final CycleGuard guard) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(type);
|
||||
|
||||
final List<IndexDefinitionHolder> indexInformation = new ArrayList<MongoPersistentEntityIndexResolver.IndexDefinitionHolder>();
|
||||
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions(path, collection, entity));
|
||||
|
||||
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
|
||||
@Override
|
||||
public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
String propertyDotPath = (StringUtils.hasText(path) ? path + "." : "") + persistentProperty.getFieldName();
|
||||
guard.protect(persistentProperty, path);
|
||||
|
||||
if (persistentProperty.isEntity()) {
|
||||
try {
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getActualType(), propertyDotPath,
|
||||
collection, guard));
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.warn(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(propertyDotPath,
|
||||
collection, persistentProperty);
|
||||
if (indexDefinitionHolder != null) {
|
||||
indexInformation.add(indexDefinitionHolder);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return indexInformation;
|
||||
}
|
||||
|
||||
private IndexDefinitionHolder createIndexDefinitionHolderForProperty(String dotPath, String collection,
|
||||
MongoPersistentProperty persistentProperty) {
|
||||
|
||||
if (persistentProperty.isAnnotationPresent(Indexed.class)) {
|
||||
return createIndexDefinition(dotPath, collection, persistentProperty);
|
||||
} else if (persistentProperty.isAnnotationPresent(GeoSpatialIndexed.class)) {
|
||||
return createGeoSpatialIndexDefinition(dotPath, collection, persistentProperty);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private List<IndexDefinitionHolder> potentiallyCreateCompoundIndexDefinitions(String dotPath, String collection,
|
||||
MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (entity.findAnnotation(CompoundIndexes.class) == null && entity.findAnnotation(CompoundIndex.class) == null) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
return createCompoundIndexDefinitions(dotPath, collection, entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} for {@link CompoundIndexes} of given type.
|
||||
*
|
||||
* @param dotPath The properties {@literal "dot"} path representation from its document root.
|
||||
* @param fallbackCollection
|
||||
* @param type
|
||||
* @return
|
||||
*/
|
||||
protected List<IndexDefinitionHolder> createCompoundIndexDefinitions(String dotPath, String fallbackCollection,
|
||||
MongoPersistentEntity<?> entity) {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = new ArrayList<MongoPersistentEntityIndexResolver.IndexDefinitionHolder>();
|
||||
CompoundIndexes indexes = entity.findAnnotation(CompoundIndexes.class);
|
||||
|
||||
if (indexes != null) {
|
||||
for (CompoundIndex index : indexes.value()) {
|
||||
indexDefinitions.add(createCompoundIndexDefinition(dotPath, fallbackCollection, index, entity));
|
||||
}
|
||||
}
|
||||
|
||||
CompoundIndex index = entity.findAnnotation(CompoundIndex.class);
|
||||
|
||||
if (index != null) {
|
||||
indexDefinitions.add(createCompoundIndexDefinition(dotPath, fallbackCollection, index, entity));
|
||||
}
|
||||
|
||||
return indexDefinitions;
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
protected IndexDefinitionHolder createCompoundIndexDefinition(String dotPath, String fallbackCollection,
|
||||
CompoundIndex index, MongoPersistentEntity<?> entity) {
|
||||
|
||||
CompoundIndexDefinition indexDefinition = new CompoundIndexDefinition(resolveCompoundIndexKeyFromStringDefinition(
|
||||
dotPath, index.def()));
|
||||
|
||||
if (!index.useGeneratedName()) {
|
||||
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, null));
|
||||
}
|
||||
|
||||
if (index.unique()) {
|
||||
indexDefinition.unique(index.dropDups() ? Duplicates.DROP : Duplicates.RETAIN);
|
||||
}
|
||||
|
||||
if (index.sparse()) {
|
||||
indexDefinition.sparse();
|
||||
}
|
||||
|
||||
if (index.background()) {
|
||||
indexDefinition.background();
|
||||
}
|
||||
|
||||
int ttl = index.expireAfterSeconds();
|
||||
|
||||
if (ttl >= 0) {
|
||||
if (indexDefinition.getIndexKeys().keySet().size() > 1) {
|
||||
LOGGER.warn("TTL is not supported for compound index with more than one key. TTL={} will be ignored.", ttl);
|
||||
} else {
|
||||
indexDefinition.expire(ttl, TimeUnit.SECONDS);
|
||||
}
|
||||
}
|
||||
|
||||
String collection = StringUtils.hasText(index.collection()) ? index.collection() : fallbackCollection;
|
||||
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
|
||||
}
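As a hypothetical illustration of the TTL handling above (annotation values are illustrative): expireAfterSeconds is only honoured when the resolved key document has exactly one key, otherwise the value is logged and dropped.

@Document
@CompoundIndexes({
    @CompoundIndex(name = "created_ttl", def = "{'createdAt': 1}", expireAfterSeconds = 3600), // single key: expire(3600, SECONDS) is applied
    @CompoundIndex(name = "age_name", def = "{'age': 1, 'lastname': -1}", expireAfterSeconds = 3600) // two keys: warning logged, TTL ignored
})
class AuditedEvent {
}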
|
||||
|
||||
private DBObject resolveCompoundIndexKeyFromStringDefinition(String dotPath, String keyDefinitionString) {
|
||||
|
||||
if (!StringUtils.hasText(dotPath) && !StringUtils.hasText(keyDefinitionString)) {
|
||||
throw new InvalidDataAccessApiUsageException("Cannot create index on root level for empty keys.");
|
||||
}
|
||||
|
||||
if (!StringUtils.hasText(keyDefinitionString)) {
|
||||
return new BasicDBObject(dotPath, 1);
|
||||
}
|
||||
|
||||
DBObject dbo = (DBObject) JSON.parse(keyDefinitionString);
|
||||
if (!StringUtils.hasText(dotPath)) {
|
||||
return dbo;
|
||||
}
|
||||
|
||||
BasicDBObjectBuilder dboBuilder = new BasicDBObjectBuilder();
|
||||
|
||||
for (String key : dbo.keySet()) {
|
||||
dboBuilder.add(dotPath + "." + key, dbo.get(key));
|
||||
}
|
||||
return dboBuilder.get();
|
||||
}
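For example (values are illustrative): with dotPath "address" and def "{'city': 1, 'zip': -1}" the parsed keys are prefixed to give {"address.city": 1, "address.zip": -1}; with an empty def and a non-empty dotPath the path itself becomes the single ascending key, i.e. {"address": 1}.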
|
||||
|
||||
/**
|
||||
* Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link Indexed} for given
|
||||
* {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param dotPath The properties {@literal "dot"} path representation from its document root.
|
||||
* @param collection
|
||||
* @param persistentProperty
|
||||
* @return
|
||||
*/
|
||||
protected IndexDefinitionHolder createIndexDefinition(String dotPath, String fallbackCollection,
|
||||
MongoPersistentProperty persistentProperty) {
|
||||
|
||||
Indexed index = persistentProperty.findAnnotation(Indexed.class);
|
||||
String collection = StringUtils.hasText(index.collection()) ? index.collection() : fallbackCollection;
|
||||
|
||||
Index indexDefinition = new Index().on(dotPath,
|
||||
IndexDirection.ASCENDING.equals(index.direction()) ? Sort.Direction.ASC : Sort.Direction.DESC);
|
||||
|
||||
if (!index.useGeneratedName()) {
|
||||
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persistentProperty));
|
||||
}
|
||||
|
||||
if (index.unique()) {
|
||||
indexDefinition.unique(index.dropDups() ? Duplicates.DROP : Duplicates.RETAIN);
|
||||
}
|
||||
|
||||
if (index.sparse()) {
|
||||
indexDefinition.sparse();
|
||||
}
|
||||
|
||||
if (index.background()) {
|
||||
indexDefinition.background();
|
||||
}
|
||||
|
||||
if (index.expireAfterSeconds() >= 0) {
|
||||
indexDefinition.expire(index.expireAfterSeconds(), TimeUnit.SECONDS);
|
||||
}
|
||||
|
||||
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link GeoSpatialIndexed} for
|
||||
* {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param dotPath The properties {@literal "dot"} path representation from its document root.
|
||||
* @param collection
|
||||
* @param persistentProperty
|
||||
* @return
|
||||
*/
|
||||
protected IndexDefinitionHolder createGeoSpatialIndexDefinition(String dotPath, String fallbackCollection,
|
||||
MongoPersistentProperty persistentProperty) {
|
||||
|
||||
GeoSpatialIndexed index = persistentProperty.findAnnotation(GeoSpatialIndexed.class);
|
||||
String collection = StringUtils.hasText(index.collection()) ? index.collection() : fallbackCollection;
|
||||
|
||||
GeospatialIndex indexDefinition = new GeospatialIndex(dotPath);
|
||||
indexDefinition.withBits(index.bits());
|
||||
indexDefinition.withMin(index.min()).withMax(index.max());
|
||||
|
||||
if (!index.useGeneratedName()) {
|
||||
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persistentProperty));
|
||||
}
|
||||
|
||||
indexDefinition.typed(index.type()).withBucketSize(index.bucketSize()).withAdditionalField(index.additionalField());
|
||||
|
||||
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
|
||||
}
|
||||
|
||||
private String pathAwareIndexName(String indexName, String dotPath, MongoPersistentProperty property) {
|
||||
|
||||
String nameToUse = StringUtils.hasText(indexName) ? indexName : "";
|
||||
|
||||
if (!StringUtils.hasText(dotPath) || (property != null && dotPath.equals(property.getFieldName()))) {
|
||||
return StringUtils.hasText(nameToUse) ? nameToUse : dotPath;
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(dotPath)) {
|
||||
|
||||
nameToUse = StringUtils.hasText(nameToUse) ? (property != null ? dotPath.replace("." + property.getFieldName(),
|
||||
"") : dotPath) + "." + nameToUse : dotPath;
|
||||
}
|
||||
return nameToUse;
|
||||
|
||||
}
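Worked examples (illustrative values): for a nested property with field name "city" reached via dotPath "address.city", an explicit index name "city_idx" becomes "address.city_idx" while an empty name falls back to the dot path "address.city"; for a top-level property (dotPath equal to the field name) the explicit name is used as-is.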
|
||||
|
||||
/**
|
||||
* {@link CycleGuard} holds information about properties and the paths for accessing those. This information is used
|
||||
* to detect potential cycles within the references.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
static class CycleGuard {
|
||||
|
||||
private final Map<String, List<Path>> propertyTypeMap;
|
||||
|
||||
CycleGuard() {
|
||||
this.propertyTypeMap = new LinkedHashMap<String, List<Path>>();
|
||||
}
|
||||
|
||||
/**
|
||||
* @param property The property to inspect
|
||||
* @param path The path under which the property can be reached.
|
||||
* @throws CyclicPropertyReferenceException in case a potential cycle is detected.
|
||||
* @see Path#cycles(MongoPersistentProperty, String)
|
||||
*/
|
||||
void protect(MongoPersistentProperty property, String path) throws CyclicPropertyReferenceException {
|
||||
|
||||
String propertyTypeKey = createMapKey(property);
|
||||
if (propertyTypeMap.containsKey(propertyTypeKey)) {
|
||||
|
||||
List<Path> paths = propertyTypeMap.get(propertyTypeKey);
|
||||
|
||||
for (Path existingPath : paths) {
|
||||
|
||||
if (existingPath.cycles(property, path) && property.isEntity()) {
|
||||
paths.add(new Path(property, path));
|
||||
|
||||
throw new CyclicPropertyReferenceException(property.getFieldName(), property.getOwner().getType(),
|
||||
existingPath.getPath());
|
||||
}
|
||||
}
|
||||
|
||||
paths.add(new Path(property, path));
|
||||
} else {
|
||||
|
||||
ArrayList<Path> paths = new ArrayList<Path>();
|
||||
paths.add(new Path(property, path));
|
||||
propertyTypeMap.put(propertyTypeKey, paths);
|
||||
}
|
||||
}
|
||||
|
||||
private String createMapKey(MongoPersistentProperty property) {
|
||||
return property.getOwner().getType().getSimpleName() + ":" + property.getFieldName();
|
||||
}
|
||||
|
||||
/**
|
||||
* Path defines the property and its full path from the document root. <br />
|
||||
* A {@link Path} with {@literal spring.data.mongodb} would be created for the property {@code Three.mongodb}.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* @Document
|
||||
* class One {
|
||||
* Two spring;
|
||||
* }
|
||||
*
|
||||
* class Two {
|
||||
* Three data;
|
||||
* }
|
||||
*
|
||||
* class Three {
|
||||
* String mongodb;
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
static class Path {
|
||||
|
||||
private final MongoPersistentProperty property;
|
||||
private final String path;
|
||||
|
||||
Path(MongoPersistentProperty property, String path) {
|
||||
|
||||
this.property = property;
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
public String getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether the given property is owned by the same entity and if it has been already visited by a subset of
|
||||
* the current path. Given {@literal foo.bar.bar} cycles if {@literal foo.bar} has already been visited and
|
||||
* {@code class Bar} contains a property of type {@code Bar}. The previously mentioned path would not cycle if
|
||||
* {@code class Bar} contained a property of type {@code SomeEntity} named {@literal bar}.
|
||||
*
|
||||
* @param property
|
||||
* @param path
|
||||
* @return
|
||||
*/
|
||||
boolean cycles(MongoPersistentProperty property, String path) {
|
||||
|
||||
if (!property.getOwner().equals(this.property.getOwner())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return path.equals(this.path) || path.contains(this.path + ".") || path.contains("." + this.path);
|
||||
}
|
||||
}
|
||||
}
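The string comparison behind Path#cycles(..) can be tried in isolation; a standalone sketch (the owner-type check is omitted here):

class PathCycleSketch {

    // Mirrors the comparison in Path#cycles: same path, or one path nested inside the other.
    static boolean sameBranch(String visited, String candidate) {
        return candidate.equals(visited) || candidate.contains(visited + ".") || candidate.contains("." + visited);
    }

    public static void main(String[] args) {
        System.out.println(sameBranch("foo.bar", "foo.bar.bar"));  // true  - flagged as a potential cycle
        System.out.println(sameBranch("foo.bar", "address.city")); // false - unrelated branch
    }
}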
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 1.5
|
||||
*/
|
||||
public static class CyclicPropertyReferenceException extends RuntimeException {
|
||||
|
||||
private static final long serialVersionUID = -3762979307658772277L;
|
||||
|
||||
private final String propertyName;
|
||||
private final Class<?> type;
|
||||
private final String dotPath;
|
||||
|
||||
public CyclicPropertyReferenceException(String propertyName, Class<?> type, String dotPath) {
|
||||
|
||||
this.propertyName = propertyName;
|
||||
this.type = type;
|
||||
this.dotPath = dotPath;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Throwable#getMessage()
|
||||
*/
|
||||
@Override
|
||||
public String getMessage() {
|
||||
return String.format("Found cycle for field '%s' in type '%s' for path '%s'", propertyName,
|
||||
type != null ? type.getSimpleName() : "unknown", dotPath);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Implementation of {@link IndexDefinition} holding additional (property)path information used for creating the
|
||||
* index. The path itself is the properties {@literal "dot"} path representation from its root document.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.5
|
||||
*/
|
||||
public static class IndexDefinitionHolder implements IndexDefinition {
|
||||
|
||||
private final String path;
|
||||
private final IndexDefinition indexDefinition;
|
||||
private final String collection;
|
||||
|
||||
/**
|
||||
* Creates a new {@link IndexDefinitionHolder} from the given path, {@link IndexDefinition} and collection.
|
||||
*
|
||||
* @param path
|
||||
*/
|
||||
public IndexDefinitionHolder(String path, IndexDefinition definition, String collection) {
|
||||
|
||||
this.path = path;
|
||||
this.indexDefinition = definition;
|
||||
this.collection = collection;
|
||||
}
|
||||
|
||||
public String getCollection() {
|
||||
return collection;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@literal "dot"} path used to create the index.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@literal raw} {@link IndexDefinition}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public IndexDefinition getIndexDefinition() {
|
||||
return indexDefinition;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexKeys()
|
||||
*/
|
||||
@Override
|
||||
public DBObject getIndexKeys() {
|
||||
return indexDefinition.getIndexKeys();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexOptions()
|
||||
*/
|
||||
@Override
|
||||
public DBObject getIndexOptions() {
|
||||
return indexDefinition.getIndexOptions();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -51,7 +51,7 @@ import org.springframework.util.StringUtils;
|
||||
public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, MongoPersistentProperty> implements
|
||||
MongoPersistentEntity<T>, ApplicationContextAware {
|
||||
|
||||
private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected! Both %s and %s map to the same field name %s! Disambiguate using @DocumentField annotation!";
|
||||
private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected! Both %s and %s map to the same field name %s! Disambiguate using @Field annotation!";
|
||||
private final String collection;
|
||||
private final SpelExpressionParser parser;
|
||||
private final StandardEvaluationContext context;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -30,6 +30,8 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
|
||||
private Boolean isIdProperty;
|
||||
private Boolean isAssociation;
|
||||
private String fieldName;
|
||||
private Boolean usePropertyAccess;
|
||||
private Boolean isTransient;
|
||||
|
||||
/**
|
||||
* Creates a new {@link CachingMongoPersistentProperty}.
|
||||
@@ -84,4 +86,32 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
|
||||
|
||||
return this.fieldName;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.model.AnnotationBasedPersistentProperty#usePropertyAccess()
|
||||
*/
|
||||
@Override
|
||||
public boolean usePropertyAccess() {
|
||||
|
||||
if (this.usePropertyAccess == null) {
|
||||
this.usePropertyAccess = super.usePropertyAccess();
|
||||
}
|
||||
|
||||
return this.usePropertyAccess;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.model.AnnotationBasedPersistentProperty#isTransient()
|
||||
*/
|
||||
@Override
|
||||
public boolean isTransient() {
|
||||
|
||||
if (this.isTransient == null) {
|
||||
this.isTransient = super.isTransient();
|
||||
}
|
||||
|
||||
return this.isTransient;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,8 +15,6 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
/**
|
||||
* {@link FieldNamingStrategy} that abbreviates field names by using the very first letter of the camel case parts of
|
||||
* the {@link MongoPersistentProperty}'s name.
|
||||
@@ -24,23 +22,18 @@ import java.util.Locale;
|
||||
* @since 1.3
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class CamelCaseAbbreviatingFieldNamingStrategy implements FieldNamingStrategy {
|
||||
public class CamelCaseAbbreviatingFieldNamingStrategy extends CamelCaseSplittingFieldNamingStrategy {
|
||||
|
||||
private static final String CAMEL_CASE_PATTERN = "(?<!(^|[A-Z]))(?=[A-Z])|(?<!^)(?=[A-Z][a-z])";
|
||||
public CamelCaseAbbreviatingFieldNamingStrategy() {
|
||||
super("");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.FieldNamingStrategy#getFieldName(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty)
|
||||
* @see org.springframework.data.mongodb.core.mapping.CamelCaseSplittingFieldNamingStrategy#preparePart(java.lang.String)
|
||||
*/
|
||||
public String getFieldName(MongoPersistentProperty property) {
|
||||
|
||||
String[] parts = property.getName().split(CAMEL_CASE_PATTERN);
|
||||
StringBuilder builder = new StringBuilder();
|
||||
|
||||
for (String part : parts) {
|
||||
builder.append(part.substring(0, 1).toLowerCase(Locale.US));
|
||||
}
|
||||
|
||||
return builder.toString();
|
||||
@Override
|
||||
protected String preparePart(String part) {
|
||||
return part.substring(0, 1);
|
||||
}
|
||||
}
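After this refactoring the observable behaviour is unchanged: the property name is split at camel-case boundaries, lower-cased by the parent class, and only the first character of each part is kept, so "firstName" still maps to "fn" and "createdAtTimestamp" to "cat".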
@@ -0,0 +1,79 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.util.ParsingUtils;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Configurable {@link FieldNamingStrategy} that splits up camel-case property names and reconcatenates them using a
|
||||
* configured delimiter. Individual parts of the name can be manipulated using {@link #preparePart(String)}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @since 1.5
|
||||
*/
|
||||
public class CamelCaseSplittingFieldNamingStrategy implements FieldNamingStrategy {
|
||||
|
||||
private final String delimiter;
|
||||
|
||||
/**
|
||||
* Creates a new {@link CamelCaseSplittingFieldNamingStrategy}.
|
||||
*
|
||||
* @param delimiter must not be {@literal null}.
|
||||
*/
|
||||
public CamelCaseSplittingFieldNamingStrategy(String delimiter) {
|
||||
|
||||
Assert.notNull(delimiter, "Delimiter must not be null!");
|
||||
this.delimiter = delimiter;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.FieldNamingStrategy#getFieldName(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty)
|
||||
*/
|
||||
@Override
|
||||
public String getFieldName(MongoPersistentProperty property) {
|
||||
|
||||
List<String> parts = ParsingUtils.splitCamelCaseToLower(property.getName());
|
||||
List<String> result = new ArrayList<String>();
|
||||
|
||||
for (String part : parts) {
|
||||
|
||||
String candidate = preparePart(part);
|
||||
|
||||
if (StringUtils.hasText(candidate)) {
|
||||
result.add(candidate);
|
||||
}
|
||||
}
|
||||
|
||||
return StringUtils.collectionToDelimitedString(result, delimiter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback to prepare the uncapitalized part obtained from the split up of the camel case source. Default
|
||||
* implementation returns the part as is.
|
||||
*
|
||||
* @param part
|
||||
* @return
|
||||
*/
|
||||
protected String preparePart(String part) {
|
||||
return part;
|
||||
}
|
||||
}
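As a sketch of how this template can be reused (the subclass below is hypothetical, not part of this change), a kebab-case strategy only needs to supply a different delimiter:

import org.springframework.data.mongodb.core.mapping.CamelCaseSplittingFieldNamingStrategy;

public class KebabCaseFieldNamingStrategy extends CamelCaseSplittingFieldNamingStrategy {

    public KebabCaseFieldNamingStrategy() {
        super("-"); // "firstName" -> "first-name"
    }
}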
@@ -21,6 +21,7 @@ package org.springframework.data.mongodb.core.mapping;
|
||||
* @see DocumentField
|
||||
* @see PropertyNameFieldNamingStrategy
|
||||
* @see CamelCaseAbbreviatingFieldNamingStrategy
|
||||
* @see SnakeCaseFieldNamingStrategy
|
||||
* @since 1.3
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
|
||||
@@ -44,7 +44,7 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
int getFieldOrder();
|
||||
|
||||
/**
|
||||
* Returns whether the propert is a {@link com.mongodb.DBRef}. If this returns {@literal true} you can expect
|
||||
* Returns whether the property is a {@link com.mongodb.DBRef}. If this returns {@literal true} you can expect
|
||||
* {@link #getDBRef()} to return a non-{@literal null} value.
|
||||
*
|
||||
* @return
|
||||
|
||||
@@ -0,0 +1,34 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
/**
|
||||
* {@link FieldNamingStrategy} that translates typical camel case Java property names to lower case JSON element names,
|
||||
* separated by underscores.
|
||||
*
|
||||
* @since 1.5
|
||||
* @author Ryan Tenney
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class SnakeCaseFieldNamingStrategy extends CamelCaseSplittingFieldNamingStrategy {
|
||||
|
||||
/**
|
||||
* Creates a new {@link SnakeCaseFieldNamingStrategy}.
|
||||
*/
|
||||
public SnakeCaseFieldNamingStrategy() {
|
||||
super("_");
|
||||
}
|
||||
}
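With the split-and-join template in place, this strategy maps the property "firstName" to the element name "first_name" and "createdAtTimestamp" to "created_at_timestamp".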
@@ -126,13 +126,13 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
|
||||
public void onAfterDelete(DBObject dbo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterConvert({})", dbo);
|
||||
LOG.debug("onAfterDelete({})", dbo);
|
||||
}
|
||||
}
|
||||
|
||||
public void onBeforeDelete(DBObject dbo) {
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterConvert({})", dbo);
|
||||
LOG.debug("onBeforeDelete({})", dbo);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
@@ -87,12 +88,8 @@ public class BasicUpdate extends Update {
|
||||
|
||||
@Override
|
||||
public Update pullAll(String key, Object[] values) {
|
||||
Object[] convertedValues = new Object[values.length];
|
||||
for (int i = 0; i < values.length; i++) {
|
||||
convertedValues[i] = values[i];
|
||||
}
|
||||
DBObject keyValue = new BasicDBObject();
|
||||
keyValue.put(key, convertedValues);
|
||||
keyValue.put(key, Arrays.copyOf(values, values.length));
|
||||
updateObject.put("$pullAll", keyValue);
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -31,7 +31,9 @@ import org.springframework.data.geo.Shape;
|
||||
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
|
||||
import org.springframework.data.mongodb.core.geo.Sphere;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
@@ -529,8 +531,11 @@ public class Criteria implements CriteriaDefinition {
|
||||
* @see org.springframework.data.mongodb.core.query.CriteriaDefinition#getCriteriaObject()
|
||||
*/
|
||||
public DBObject getCriteriaObject() {
|
||||
|
||||
if (this.criteriaChain.size() == 1) {
|
||||
return criteriaChain.get(0).getSingleCriteriaObject();
|
||||
} else if (CollectionUtils.isEmpty(this.criteriaChain) && !CollectionUtils.isEmpty(this.criteria)) {
|
||||
return getSingleCriteriaObject();
|
||||
} else {
|
||||
DBObject criteriaObject = new BasicDBObject();
|
||||
for (Criteria c : this.criteriaChain) {
|
||||
@@ -564,6 +569,13 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
}
|
||||
|
||||
if (!StringUtils.hasText(this.key)) {
|
||||
if (not) {
|
||||
return new BasicDBObject("$not", dbo);
|
||||
}
|
||||
return dbo;
|
||||
}
|
||||
|
||||
DBObject queryCriteria = new BasicDBObject();
|
||||
|
||||
if (!NOT_SET.equals(isValue)) {
|
||||
|
||||
@@ -166,7 +166,7 @@ public class Query {
|
||||
|
||||
for (Order order : sort) {
|
||||
if (order.isIgnoreCase()) {
|
||||
throw new IllegalArgumentException(String.format("Gven sort contained an Order for %s with ignore case! "
|
||||
throw new IllegalArgumentException(String.format("Given sort contained an Order for %s with ignore case! "
|
||||
+ "MongoDB does not support sorting ignoreing case currently!", order.getProperty()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,10 +15,11 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import static org.springframework.util.ObjectUtils.*;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
@@ -40,6 +41,7 @@ import com.mongodb.DBObject;
|
||||
* @author Oliver Gierke
|
||||
* @author Becca Gaspard
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class Update {
|
||||
|
||||
@@ -62,7 +64,7 @@ public class Update {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an {@link Update} instance from the given {@link DBObject}. Allows to explicitly exlude fields from making
|
||||
* Creates an {@link Update} instance from the given {@link DBObject}. Allows to explicitly exclude fields from making
|
||||
* it into the created {@link Update} object. Note, that this will set attributes directly and <em>not</em> use
|
||||
* {@literal $set}. This means fields not given in the {@link DBObject} will be nulled when executing the update. To
|
||||
* create an only-updating {@link Update} instance of a {@link DBObject}, call {@link #set(String, Object)} for each
|
||||
@@ -187,12 +189,7 @@ public class Update {
|
||||
* @return
|
||||
*/
|
||||
public Update pushAll(String key, Object[] values) {
|
||||
|
||||
Object[] convertedValues = new Object[values.length];
|
||||
for (int i = 0; i < values.length; i++) {
|
||||
convertedValues[i] = values[i];
|
||||
}
|
||||
addMultiFieldOperation("$pushAll", key, convertedValues);
|
||||
addMultiFieldOperation("$pushAll", key, Arrays.copyOf(values, values.length));
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -256,12 +253,7 @@ public class Update {
|
||||
* @return
|
||||
*/
|
||||
public Update pullAll(String key, Object[] values) {
|
||||
|
||||
Object[] convertedValues = new Object[values.length];
|
||||
for (int i = 0; i < values.length; i++) {
|
||||
convertedValues[i] = values[i];
|
||||
}
|
||||
addFieldOperation("$pullAll", key, convertedValues);
|
||||
addFieldOperation("$pullAll", key, Arrays.copyOf(values, values.length));
|
||||
return this;
|
||||
}
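A quick, illustrative sketch of why the copy loop could be replaced: Arrays.copyOf builds the same shallow, defensive copy the removed element-by-element loop did.

import java.util.Arrays;

class CopyOfSketch {

    public static void main(String[] args) {
        Object[] values = { "a", "b" };
        Object[] copy = Arrays.copyOf(values, values.length);
        System.out.println(copy != values);       // true - a fresh array, as the removed loop produced
        System.out.println(copy[0] == values[0]); // true - element references are shared (shallow copy)
    }
}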
|
||||
|
||||
@@ -279,6 +271,7 @@ public class Update {
|
||||
}
|
||||
|
||||
public DBObject getUpdateObject() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
for (String k : modifierOps.keySet()) {
|
||||
dbo.put(k, modifierOps.get(k));
|
||||
@@ -335,14 +328,52 @@ public class Update {
|
||||
return StringUtils.startsWithIgnoreCase(key, "$");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return getUpdateObject().hashCode();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Update that = (Update) obj;
|
||||
return this.getUpdateObject().equals(that.getUpdateObject());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return SerializationUtils.serializeToJsonSafely(getUpdateObject());
|
||||
}
|
||||
|
||||
/**
|
||||
* Modifiers holds a distinct collection of {@link Modifier}
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public static class Modifiers {
|
||||
|
||||
private HashMap<String, Modifier> modifiers;
|
||||
private Map<String, Modifier> modifiers;
|
||||
|
||||
public Modifiers() {
|
||||
this.modifiers = new LinkedHashMap<String, Modifier>(1);
|
||||
@@ -355,6 +386,33 @@ public class Update {
|
||||
public void addModifier(Modifier modifier) {
|
||||
this.modifiers.put(modifier.getKey(), modifier);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return nullSafeHashCode(modifiers);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Modifiers that = (Modifiers) obj;
|
||||
|
||||
return this.modifiers.equals(that.modifiers);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -379,6 +437,7 @@ public class Update {
|
||||
* Implementation of {@link Modifier} representing {@code $each}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class Each implements Modifier {
|
||||
|
||||
@@ -398,29 +457,60 @@ public class Update {
|
||||
return ((Collection<?>) values[0]).toArray();
|
||||
}
|
||||
|
||||
Object[] convertedValues = new Object[values.length];
|
||||
for (int i = 0; i < values.length; i++) {
|
||||
convertedValues[i] = values[i];
|
||||
}
|
||||
|
||||
return convertedValues;
|
||||
return Arrays.copyOf(values, values.length);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.Update.Modifier#getKey()
|
||||
*/
|
||||
@Override
|
||||
public String getKey() {
|
||||
return "$each";
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.Update.Modifier#getValue()
|
||||
*/
|
||||
@Override
|
||||
public Object getValue() {
|
||||
return this.values;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return nullSafeHashCode(values);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object that) {
|
||||
|
||||
if (this == that) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (that == null || getClass() != that.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return nullSafeEquals(values, ((Each) that).values);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for creating {@code $push} modifiers
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class PushOperatorBuilder {
|
||||
|
||||
@@ -453,6 +543,50 @@ public class Update {
|
||||
public Update value(Object value) {
|
||||
return Update.this.push(key, value);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
int result = 17;
|
||||
|
||||
result += 31 * result + getOuterType().hashCode();
|
||||
result += 31 * result + nullSafeHashCode(key);
|
||||
result += 31 * result + nullSafeHashCode(modifiers);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PushOperatorBuilder that = (PushOperatorBuilder) obj;
|
||||
|
||||
if (!getOuterType().equals(that.getOuterType())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return nullSafeEquals(this.key, that.key) && nullSafeEquals(this.modifiers, that.modifiers);
|
||||
}
|
||||
|
||||
private Update getOuterType() {
|
||||
return Update.this;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -488,7 +622,5 @@ public class Update {
|
||||
public Update value(Object value) {
|
||||
return Update.this.addToSet(this.key, value);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -38,7 +38,7 @@ import org.springframework.data.annotation.QueryAnnotation;
|
||||
public @interface Query {
|
||||
|
||||
/**
|
||||
* Takes a MongoDB JSON string to define the actual query to be executed. This one will take precendece over the
|
||||
* Takes a MongoDB JSON string to define the actual query to be executed. This one will take precedence over the
|
||||
* method name then.
|
||||
*
|
||||
* @return
|
||||
|
||||
@@ -105,6 +105,8 @@ public class MongoRepositoryConfigurationExtension extends RepositoryConfigurati
|
||||
@Override
|
||||
public void registerBeansForRoot(BeanDefinitionRegistry registry, RepositoryConfigurationSource configurationSource) {
|
||||
|
||||
super.registerBeansForRoot(registry, configurationSource);
|
||||
|
||||
if (!registry.containsBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME)) {
|
||||
|
||||
RootBeanDefinition definition = new RootBeanDefinition(MongoMappingContext.class);
|
||||
|
||||
@@ -284,7 +284,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
return countProjection ? operations.count(query, metadata.getJavaType()) : operations.findOne(query,
|
||||
metadata.getJavaType());
|
||||
metadata.getJavaType(), metadata.getCollectionName());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -388,10 +388,10 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
private Object deleteAndConvertResult(Query query, MongoEntityMetadata<?> metadata) {
|
||||
|
||||
if (method.isCollectionQuery()) {
|
||||
return operations.findAllAndRemove(query, metadata.getJavaType());
|
||||
return operations.findAllAndRemove(query, metadata.getJavaType(), metadata.getCollectionName());
|
||||
}
|
||||
|
||||
WriteResult writeResult = operations.remove(query, metadata.getCollectionName());
|
||||
WriteResult writeResult = operations.remove(query, metadata.getJavaType(), metadata.getCollectionName());
|
||||
return writeResult != null ? writeResult.getN() : 0L;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,13 +20,16 @@ import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.Metrics;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.geo.Shape;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.PersistentPropertyPath;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
@@ -40,16 +43,19 @@ import org.springframework.data.repository.query.parser.Part.IgnoreCaseType;
|
||||
import org.springframework.data.repository.query.parser.Part.Type;
|
||||
import org.springframework.data.repository.query.parser.PartTree;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Custom query creator to create Mongo criterias.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(MongoQueryCreator.class);
|
||||
private static final Pattern PUNCTATION_PATTERN = Pattern.compile("\\p{Punct}");
|
||||
private final MongoParameterAccessor accessor;
|
||||
private final boolean isGeoNearQuery;
|
||||
|
||||
@@ -198,7 +204,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
case STARTING_WITH:
|
||||
case ENDING_WITH:
|
||||
case CONTAINING:
|
||||
return addAppropriateLikeRegexTo(criteria, part, parameters.next().toString());
|
||||
return createContainingCriteria(part, property, criteria, parameters);
|
||||
case REGEX:
|
||||
return criteria.regex(parameters.next().toString());
|
||||
case EXISTS:
|
||||
@@ -216,7 +222,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
if (distance == null) {
|
||||
return criteria.near(point);
|
||||
} else {
|
||||
if (distance.getMetric() != null) {
|
||||
if (!Metrics.NEUTRAL.equals(distance.getMetric())) {
|
||||
criteria.nearSphere(point);
|
||||
} else {
|
||||
criteria.near(point);
|
||||
@@ -269,19 +275,23 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
private Criteria createLikeRegexCriteriaOrThrow(Part part, MongoPersistentProperty property, Criteria criteria,
|
||||
PotentiallyConvertingIterator parameters, boolean shouldNegateExpression) {
|
||||
|
||||
PropertyPath path = part.getProperty().getLeafProperty();
|
||||
|
||||
switch (part.shouldIgnoreCase()) {
|
||||
|
||||
case ALWAYS:
|
||||
if (part.getProperty().getType() != String.class) {
|
||||
throw new IllegalArgumentException(String.format("part %s must be of type String but was %s",
|
||||
part.getProperty(), part.getType()));
|
||||
if (path.getType() != String.class) {
|
||||
throw new IllegalArgumentException(
|
||||
String.format("Part %s must be of type String but was %s", path, path.getType()));
|
||||
}
|
||||
// fall-through
|
||||
|
||||
case WHEN_POSSIBLE:
|
||||
|
||||
if (shouldNegateExpression) {
|
||||
criteria = criteria.not();
|
||||
}
|
||||
|
||||
return addAppropriateLikeRegexTo(criteria, part, parameters.nextConverted(property).toString());
|
||||
|
||||
case NEVER:
|
||||
@@ -292,6 +302,27 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
Arrays.asList(IgnoreCaseType.ALWAYS, IgnoreCaseType.WHEN_POSSIBLE), part.shouldIgnoreCase()));
|
||||
}
|
||||
|
||||
/**
|
||||
* If the target property of the comparison is of type String, then the operator checks for a match using a regular
* expression. If the target property of the comparison is a {@link Collection}, then the operator evaluates to true if
|
||||
* it finds an exact match within any member of the {@link Collection}.
|
||||
*
|
||||
* @param part
|
||||
* @param property
|
||||
* @param criteria
|
||||
* @param parameters
|
||||
* @return
|
||||
*/
|
||||
private Criteria createContainingCriteria(Part part, MongoPersistentProperty property, Criteria criteria,
|
||||
PotentiallyConvertingIterator parameters) {
|
||||
|
||||
if (property.isCollectionLike()) {
|
||||
return criteria.in(nextAsArray(parameters, property));
|
||||
}
|
||||
|
||||
return addAppropriateLikeRegexTo(criteria, part, parameters.next().toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an appropriate like-regex and appends it to the given criteria.
|
||||
*
|
||||
@@ -337,8 +368,8 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
return (T) parameter;
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Expected parameter type of %s but got %s!", type,
|
||||
parameter.getClass()));
|
||||
throw new IllegalArgumentException(
|
||||
String.format("Expected parameter type of %s but got %s!", type, parameter.getClass()));
|
||||
}
|
||||
|
||||
private Object[] nextAsArray(PotentiallyConvertingIterator iterator, MongoPersistentProperty property) {
|
||||
@@ -356,23 +387,57 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
private String toLikeRegex(String source, Part part) {
|
||||
|
||||
Type type = part.getType();
|
||||
String regex = prepareAndEscapeStringBeforeApplyingLikeRegex(source, part);
|
||||
|
||||
switch (type) {
|
||||
case STARTING_WITH:
|
||||
source = "^" + source;
|
||||
regex = "^" + regex;
|
||||
break;
|
||||
case ENDING_WITH:
|
||||
source = source + "$";
|
||||
regex = regex + "$";
|
||||
break;
|
||||
case CONTAINING:
|
||||
source = "*" + source + "*";
|
||||
regex = ".*" + regex + ".*";
|
||||
break;
|
||||
case SIMPLE_PROPERTY:
|
||||
case NEGATING_SIMPLE_PROPERTY:
|
||||
source = "^" + source + "$";
|
||||
regex = "^" + regex + "$";
|
||||
default:
|
||||
}
|
||||
|
||||
return source.replaceAll("\\*", ".*");
|
||||
return regex;
|
||||
}
|
||||
|
||||
private String prepareAndEscapeStringBeforeApplyingLikeRegex(String source, Part qpart) {
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(Type.LIKE, qpart.getType())) {
|
||||
return PUNCTATION_PATTERN.matcher(source).find() ? Pattern.quote(source) : source;
|
||||
}
|
||||
|
||||
if (source.equals("*")) {
|
||||
return ".*";
|
||||
}
|
||||
|
||||
StringBuilder sb = new StringBuilder();
|
||||
|
||||
boolean leadingWildcard = source.startsWith("*");
|
||||
boolean trailingWildcard = source.endsWith("*");
|
||||
|
||||
String valueToUse = source.substring(leadingWildcard ? 1 : 0,
|
||||
trailingWildcard ? source.length() - 1 : source.length());
|
||||
|
||||
if (PUNCTATION_PATTERN.matcher(valueToUse).find()) {
|
||||
valueToUse = Pattern.quote(valueToUse);
|
||||
}
|
||||
|
||||
if (leadingWildcard) {
|
||||
sb.append(".*");
|
||||
}
|
||||
sb.append(valueToUse);
|
||||
if (trailingWildcard) {
|
||||
sb.append(".*");
|
||||
}
|
||||
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -126,8 +126,7 @@ public class MongoQueryMethod extends QueryMethod {
|
||||
MongoPersistentEntity<?> collectionEntity = domainClass.isAssignableFrom(returnedObjectType) ? returnedEntity
|
||||
: managedEntity;
|
||||
|
||||
this.metadata = new SimpleMongoEntityMetadata<Object>((Class<Object>) returnedEntity.getType(),
|
||||
collectionEntity.getCollection());
|
||||
this.metadata = new SimpleMongoEntityMetadata<Object>((Class<Object>) returnedEntity.getType(), collectionEntity);
|
||||
}
|
||||
|
||||
return this.metadata;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2002-2014 the original author or authors.
|
||||
* Copyright 2002-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -19,10 +19,14 @@ import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.repository.query.QueryMethod;
|
||||
import org.springframework.data.repository.query.RepositoryQuery;
|
||||
import org.springframework.data.repository.query.parser.PartTree;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.util.JSONParseException;
|
||||
|
||||
/**
|
||||
* {@link RepositoryQuery} implementation for Mongo.
|
||||
@@ -67,7 +71,24 @@ public class PartTreeMongoQuery extends AbstractMongoQuery {
|
||||
protected Query createQuery(ConvertingParameterAccessor accessor) {
|
||||
|
||||
MongoQueryCreator creator = new MongoQueryCreator(tree, accessor, context, isGeoNearQuery);
|
||||
return creator.createQuery();
|
||||
Query query = creator.createQuery();
|
||||
|
||||
String fieldSpec = this.getQueryMethod().getFieldSpecification();
|
||||
|
||||
if (!StringUtils.hasText(fieldSpec)) {
|
||||
return query;
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
BasicQuery result = new BasicQuery(query.getQueryObject().toString(), fieldSpec);
|
||||
result.setSortObject(query.getSortObject());
|
||||
return result;
|
||||
|
||||
} catch (JSONParseException o_O) {
|
||||
throw new IllegalStateException(String.format("Invalid query or field specification in %s!", getQueryMethod()),
|
||||
o_O);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
@@ -25,21 +26,22 @@ import org.springframework.util.Assert;
|
||||
class SimpleMongoEntityMetadata<T> implements MongoEntityMetadata<T> {
|
||||
|
||||
private final Class<T> type;
|
||||
private final String collectionName;
|
||||
private final MongoPersistentEntity<?> collectionEntity;
|
||||
|
||||
/**
|
||||
* Creates a new {@link SimpleMongoEntityMetadata} using the given type and collection name.
|
||||
* Creates a new {@link SimpleMongoEntityMetadata} using the given type and {@link MongoPersistentEntity} to use for
|
||||
* collection lookups.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @param collectionEntity must not be {@literal null} or empty.
|
||||
*/
|
||||
public SimpleMongoEntityMetadata(Class<T> type, String collectionName) {
|
||||
public SimpleMongoEntityMetadata(Class<T> type, MongoPersistentEntity<?> collectionEntity) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
Assert.notNull(collectionEntity, "Collection entity must not be null or empty!");
|
||||
|
||||
this.type = type;
|
||||
this.collectionName = collectionName;
|
||||
this.collectionEntity = collectionEntity;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -55,6 +57,6 @@ class SimpleMongoEntityMetadata<T> implements MongoEntityMetadata<T> {
|
||||
* @see org.springframework.data.mongodb.repository.query.MongoEntityMetadata#getCollectionName()
|
||||
*/
|
||||
public String getCollectionName() {
|
||||
return collectionName;
|
||||
return collectionEntity.getCollection();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
@@ -23,7 +26,9 @@ import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.util.JSON;
|
||||
|
||||
/**
|
||||
@@ -31,17 +36,20 @@ import com.mongodb.util.JSON;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
|
||||
private static final String COUND_AND_DELETE = "Manually defined query for %s cannot be both a count and delete query at the same time!";
|
||||
private static final Pattern PLACEHOLDER = Pattern.compile("\\?(\\d+)");
|
||||
private static final Logger LOG = LoggerFactory.getLogger(StringBasedMongoQuery.class);
|
||||
private static final ParameterBindingParser PARSER = ParameterBindingParser.INSTANCE;
|
||||
|
||||
private final String query;
|
||||
private final String fieldSpec;
|
||||
private final boolean isCountQuery;
|
||||
private final boolean isDeleteQuery;
|
||||
private final List<ParameterBinding> queryParameterBindings;
|
||||
private final List<ParameterBinding> fieldSpecParameterBindings;
|
||||
|
||||
/**
|
||||
* Creates a new {@link StringBasedMongoQuery} for the given {@link MongoQueryMethod} and {@link MongoOperations}.
|
||||
@@ -65,7 +73,11 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
super(method, mongoOperations);
|
||||
|
||||
this.query = query;
|
||||
this.queryParameterBindings = PARSER.parseParameterBindingsFrom(query);
|
||||
|
||||
this.fieldSpec = method.getFieldSpecification();
|
||||
this.fieldSpecParameterBindings = PARSER.parseParameterBindingsFrom(method.getFieldSpecification());
|
||||
|
||||
this.isCountQuery = method.hasAnnotatedQuery() ? method.getQueryAnnotation().count() : false;
|
||||
this.isDeleteQuery = method.hasAnnotatedQuery() ? method.getQueryAnnotation().delete() : false;
|
||||
|
||||
@@ -81,12 +93,12 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
@Override
|
||||
protected Query createQuery(ConvertingParameterAccessor accessor) {
|
||||
|
||||
String queryString = replacePlaceholders(query, accessor);
|
||||
String queryString = replacePlaceholders(query, accessor, queryParameterBindings);
|
||||
|
||||
Query query = null;
|
||||
|
||||
if (fieldSpec != null) {
|
||||
String fieldString = replacePlaceholders(fieldSpec, accessor);
|
||||
String fieldString = replacePlaceholders(fieldSpec, accessor, fieldSpecParameterBindings);
|
||||
query = new BasicQuery(queryString, fieldString);
|
||||
} else {
|
||||
query = new BasicQuery(queryString);
|
||||
@@ -119,21 +131,174 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
return this.isDeleteQuery;
|
||||
}
|
||||
|
||||
private String replacePlaceholders(String input, ConvertingParameterAccessor accessor) {
|
||||
/**
|
||||
* Replaces the parameter placeholders with the actual parameter values from the given {@link ParameterBinding}s.
|
||||
*
|
||||
* @param input
|
||||
* @param accessor
|
||||
* @param bindings
|
||||
* @return
|
||||
*/
|
||||
private String replacePlaceholders(String input, ConvertingParameterAccessor accessor, List<ParameterBinding> bindings) {
|
||||
|
||||
Matcher matcher = PLACEHOLDER.matcher(input);
|
||||
String result = input;
|
||||
|
||||
while (matcher.find()) {
|
||||
String group = matcher.group();
|
||||
int index = Integer.parseInt(matcher.group(1));
|
||||
result = result.replace(group, getParameterWithIndex(accessor, index));
|
||||
if (bindings.isEmpty()) {
|
||||
return input;
|
||||
}
|
||||
|
||||
return result;
|
||||
StringBuilder result = new StringBuilder(input);
|
||||
|
||||
for (ParameterBinding binding : bindings) {
|
||||
|
||||
String parameter = binding.getParameter();
|
||||
int idx = result.indexOf(parameter);
|
||||
|
||||
if (idx != -1) {
|
||||
result.replace(idx, idx + parameter.length(), getParameterValueForBinding(accessor, binding));
|
||||
}
|
||||
}
|
||||
|
||||
return result.toString();
|
||||
}
|
||||
|
||||
private String getParameterWithIndex(ConvertingParameterAccessor accessor, int index) {
|
||||
return JSON.serialize(accessor.getBindableValue(index));
|
||||
/**
|
||||
* Returns the serialized value to be used for the given {@link ParameterBinding}.
|
||||
*
|
||||
* @param accessor
|
||||
* @param binding
|
||||
* @return
|
||||
*/
|
||||
private String getParameterValueForBinding(ConvertingParameterAccessor accessor, ParameterBinding binding) {
|
||||
|
||||
Object value = accessor.getBindableValue(binding.getParameterIndex());
|
||||
|
||||
if (value instanceof String && binding.isQuoted()) {
|
||||
return (String) value;
|
||||
}
|
||||
|
||||
return JSON.serialize(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* A parser that extracts the parameter bindings from a given query string.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static enum ParameterBindingParser {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
private static final String PARAMETER_PREFIX = "_param_";
|
||||
private static final String PARSEABLE_PARAMETER = "\"" + PARAMETER_PREFIX + "$1\"";
|
||||
private static final Pattern PARAMETER_BINDING_PATTERN = Pattern.compile("\\?(\\d+)");
|
||||
private static final Pattern PARSEABLE_BINDING_PATTERN = Pattern.compile("\"?" + PARAMETER_PREFIX + "(\\d+)\"?");
|
||||
|
||||
private final static int PARAMETER_INDEX_GROUP = 1;
|
||||
|
||||
/**
|
||||
* Returns a list of {@link ParameterBinding}s found in the given {@code input}, or {@link Collections#emptyList()}
* if none are found.
|
||||
*
|
||||
* @param input
|
||||
* @return
|
||||
*/
|
||||
public List<ParameterBinding> parseParameterBindingsFrom(String input) {
|
||||
|
||||
if (!StringUtils.hasText(input)) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
List<ParameterBinding> bindings = new ArrayList<ParameterBinding>();
|
||||
|
||||
String parseableInput = makeParameterReferencesParseable(input);
|
||||
|
||||
collectParameterReferencesIntoBindings(bindings, JSON.parse(parseableInput));
|
||||
|
||||
return bindings;
|
||||
}
|
||||
|
||||
private String makeParameterReferencesParseable(String input) {
|
||||
|
||||
Matcher matcher = PARAMETER_BINDING_PATTERN.matcher(input);
|
||||
String parseableInput = matcher.replaceAll(PARSEABLE_PARAMETER);
|
||||
|
||||
return parseableInput;
|
||||
}
|
||||
|
||||
private void collectParameterReferencesIntoBindings(List<ParameterBinding> bindings, Object value) {
|
||||
|
||||
if (value instanceof String) {
|
||||
|
||||
String string = ((String) value).trim();
|
||||
|
||||
Matcher valueMatcher = PARSEABLE_BINDING_PATTERN.matcher(string);
|
||||
while (valueMatcher.find()) {
|
||||
int paramIndex = Integer.parseInt(valueMatcher.group(PARAMETER_INDEX_GROUP));
|
||||
boolean quoted = (string.startsWith("'") && string.endsWith("'"))
|
||||
|| (string.startsWith("\"") && string.endsWith("\""));
|
||||
bindings.add(new ParameterBinding(paramIndex, quoted));
|
||||
}
|
||||
|
||||
} else if (value instanceof Pattern) {
|
||||
|
||||
String string = ((Pattern) value).toString().trim();
|
||||
|
||||
Matcher valueMatcher = PARSEABLE_BINDING_PATTERN.matcher(string);
|
||||
while (valueMatcher.find()) {
|
||||
int paramIndex = Integer.parseInt(valueMatcher.group(PARAMETER_INDEX_GROUP));
|
||||
|
||||
/*
|
||||
* The pattern is used as a direct parameter replacement, e.g. 'field': ?1,
|
||||
* therefore we treat it as not quoted to remain backwards compatible.
|
||||
*/
|
||||
boolean quoted = !string.equals(PARAMETER_PREFIX + paramIndex);
|
||||
|
||||
bindings.add(new ParameterBinding(paramIndex, quoted));
|
||||
}
|
||||
|
||||
} else if (value instanceof DBObject) {
|
||||
|
||||
DBObject dbo = (DBObject) value;
|
||||
|
||||
for (String field : dbo.keySet()) {
|
||||
collectParameterReferencesIntoBindings(bindings, field);
|
||||
collectParameterReferencesIntoBindings(bindings, dbo.get(field));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A generic parameter binding with name or position information.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class ParameterBinding {
|
||||
|
||||
private final int parameterIndex;
|
||||
private final boolean quoted;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ParameterBinding} with the given {@code parameterIndex} and {@code quoted} information.
|
||||
*
|
||||
* @param parameterIndex
|
||||
* @param quoted whether or not the parameter is already quoted.
|
||||
*/
|
||||
public ParameterBinding(int parameterIndex, boolean quoted) {
|
||||
|
||||
this.parameterIndex = parameterIndex;
|
||||
this.quoted = quoted;
|
||||
}
|
||||
|
||||
public boolean isQuoted() {
|
||||
return quoted;
|
||||
}
|
||||
|
||||
public int getParameterIndex() {
|
||||
return parameterIndex;
|
||||
}
|
||||
|
||||
public String getParameter() {
|
||||
return "?" + parameterIndex;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,7 +35,7 @@ import com.mysema.query.apt.Configuration;
|
||||
import com.mysema.query.apt.DefaultConfiguration;
|
||||
|
||||
/**
|
||||
* Annotation processor to create Querydsl query types for QueryDsl annoated classes.
|
||||
* Annotation processor to create Querydsl query types for QueryDsl annotated classes.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.support;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
@@ -25,7 +28,10 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.DBRef;
|
||||
import com.mysema.query.mongodb.MongodbSerializer;
|
||||
import com.mysema.query.types.Constant;
|
||||
import com.mysema.query.types.Operation;
|
||||
import com.mysema.query.types.Path;
|
||||
import com.mysema.query.types.PathMetadata;
|
||||
import com.mysema.query.types.PathType;
|
||||
@@ -34,9 +40,22 @@ import com.mysema.query.types.PathType;
|
||||
* Custom {@link MongodbSerializer} to take mapping information into account when building keys for constraints.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
|
||||
private static final String ID_KEY = "_id";
|
||||
private static final Set<PathType> PATH_TYPES;
|
||||
|
||||
static {
|
||||
|
||||
Set<PathType> pathTypes = new HashSet<PathType>();
|
||||
pathTypes.add(PathType.VARIABLE);
|
||||
pathTypes.add(PathType.PROPERTY);
|
||||
|
||||
PATH_TYPES = Collections.unmodifiableSet(pathTypes);
|
||||
}
|
||||
|
||||
private final MongoConverter converter;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final QueryMapper mapper;
|
||||
@@ -44,7 +63,7 @@ class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
/**
|
||||
* Creates a new {@link SpringDataMongodbSerializer} for the given {@link MappingContext}.
|
||||
*
|
||||
* @param mappingContext
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
*/
|
||||
public SpringDataMongodbSerializer(MongoConverter converter) {
|
||||
|
||||
@@ -80,10 +99,63 @@ class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
@Override
|
||||
protected DBObject asDBObject(String key, Object value) {
|
||||
|
||||
if ("_id".equals(key)) {
|
||||
return super.asDBObject(key, mapper.convertId(value));
|
||||
if (ID_KEY.equals(key)) {
|
||||
return mapper.getMappedObject(super.asDBObject(key, value), null);
|
||||
}
|
||||
|
||||
return super.asDBObject(key, value instanceof Pattern ? value : converter.convertToMongoType(value));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.mysema.query.mongodb.MongodbSerializer#isReference(com.mysema.query.types.Path)
|
||||
*/
|
||||
@Override
|
||||
protected boolean isReference(Path<?> path) {
|
||||
|
||||
MongoPersistentProperty property = getPropertyFor(path);
|
||||
return property == null ? false : property.isAssociation();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.mysema.query.mongodb.MongodbSerializer#asReference(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected DBRef asReference(Object constant) {
|
||||
return converter.toDBRef(constant, null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.mysema.query.mongodb.MongodbSerializer#asReference(com.mysema.query.types.Operation, int)
|
||||
*/
|
||||
@Override
|
||||
protected DBRef asReference(Operation<?> expr, int constIndex) {
|
||||
|
||||
for (Object arg : expr.getArgs()) {
|
||||
|
||||
if (arg instanceof Path) {
|
||||
|
||||
MongoPersistentProperty property = getPropertyFor((Path<?>) arg);
|
||||
Object constant = ((Constant<?>) expr.getArg(constIndex)).getConstant();
|
||||
|
||||
return converter.toDBRef(constant, property);
|
||||
}
|
||||
}
|
||||
|
||||
return super.asReference(expr, constIndex);
|
||||
}
|
||||
|
||||
private MongoPersistentProperty getPropertyFor(Path<?> path) {
|
||||
|
||||
Path<?> parent = path.getMetadata().getParent();
|
||||
|
||||
if (parent == null || !PATH_TYPES.contains(path.getMetadata().getPathType())) {
|
||||
return null;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(parent.getType());
|
||||
return entity != null ? entity.getPersistentProperty(path.getMetadata().getName()) : null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,4 +3,5 @@ http\://www.springframework.org/schema/data/mongo/spring-mongo-1.1.xsd=org/sprin
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.2.xsd=org/springframework/data/mongodb/config/spring-mongo-1.2.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.3.xsd=org/springframework/data/mongodb/config/spring-mongo-1.3.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.4.xsd=org/springframework/data/mongodb/config/spring-mongo-1.4.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-1.4.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo-1.5.xsd=org/springframework/data/mongodb/config/spring-mongo-1.5.xsd
|
||||
http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-1.5.xsd
|
||||
|
||||
@@ -0,0 +1,657 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<xsd:schema xmlns="http://www.springframework.org/schema/data/mongo"
|
||||
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:beans="http://www.springframework.org/schema/beans"
|
||||
xmlns:tool="http://www.springframework.org/schema/tool"
|
||||
xmlns:context="http://www.springframework.org/schema/context"
|
||||
xmlns:repository="http://www.springframework.org/schema/data/repository"
|
||||
targetNamespace="http://www.springframework.org/schema/data/mongo"
|
||||
elementFormDefault="qualified" attributeFormDefault="unqualified">
|
||||
|
||||
<xsd:import namespace="http://www.springframework.org/schema/beans" />
|
||||
<xsd:import namespace="http://www.springframework.org/schema/tool" />
|
||||
<xsd:import namespace="http://www.springframework.org/schema/context" />
|
||||
<xsd:import namespace="http://www.springframework.org/schema/data/repository"
|
||||
schemaLocation="http://www.springframework.org/schema/data/repository/spring-repository.xsd" />
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
<tool:exports type="com.mongodb.Mongo"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:element name="db-factory">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines a MongoDbFactory for connecting to a specific database
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongoDbFactory").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mongo-ref" type="mongoRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The reference to a Mongo instance. If not configured, a default com.mongodb.Mongo instance will be created.
|
||||
]]>
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="dbname" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the database to connect to. Default is 'db'.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="authentication-dbname" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the authentication database to connect to. Default is 'db'.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="port" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The port to connect to MongoDB server. Default is 27017
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="host" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The host to connect to a MongoDB server. Default is localhost
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="username" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The username to use when connecting to a MongoDB server.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="password" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The password to use when connecting to a MongoDB server.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="uri" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The Mongo URI string.]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:attributeGroup name="mongo-repository-attributes">
|
||||
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" default="mongoTemplate">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="create-query-indexes" type="xsd:boolean" default="false">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
Enables creation of indexes for queries that get derived from the method name
|
||||
and thus reference domain class properties. Defaults to false.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:attributeGroup>
|
||||
|
||||
<xsd:element name="repositories">
|
||||
<xsd:complexType>
|
||||
<xsd:complexContent>
|
||||
<xsd:extension base="repository:repositories">
|
||||
<xsd:attributeGroup ref="mongo-repository-attributes"/>
|
||||
<xsd:attributeGroup ref="repository:repository-attributes"/>
|
||||
</xsd:extension>
|
||||
</xsd:complexContent>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:element name="mapping-converter">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[Defines a MongoConverter for getting rich mapping functionality.]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:exports type="org.springframework.data.mongodb.core.convert.MappingMongoConverter" />
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:sequence>
|
||||
<xsd:element name="custom-converters" minOccurs="0">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Top-level element that contains one or more custom converters to be used for mapping
|
||||
domain objects to and from Mongo's DBObject]]>
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:sequence>
|
||||
<xsd:element name="converter" type="customConverterType" minOccurs="0" maxOccurs="unbounded"/>
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="base-package" type="xsd:string" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the MappingMongoConverter instance (by default "mappingConverter").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="base-package" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The base package in which to scan for entities annotated with @Document
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="db-factory-ref" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a DbFactory.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.MongoDbFactory" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="type-mapper-ref" type="typeMapperRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a MongoTypeMapper to be used by this MappingMongoConverter.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mapping-context-ref" type="mappingContextRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mapping.model.MappingContext">
|
||||
The reference to a MappingContext. Will default to 'mappingContext'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="disable-validation" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener">
|
||||
Disables JSR-303 validation on MongoDB documents before they are saved. By default it is set to false.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="xsd:boolean xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="abbreviate-field-names" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.mapping.CamelCaseAbbreviatingFieldNamingStrategy">
|
||||
Enables abbreviating the field names for domain class properties to the
|
||||
first character of their camel case names, e.g. fooBar -> fb. Defaults to false.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="xsd:boolean xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="field-naming-strategy-ref" type="fieldNamingStrategyRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.mapping.FieldNamingStrategy">
|
||||
The reference to a FieldNamingStrategy.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:element name="jmx">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines JMX Model MBeans for monitoring a MongoDB server.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="mongo-ref" type="mongoRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the Mongo object that determines what server to monitor. (by default "mongo").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:element name="auditing">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
<tool:exports type="org.springframework.data.mongodb.core.mapping.event.AuditingEventListener" />
|
||||
<tool:exports type="org.springframework.data.auditing.IsNewAwareAuditingHandler" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attributeGroup ref="repository:auditing-attributes" />
|
||||
<xsd:attribute name="mapping-context-ref" type="mappingContextRef" />
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:simpleType name="typeMapperRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoTypeMapper"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="mappingContextRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mapping.model.MappingContext"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="fieldNamingStrategyRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.mapping.FieldNamingStrategy"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="mongoTemplateRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="mongoRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="sslSocketFactoryRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="javax.net.ssl.SSLSocketFactory"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:simpleType name="writeConcernEnumeration">
|
||||
<xsd:restriction base="xsd:token">
|
||||
<xsd:enumeration value="NONE" />
|
||||
<xsd:enumeration value="NORMAL" />
|
||||
<xsd:enumeration value="SAFE" />
|
||||
<xsd:enumeration value="FSYNC_SAFE" />
|
||||
<xsd:enumeration value="REPLICAS_SAFE" />
|
||||
<xsd:enumeration value="JOURNAL_SAFE" />
|
||||
<xsd:enumeration value="MAJORITY" />
|
||||
</xsd:restriction>
|
||||
</xsd:simpleType>
|
||||
<!-- MLP
|
||||
<xsd:attributeGroup name="writeConcern">
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:simpleType>
|
||||
<xsd:restriction base="xsd:string">
|
||||
<xsd:enumeration value="NONE" />
|
||||
<xsd:enumeration value="NORMAL" />
|
||||
<xsd:enumeration value="SAFE" />
|
||||
<xsd:enumeration value="FSYNC_SAFE" />
|
||||
<xsd:enumeration value="REPLICA_SAFE" />
|
||||
<xsd:enumeration value="JOURNAL_SAFE" />
|
||||
<xsd:enumeration value="MAJORITY" />
|
||||
</xsd:restriction>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
</xsd:attributeGroup>
|
||||
-->
|
||||
<xsd:complexType name="mongoType">
|
||||
<xsd:sequence minOccurs="0" maxOccurs="1">
|
||||
<xsd:element name="options" type="optionsType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The Mongo driver options
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation>
|
||||
<tool:exports type="com.mongodb.MongoOptions"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:element>
|
||||
</xsd:sequence>
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
<!-- MLP
|
||||
<xsd:attributeGroup ref="writeConcern" />
|
||||
-->
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongo").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="port" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The port to connect to MongoDB server. Default is 27017
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="host" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The host to connect to a MongoDB server. Default is localhost
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="replica-set" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The comma delimited list of host:port entries to use for replica set/pairs.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
|
||||
<xsd:complexType name="optionsType">
|
||||
<xsd:attribute name="connections-per-host" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The number of connections allowed per host. Callers will block if the pool is exhausted. Default is 10. The system property MONGO.POOLSIZE can override this.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="threads-allowed-to-block-for-connection-multiplier" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The multiplier for connectionsPerHost for the number of threads that can block. Default is 5.
If connectionsPerHost is 10 and threadsAllowedToBlockForConnectionMultiplier is 5,
then up to 50 threads can block; any more than that and an exception will be thrown.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="max-wait-time" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The maximum wait time in milliseconds of a blocking thread for a connection. Default is 120000 ms (2 minutes).
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="connect-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The connect timeout in milliseconds. The default is 0, which means an infinite timeout.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="socket-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The socket timeout. The default is 0, which means an infinite timeout.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="socket-keep-alive" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The keep-alive flag, which controls whether or not socket keep-alive is enabled. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="auto-connect-retry" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
This controls whether or not the system retries automatically on a connect. Default is false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="max-auto-connect-retry-time" type="xsd:long">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The maximum amount of time in milliseconds to spend retrying to open a connection to the same server. Default is 0, which means to use the default of 15s if autoConnectRetry is on.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-number" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
This specifies the number of servers to wait for on the write operation, and exception raising behavior. The 'w' option to the getlasterror command. Defaults to 0.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-timeout" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
This controls the timeout for write operations in milliseconds. The 'wtimeout' option to the getlasterror command. Defaults to 0 (indefinite). A value greater than zero is the number of milliseconds to wait.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-fsync" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
This controls whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="slave-ok" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
This controls whether the driver is allowed to read from secondaries or slaves. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="ssl" type="xsd:boolean">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
This controls whether the driver should use an SSL connection. Defaults to false.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="ssl-socket-factory-ref" type="sslSocketFactoryRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The SSLSocketFactory to use for the SSL connection. If none is configured here, SSLSocketFactory#getDefault() will be used.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
|
||||
<xsd:group name="beanElementGroup">
|
||||
<xsd:choice>
|
||||
<xsd:element ref="beans:bean"/>
|
||||
<xsd:element ref="beans:ref"/>
|
||||
</xsd:choice>
|
||||
</xsd:group>
|
||||
|
||||
<xsd:complexType name="customConverterType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Element defining a custom converter.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:group ref="beanElementGroup" minOccurs="0" maxOccurs="1"/>
|
||||
<xsd:attribute name="ref" type="xsd:string">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
A reference to a custom converter.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref"/>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
|
||||
<xsd:simpleType name="converterRef">
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoConverter"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
<xsd:union memberTypes="xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
|
||||
<xsd:element name="template">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines a MongoTemplate.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongoDbFactory").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="converter-ref" type="converterRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The reference to a MongoConverter instance.
|
||||
]]>
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoConverter"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="db-factory-ref" type="xsd:string"
|
||||
use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a DbFactory.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to
|
||||
type="org.springframework.data.mongodb.MongoDbFactory" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="write-concern">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:simpleType>
|
||||
<xsd:union memberTypes="writeConcernEnumeration xsd:string"/>
|
||||
</xsd:simpleType>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
|
||||
<xsd:element name="gridFsTemplate">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
Defines a GridFsTemplate.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
<xsd:complexType>
|
||||
<xsd:attribute name="id" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the mongo definition (by default "mongoDbFactory").]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="converter-ref" type="converterRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The reference to a MongoConverter instance.
|
||||
]]>
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.convert.MongoConverter"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="db-factory-ref" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation>
|
||||
The reference to a DbFactory.
|
||||
</xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.MongoDbFactory" />
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="bucket" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The GridFs bucket string.]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
</xsd:schema>
|
||||
@@ -122,7 +122,7 @@ public class AuditingViaJavaConfigRepositoriesTests {
|
||||
static interface AuditablePersonRepository extends MongoRepository<AuditablePerson, String> {}
|
||||
|
||||
@Configuration
|
||||
@EnableMongoRepositories
|
||||
@EnableMongoRepositories(basePackageClasses = AuditablePersonRepository.class, considerNestedRepositories = true)
|
||||
@EnableMongoAuditing
|
||||
static class SimpleConfigWithRepositories {
|
||||
|
||||
|
||||
@@ -21,9 +21,13 @@ import static org.junit.Assert.*;
|
||||
import java.util.Collections;
|
||||
import java.util.Set;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.config.BeanReference;
|
||||
import org.springframework.beans.factory.parsing.BeanDefinitionParsingException;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
|
||||
import org.springframework.beans.factory.xml.XmlBeanDefinitionReader;
|
||||
import org.springframework.core.convert.TypeDescriptor;
|
||||
@@ -45,17 +49,14 @@ import com.mongodb.DBObject;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Ryan Tenney
|
||||
*/
|
||||
public class MappingMongoConverterParserIntegrationTests {
|
||||
|
||||
DefaultListableBeanFactory factory;
|
||||
@Rule public ExpectedException exception = ExpectedException.none();
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
factory = new DefaultListableBeanFactory();
|
||||
XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(factory);
|
||||
reader.loadBeanDefinitions(new ClassPathResource("namespace/converter.xml"));
|
||||
}
|
||||
DefaultListableBeanFactory factory;
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-243
|
||||
@@ -63,6 +64,7 @@ public class MappingMongoConverterParserIntegrationTests {
|
||||
@Test
|
||||
public void allowsDbFactoryRefAttribute() {
|
||||
|
||||
loadValidConfiguration();
|
||||
factory.getBeanDefinition("converter");
|
||||
factory.getBean("converter");
|
||||
}
|
||||
@@ -73,6 +75,7 @@ public class MappingMongoConverterParserIntegrationTests {
|
||||
@Test
|
||||
public void hasCustomTypeMapper() {
|
||||
|
||||
loadValidConfiguration();
|
||||
MappingMongoConverter converter = factory.getBean("converter", MappingMongoConverter.class);
|
||||
MongoTypeMapper customMongoTypeMapper = factory.getBean(CustomMongoTypeMapper.class);
|
||||
|
||||
@@ -85,6 +88,7 @@ public class MappingMongoConverterParserIntegrationTests {
|
||||
@Test
|
||||
public void scansForConverterAndSetsUpCustomConversionsAccordingly() {
|
||||
|
||||
loadValidConfiguration();
|
||||
CustomConversions conversions = factory.getBean(CustomConversions.class);
|
||||
assertThat(conversions.hasCustomWriteTarget(Person.class), is(true));
|
||||
assertThat(conversions.hasCustomWriteTarget(Account.class), is(true));
|
||||
@@ -96,6 +100,7 @@ public class MappingMongoConverterParserIntegrationTests {
|
||||
@Test
|
||||
public void activatesAbbreviatingPropertiesCorrectly() {
|
||||
|
||||
loadValidConfiguration();
|
||||
BeanDefinition definition = factory.getBeanDefinition("abbreviatingConverter.mongoMappingContext");
|
||||
Object value = definition.getPropertyValues().getPropertyValue("fieldNamingStrategy").getValue();
|
||||
|
||||
@@ -104,6 +109,76 @@ public class MappingMongoConverterParserIntegrationTests {
|
||||
assertThat(strategy.getBeanClassName(), is(CamelCaseAbbreviatingFieldNamingStrategy.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-866
|
||||
*/
|
||||
@Test
|
||||
public void rejectsInvalidFieldNamingStrategyConfiguration() {
|
||||
|
||||
exception.expect(BeanDefinitionParsingException.class);
|
||||
exception.expectMessage("abbreviation");
|
||||
exception.expectMessage("field-naming-strategy-ref");
|
||||
|
||||
BeanDefinitionRegistry factory = new DefaultListableBeanFactory();
|
||||
XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(factory);
|
||||
reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-invalid.xml"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-892
|
||||
*/
|
||||
@Test
|
||||
public void shouldThrowBeanDefinitionParsingExceptionIfConverterDefinedAsNestedBean() {
|
||||
|
||||
exception.expect(BeanDefinitionParsingException.class);
|
||||
exception.expectMessage("Mongo Converter must not be defined as nested bean.");
|
||||
|
||||
loadNestedBeanConfiguration();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-925, DATAMONGO-928
|
||||
*/
|
||||
@Test
|
||||
public void shouldSupportCustomFieldNamingStrategy() {
|
||||
assertStrategyReferenceSetFor("mappingConverterWithCustomFieldNamingStrategy");
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-925, DATAMONGO-928
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotFailLoadingConfigIfAbbreviationIsDisabledAndStrategySet() {
|
||||
assertStrategyReferenceSetFor("mappingConverterWithCustomFieldNamingStrategyAndAbbreviationDisabled");
|
||||
}
|
||||
|
||||
private void loadValidConfiguration() {
|
||||
this.loadConfiguration("namespace/converter.xml");
|
||||
}
|
||||
|
||||
private void loadNestedBeanConfiguration() {
|
||||
this.loadConfiguration("namespace/converter-nested-bean-definition.xml");
|
||||
}
|
||||
|
||||
private void loadConfiguration(String configLocation) {
|
||||
factory = new DefaultListableBeanFactory();
|
||||
XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(factory);
|
||||
reader.loadBeanDefinitions(new ClassPathResource(configLocation));
|
||||
}
|
||||
|
||||
private static void assertStrategyReferenceSetFor(String beanId) {
|
||||
|
||||
BeanDefinitionRegistry factory = new DefaultListableBeanFactory();
|
||||
XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(factory);
|
||||
reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-custom-fieldnamingstrategy.xml"));
|
||||
|
||||
BeanDefinition definition = reader.getRegistry().getBeanDefinition(beanId.concat(".mongoMappingContext"));
|
||||
BeanReference value = (BeanReference) definition.getPropertyValues().getPropertyValue("fieldNamingStrategy")
|
||||
.getValue();
|
||||
|
||||
assertThat(value.getBeanName(), is("customFieldNamingStrategy"));
|
||||
}
|
||||
|
||||
@Component
|
||||
public static class SampleConverter implements Converter<Person, DBObject> {
|
||||
public DBObject convert(Person source) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012-2013 the original author or authors.
|
||||
* Copyright 2012-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -23,6 +23,7 @@ import java.net.UnknownHostException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
@@ -49,11 +50,17 @@ public class ServerAddressPropertyEditorUnitTests {
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-454
|
||||
* @see DATAMONGO-1062
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void rejectsAddressConfigWithoutASingleParsableServerAddress() {
|
||||
public void rejectsAddressConfigWithoutASingleParsableAndResolvableServerAddress() {
|
||||
|
||||
editor.setAsText("foo, bar");
|
||||
String unknownHost1 = "gugu.nonexistant.example.org";
|
||||
String unknownHost2 = "gaga.nonexistant.example.org";
|
||||
|
||||
assertUnresolvableHostnames(unknownHost1, unknownHost2);
|
||||
|
||||
editor.setAsText(unknownHost1 + "," + unknownHost2);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -193,4 +200,16 @@ public class ServerAddressPropertyEditorUnitTests {
|
||||
assertThat(addresses, hasItem(new ServerAddress(InetAddress.getByName(hostAddress), port)));
|
||||
}
|
||||
}
|
||||
|
||||
private void assertUnresolvableHostnames(String... hostnames) {
|
||||
|
||||
for (String hostname : hostnames) {
|
||||
try {
|
||||
InetAddress.getByName(hostname);
|
||||
Assert.fail("Supposedly unresolveable hostname '" + hostname + "' can be resolved.");
|
||||
} catch (UnknownHostException expected) {
|
||||
// ok
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,93 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.mongodb.core.index.IndexInfo;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Integration tests for {@link DefaultIndexOperations}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:infrastructure.xml")
|
||||
public class DefaultIndexOperationsIntegrationTests {
|
||||
|
||||
static final DBObject GEO_SPHERE_2D = new BasicDBObject("location", "2dsphere");
|
||||
|
||||
@Autowired MongoTemplate template;
|
||||
DefaultIndexOperations indexOps;
|
||||
DBCollection collection;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
String collectionName = this.template.getCollectionName(DefaultIndexOperationsIntegrationTestsSample.class);
|
||||
|
||||
this.collection = this.template.getDb().getCollection(collectionName);
|
||||
this.collection.dropIndexes();
|
||||
|
||||
this.indexOps = new DefaultIndexOperations(template, collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1008
|
||||
*/
|
||||
@Test
|
||||
public void getIndexInfoShouldBeAbleToRead2dsphereIndex() {
|
||||
|
||||
collection.createIndex(GEO_SPHERE_2D);
|
||||
|
||||
IndexInfo info = findAndReturnIndexInfo(GEO_SPHERE_2D);
|
||||
assertThat(info.getIndexFields().get(0).isGeo(), is(true));
|
||||
}
|
||||
|
||||
private IndexInfo findAndReturnIndexInfo(DBObject keys) {
|
||||
return findAndReturnIndexInfo(indexOps.getIndexInfo(), keys);
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
private static IndexInfo findAndReturnIndexInfo(Iterable<IndexInfo> candidates, DBObject keys) {
|
||||
return findAndReturnIndexInfo(candidates, DBCollection.genIndexName(keys));
|
||||
}
|
||||
|
||||
private static IndexInfo findAndReturnIndexInfo(Iterable<IndexInfo> candidates, String name) {
|
||||
|
||||
for (IndexInfo info : candidates) {
|
||||
if (ObjectUtils.nullSafeEquals(name, info.getName())) {
|
||||
return info;
|
||||
}
|
||||
}
|
||||
throw new AssertionError(String.format("Index with name '%s' was not found", name));
|
||||
}
|
||||
|
||||
static class DefaultIndexOperationsIntegrationTestsSample {}
|
||||
}
|
||||
@@ -34,7 +34,6 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.bson.types.ObjectId;
|
||||
import org.hamcrest.CoreMatchers;
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
@@ -75,6 +74,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
@@ -187,6 +187,9 @@ public class MongoTemplateTests {
|
||||
template.dropCollection(DocumentWithCollectionOfSimpleType.class);
|
||||
template.dropCollection(DocumentWithMultipleCollections.class);
|
||||
template.dropCollection(DocumentWithDBRefCollection.class);
|
||||
template.dropCollection(SomeContent.class);
|
||||
template.dropCollection(SomeTemplate.class);
|
||||
template.dropCollection(Address.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -299,6 +302,9 @@ public class MongoTemplateTests {
|
||||
@Test
|
||||
public void rejectsDuplicateIdInInsertAll() {
|
||||
|
||||
thrown.expect(DataIntegrityViolationException.class);
|
||||
thrown.expectMessage("E11000 duplicate key error index: database.person.$_id_");
|
||||
|
||||
MongoTemplate template = new MongoTemplate(factory);
|
||||
template.setWriteResultChecking(WriteResultChecking.EXCEPTION);
|
||||
|
||||
@@ -310,15 +316,7 @@ public class MongoTemplateTests {
|
||||
records.add(person);
|
||||
records.add(person);
|
||||
|
||||
try {
|
||||
template.insertAll(records);
|
||||
fail("Expected DataIntegrityViolationException!");
|
||||
} catch (DataIntegrityViolationException e) {
|
||||
assertThat(
|
||||
e.getMessage(),
|
||||
CoreMatchers
|
||||
.startsWith("Insert list failed: E11000 duplicate key error index: database.person.$_id_ dup key: { : ObjectId"));
|
||||
}
|
||||
template.insertAll(records);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -2515,6 +2513,37 @@ public class MongoTemplateTests {
|
||||
assertThat(template.getDb().getCollection("sample").find(new BasicDBObject("field", "data")).count(), is(1));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1001
|
||||
*/
|
||||
@Test
|
||||
public void shouldAllowSavingOfLazyLoadedDbRefs() {
|
||||
|
||||
template.dropCollection(SomeTemplate.class);
|
||||
template.dropCollection(SomeMessage.class);
|
||||
template.dropCollection(SomeContent.class);
|
||||
|
||||
SomeContent content = new SomeContent();
|
||||
content.id = "content-1";
|
||||
content.text = "spring";
|
||||
template.save(content);
|
||||
|
||||
SomeTemplate tmpl = new SomeTemplate();
|
||||
tmpl.id = "template-1";
|
||||
tmpl.content = content; // @DBRef(lazy=true) tmpl.content
|
||||
|
||||
template.save(tmpl);
|
||||
|
||||
SomeTemplate savedTmpl = template.findById(tmpl.id, SomeTemplate.class);
|
||||
|
||||
SomeContent loadedContent = savedTmpl.getContent();
|
||||
loadedContent.setText("data");
|
||||
template.save(loadedContent);
|
||||
|
||||
assertThat(template.findById(content.id, SomeContent.class).getText(), is("data"));
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-880
|
||||
*/
|
||||
@@ -2599,10 +2628,189 @@ public class MongoTemplateTests {
|
||||
assertThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values, hasSize(3));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-888
|
||||
*/
|
||||
@Test
|
||||
public void sortOnIdFieldPropertyShouldBeMappedCorrectly() {
|
||||
|
||||
DocumentWithNamedIdField one = new DocumentWithNamedIdField();
|
||||
one.someIdKey = "1";
|
||||
one.value = "a";
|
||||
|
||||
DocumentWithNamedIdField two = new DocumentWithNamedIdField();
|
||||
two.someIdKey = "2";
|
||||
two.value = "b";
|
||||
|
||||
template.save(one);
|
||||
template.save(two);
|
||||
|
||||
Query query = query(where("_id").in("1", "2")).with(new Sort(Direction.DESC, "someIdKey"));
|
||||
assertThat(template.find(query, DocumentWithNamedIdField.class), contains(two, one));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-888
|
||||
*/
|
||||
@Test
|
||||
public void sortOnAnnotatedFieldPropertyShouldBeMappedCorrectly() {
|
||||
|
||||
DocumentWithNamedIdField one = new DocumentWithNamedIdField();
|
||||
one.someIdKey = "1";
|
||||
one.value = "a";
|
||||
|
||||
DocumentWithNamedIdField two = new DocumentWithNamedIdField();
|
||||
two.someIdKey = "2";
|
||||
two.value = "b";
|
||||
|
||||
template.save(one);
|
||||
template.save(two);
|
||||
|
||||
Query query = query(where("_id").in("1", "2")).with(new Sort(Direction.DESC, "value"));
|
||||
assertThat(template.find(query, DocumentWithNamedIdField.class), contains(two, one));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-913
|
||||
*/
|
||||
@Test
|
||||
public void shouldRetrieveInitializedValueFromDbRefAssociationAfterLoad() {
|
||||
|
||||
SomeContent content = new SomeContent();
|
||||
content.id = "content-1";
|
||||
content.name = "Content 1";
|
||||
content.text = "Some text";
|
||||
|
||||
template.save(content);
|
||||
|
||||
SomeTemplate tmpl = new SomeTemplate();
|
||||
tmpl.id = "template-1";
|
||||
tmpl.content = content;
|
||||
|
||||
template.save(tmpl);
|
||||
|
||||
SomeTemplate result = template.findOne(query(where("content").is(tmpl.getContent())), SomeTemplate.class);
|
||||
|
||||
assertThat(result, is(notNullValue()));
|
||||
assertThat(result.getContent(), is(notNullValue()));
|
||||
assertThat(result.getContent().getId(), is(notNullValue()));
|
||||
assertThat(result.getContent().getName(), is(notNullValue()));
|
||||
assertThat(result.getContent().getText(), is(content.getText()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-913
|
||||
*/
|
||||
@Test
|
||||
public void shouldReuseExistingDBRefInQueryFromDbRefAssociationAfterLoad() {
|
||||
|
||||
SomeContent content = new SomeContent();
|
||||
content.id = "content-1";
|
||||
content.name = "Content 1";
|
||||
content.text = "Some text";
|
||||
|
||||
template.save(content);
|
||||
|
||||
SomeTemplate tmpl = new SomeTemplate();
|
||||
tmpl.id = "template-1";
|
||||
tmpl.content = content;
|
||||
|
||||
template.save(tmpl);
|
||||
|
||||
SomeTemplate result = template.findOne(query(where("content").is(tmpl.getContent())), SomeTemplate.class);
|
||||
|
||||
// Use lazy-loading-proxy in query
|
||||
result = template.findOne(query(where("content").is(result.getContent())), SomeTemplate.class);
|
||||
|
||||
assertNotNull(result.getContent().getName());
|
||||
assertThat(result.getContent().getName(), is(content.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-970
|
||||
*/
|
||||
@Test
|
||||
public void insertsAndRemovesBasicDbObjectCorrectly() {
|
||||
|
||||
BasicDBObject object = new BasicDBObject("key", "value");
|
||||
template.insert(object, "collection");
|
||||
|
||||
assertThat(object.get("_id"), is(notNullValue()));
|
||||
assertThat(template.findAll(DBObject.class, "collection"), hasSize(1));
|
||||
|
||||
template.remove(object, "collection");
|
||||
assertThat(template.findAll(DBObject.class, "collection"), hasSize(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1207
|
||||
*/
|
||||
@Test
|
||||
public void ignoresNullElementsForInsertAll() {
|
||||
|
||||
Address newYork = new Address("NY", "New York");
|
||||
Address washington = new Address("DC", "Washington");
|
||||
|
||||
template.insertAll(Arrays.asList(newYork, null, washington));
|
||||
|
||||
List<Address> result = template.findAll(Address.class);
|
||||
|
||||
assertThat(result, hasSize(2));
|
||||
assertThat(result, hasItems(newYork, washington));
|
||||
}
|
||||
|
||||
static class DocumentWithNamedIdField {
|
||||
|
||||
@Id String someIdKey;
|
||||
|
||||
@Field(value = "val")//
|
||||
String value;
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + (someIdKey == null ? 0 : someIdKey.hashCode());
|
||||
result = prime * result + (value == null ? 0 : value.hashCode());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (!(obj instanceof DocumentWithNamedIdField)) {
|
||||
return false;
|
||||
}
|
||||
DocumentWithNamedIdField other = (DocumentWithNamedIdField) obj;
|
||||
if (someIdKey == null) {
|
||||
if (other.someIdKey != null) {
|
||||
return false;
|
||||
}
|
||||
} else if (!someIdKey.equals(other.someIdKey)) {
|
||||
return false;
|
||||
}
|
||||
if (value == null) {
|
||||
if (other.value != null) {
|
||||
return false;
|
||||
}
|
||||
} else if (!value.equals(other.value)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class DocumentWithDBRefCollection {
|
||||
|
||||
@Id public String id;
|
||||
|
||||
@Field("db_ref_list")/** @see DATAMONGO-1058 */
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef//
|
||||
public List<Sample> dbRefAnnotatedList;
|
||||
|
||||
@@ -2737,6 +2945,41 @@ public class MongoTemplateTests {
|
||||
|
||||
String state;
|
||||
String city;
|
||||
|
||||
Address() {}
|
||||
|
||||
Address(String state, String city) {
|
||||
this.state = state;
|
||||
this.city = city;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (obj == this) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(obj instanceof Address)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Address that = (Address) obj;
|
||||
|
||||
return ObjectUtils.nullSafeEquals(this.city, that.city) && //
|
||||
ObjectUtils.nullSafeEquals(this.state, that.state);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
int result = 17;
|
||||
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(this.city);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(this.state);
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
static class VersionedPerson {
|
||||
@@ -2793,6 +3036,20 @@ public class MongoTemplateTests {
|
||||
|
||||
String id;
|
||||
String text;
|
||||
String name;
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setText(String text) {
|
||||
this.text = text;
|
||||
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String getText() {
|
||||
return text;
|
||||
|
||||
@@ -42,7 +42,10 @@ import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.annotation.Version;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.convert.CustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
@@ -50,17 +53,22 @@ import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.CommandResult;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBCursor;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.ReadPreference;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link MongoTemplate}.
|
||||
@@ -235,7 +243,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
|
||||
org.mockito.Matchers.isNull(DBObject.class), org.mockito.Matchers.isNull(DBObject.class), eq(false),
|
||||
captor.capture(), eq(false), eq(false));
|
||||
|
||||
Assert.assertThat(captor.getValue().get("$inc"), Is.<Object> is(new BasicDBObject("version", 1)));
|
||||
Assert.assertThat(captor.getValue().get("$inc"), Is.<Object> is(new BasicDBObject("version", 1L)));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -329,6 +337,94 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
|
||||
verify(collection, never()).remove(Mockito.any(DBObject.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-948
|
||||
*/
|
||||
@Test
|
||||
public void sortShouldBeTakenAsIsWhenExecutingQueryWithoutSpecificTypeInformation() {
|
||||
|
||||
Query query = Query.query(Criteria.where("foo").is("bar")).with(new Sort("foo"));
|
||||
template.executeQuery(query, "collection1", new DocumentCallbackHandler() {
|
||||
|
||||
@Override
|
||||
public void processDocument(DBObject dbObject) throws MongoException, DataAccessException {
|
||||
// nothing to do - just a test
|
||||
}
|
||||
});
|
||||
|
||||
ArgumentCaptor<DBObject> captor = ArgumentCaptor.forClass(DBObject.class);
|
||||
verify(cursor, times(1)).sort(captor.capture());
|
||||
assertThat(captor.getValue(), equalTo(new BasicDBObjectBuilder().add("foo", 1).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void aggregateShouldHonorReadPreferenceWhenSet() {
|
||||
|
||||
CommandResult result = mock(CommandResult.class);
|
||||
|
||||
when(result.get("result")).thenReturn(Collections.emptySet());
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(result);
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(result);
|
||||
template.setReadPreference(ReadPreference.secondary());
|
||||
|
||||
template.aggregate(Aggregation.newAggregation(Aggregation.unwind("foo")), "collection-1", Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class), eq(ReadPreference.secondary()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void aggregateShouldIgnoreReadPreferenceWhenNotSet() {
|
||||
|
||||
CommandResult result = mock(CommandResult.class);
|
||||
|
||||
when(result.get("result")).thenReturn(Collections.emptySet());
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(result);
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(result);
|
||||
|
||||
template.aggregate(Aggregation.newAggregation(Aggregation.unwind("foo")), "collection-1", Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void geoNearShouldHonorReadPreferenceWhenSet() {
|
||||
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class)))
|
||||
.thenReturn(mock(CommandResult.class));
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
|
||||
template.setReadPreference(ReadPreference.secondary());
|
||||
|
||||
NearQuery query = NearQuery.near(new Point(1, 1));
|
||||
template.geoNear(query, Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class), eq(ReadPreference.secondary()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void geoNearShouldIgnoreReadPreferenceWhenNotSet() {
|
||||
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class)))
|
||||
.thenReturn(mock(CommandResult.class));
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
|
||||
|
||||
NearQuery query = NearQuery.near(new Point(1, 1));
|
||||
template.geoNear(query, Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class));
|
||||
}
|
||||
|
||||
class AutogenerateableId {
|
||||
|
||||
@Id BigInteger id;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,9 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
@@ -33,14 +33,13 @@ import com.mongodb.DBCursor;
|
||||
* Unit tests for {@link QueryCursorPreparer}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class QueryCursorPreparerUnitTests {
|
||||
|
||||
@Mock
|
||||
MongoDbFactory factory;
|
||||
@Mock
|
||||
DBCursor cursor;
|
||||
@Mock MongoDbFactory factory;
|
||||
@Mock DBCursor cursor;
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-185
|
||||
@@ -50,7 +49,7 @@ public class QueryCursorPreparerUnitTests {
|
||||
|
||||
Query query = query(where("foo").is("bar")).withHint("hint");
|
||||
|
||||
CursorPreparer preparer = new MongoTemplate(factory).new QueryCursorPreparer(query);
|
||||
CursorPreparer preparer = new MongoTemplate(factory).new QueryCursorPreparer(query, null);
|
||||
preparer.prepare(cursor);
|
||||
|
||||
verify(cursor).hint("hint");
|
||||
|
||||
@@ -47,6 +47,7 @@ import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.core.CollectionCallback;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationTests.CarDescriptor.Entry;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.util.Version;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
@@ -822,6 +823,40 @@ public class AggregationTests {
|
||||
assertThat(invoice.getTotalAmount(), is(closeTo(9.877, 0.000001)));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-924
|
||||
*/
|
||||
@Test
|
||||
public void shouldAllowGroupingByAliasedFieldDefinedInFormerAggregationStage() {
|
||||
|
||||
mongoTemplate.dropCollection(CarPerson.class);
|
||||
|
||||
CarPerson person1 = new CarPerson("first1", "last1", new CarDescriptor.Entry("MAKE1", "MODEL1", 2000),
|
||||
new CarDescriptor.Entry("MAKE1", "MODEL2", 2001), new CarDescriptor.Entry("MAKE2", "MODEL3", 2010),
|
||||
new CarDescriptor.Entry("MAKE3", "MODEL4", 2014));
|
||||
|
||||
CarPerson person2 = new CarPerson("first2", "last2", new CarDescriptor.Entry("MAKE3", "MODEL4", 2014));
|
||||
|
||||
CarPerson person3 = new CarPerson("first3", "last3", new CarDescriptor.Entry("MAKE2", "MODEL5", 2011));
|
||||
|
||||
mongoTemplate.save(person1);
|
||||
mongoTemplate.save(person2);
|
||||
mongoTemplate.save(person3);
|
||||
|
||||
TypedAggregation<CarPerson> agg = Aggregation.newAggregation(CarPerson.class,
|
||||
unwind("descriptors.carDescriptor.entries"), //
|
||||
project() //
|
||||
.and("descriptors.carDescriptor.entries.make").as("make") //
|
||||
.and("descriptors.carDescriptor.entries.model").as("model") //
|
||||
.and("firstName").as("firstName") //
|
||||
.and("lastName").as("lastName"), //
|
||||
group("make"));
|
||||
|
||||
AggregationResults<DBObject> result = mongoTemplate.aggregate(agg, DBObject.class);
|
||||
|
||||
assertThat(result.getMappedResults(), hasSize(3));
|
||||
}
|
||||
|
||||
private void assertLikeStats(LikeStats like, String id, long count) {
|
||||
|
||||
assertThat(like, is(notNullValue()));
|
||||
@@ -938,4 +973,52 @@ public class AggregationTests {
|
||||
this.createDate = createDate;
|
||||
}
|
||||
}
|
||||
|
||||
@org.springframework.data.mongodb.core.mapping.Document
|
||||
static class CarPerson {
|
||||
|
||||
@Id private String id;
|
||||
private String firstName;
|
||||
private String lastName;
|
||||
private Descriptors descriptors;
|
||||
|
||||
public CarPerson(String firstname, String lastname, Entry... entries) {
|
||||
this.firstName = firstname;
|
||||
this.lastName = lastname;
|
||||
|
||||
this.descriptors = new Descriptors();
|
||||
|
||||
this.descriptors.carDescriptor = new CarDescriptor(entries);
|
||||
}
|
||||
}
|
||||
|
||||
static class Descriptors {
|
||||
private CarDescriptor carDescriptor;
|
||||
}
|
||||
|
||||
static class CarDescriptor {
|
||||
|
||||
private List<Entry> entries = new ArrayList<AggregationTests.CarDescriptor.Entry>();
|
||||
|
||||
public CarDescriptor(Entry... entries) {
|
||||
|
||||
for (Entry entry : entries) {
|
||||
this.entries.add(entry);
|
||||
}
|
||||
}
|
||||
|
||||
static class Entry {
|
||||
private String make;
|
||||
private String model;
|
||||
private int year;
|
||||
|
||||
public Entry() {}
|
||||
|
||||
public Entry(String make, String model, int year) {
|
||||
this.make = make;
|
||||
this.model = model;
|
||||
this.year = year;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -179,4 +179,49 @@ public class AggregationUnitTests {
|
||||
DBObject fields = getAsDBObject(secondProjection, "$group");
|
||||
assertThat(fields.get("foosum"), is((Object) new BasicDBObject("$sum", "$foo")));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-908
|
||||
*/
|
||||
@Test
|
||||
public void shouldSupportReferingToNestedPropertiesInGroupOperation() {
|
||||
|
||||
DBObject agg = newAggregation( //
|
||||
project("cmsParameterId", "rules"), //
|
||||
unwind("rules"), //
|
||||
group("cmsParameterId", "rules.ruleType").count().as("totol") //
|
||||
).toDbObject("foo", Aggregation.DEFAULT_CONTEXT);
|
||||
|
||||
assertThat(agg, is(notNullValue()));
|
||||
|
||||
DBObject group = ((List<DBObject>) agg.get("pipeline")).get(2);
|
||||
DBObject fields = getAsDBObject(group, "$group");
|
||||
DBObject id = getAsDBObject(fields, "_id");
|
||||
|
||||
assertThat(id.get("ruleType"), is((Object) "$rules.ruleType"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-924
|
||||
*/
|
||||
@Test
|
||||
public void referencingProjectionAliasesFromPreviousStepShouldReferToTheSameFieldTarget() {
|
||||
|
||||
DBObject agg = newAggregation( //
|
||||
project().and("foo.bar").as("ba") //
|
||||
, project().and("ba").as("b") //
|
||||
).toDbObject("foo", Aggregation.DEFAULT_CONTEXT);
|
||||
|
||||
DBObject projection0 = extractPipelineElement(agg, 0, "$project");
|
||||
assertThat(projection0, is((DBObject) new BasicDBObject("ba", "$foo.bar")));
|
||||
|
||||
DBObject projection1 = extractPipelineElement(agg, 1, "$project");
|
||||
assertThat(projection1, is((DBObject) new BasicDBObject("b", "$ba")));
|
||||
}
|
||||
|
||||
private DBObject extractPipelineElement(DBObject agg, int index, String operation) {
|
||||
|
||||
List<DBObject> pipeline = (List<DBObject>) agg.get("pipeline");
|
||||
return (DBObject) pipeline.get(index).get(operation);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,25 +17,40 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.types.ObjectId;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.convert.support.GenericConversionService;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.annotation.PersistenceConstructor;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.convert.CustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link TypeBasedAggregationOperationContext}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class TypeBasedAggregationOperationContextUnitTests {
|
||||
@@ -89,6 +104,104 @@ public class TypeBasedAggregationOperationContextUnitTests {
|
||||
assertThat(context.getReference("id"), is(new FieldReference(new ExposedField(Fields.field("id", "_id"), true))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-912
|
||||
*/
|
||||
@Test
|
||||
public void shouldUseCustomConversionIfPresentAndConversionIsRequiredInFirstStage() {
|
||||
|
||||
CustomConversions customConversions = customAgeConversions();
|
||||
converter.setCustomConversions(customConversions);
|
||||
customConversions.registerConvertersIn((GenericConversionService) converter.getConversionService());
|
||||
|
||||
AggregationOperationContext context = getContext(FooPerson.class);
|
||||
|
||||
MatchOperation matchStage = match(Criteria.where("age").is(new Age(10)));
|
||||
ProjectionOperation projectStage = project("age", "name");
|
||||
|
||||
DBObject agg = newAggregation(matchStage, projectStage).toDbObject("test", context);
|
||||
|
||||
DBObject age = getValue((DBObject) getValue(getPipelineElementFromAggregationAt(agg, 0), "$match"), "age");
|
||||
assertThat(age, is((DBObject) new BasicDBObject("v", 10)));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-912
|
||||
*/
|
||||
@Test
|
||||
public void shouldUseCustomConversionIfPresentAndConversionIsRequiredInLaterStage() {
|
||||
|
||||
CustomConversions customConversions = customAgeConversions();
|
||||
converter.setCustomConversions(customConversions);
|
||||
customConversions.registerConvertersIn((GenericConversionService) converter.getConversionService());
|
||||
|
||||
AggregationOperationContext context = getContext(FooPerson.class);
|
||||
|
||||
MatchOperation matchStage = match(Criteria.where("age").is(new Age(10)));
|
||||
ProjectionOperation projectStage = project("age", "name");
|
||||
|
||||
DBObject agg = newAggregation(projectStage, matchStage).toDbObject("test", context);
|
||||
|
||||
DBObject age = getValue((DBObject) getValue(getPipelineElementFromAggregationAt(agg, 1), "$match"), "age");
|
||||
assertThat(age, is((DBObject) new BasicDBObject("v", 10)));
|
||||
}
|
||||
|
||||
@Document(collection = "person")
|
||||
public static class FooPerson {
|
||||
|
||||
final ObjectId id;
|
||||
final String name;
|
||||
final Age age;
|
||||
|
||||
@PersistenceConstructor
|
||||
FooPerson(ObjectId id, String name, Age age) {
|
||||
this.id = id;
|
||||
this.name = name;
|
||||
this.age = age;
|
||||
}
|
||||
}
|
||||
|
||||
public static class Age {
|
||||
|
||||
final int value;
|
||||
|
||||
Age(int value) {
|
||||
this.value = value;
|
||||
}
|
||||
}
|
||||
|
||||
public CustomConversions customAgeConversions() {
|
||||
return new CustomConversions(Arrays.<Converter<?, ?>> asList(ageWriteConverter(), ageReadConverter()));
|
||||
}
|
||||
|
||||
Converter<Age, DBObject> ageWriteConverter() {
|
||||
return new Converter<Age, DBObject>() {
|
||||
@Override
|
||||
public DBObject convert(Age age) {
|
||||
return new BasicDBObject("v", age.value);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Converter<DBObject, Age> ageReadConverter() {
|
||||
return new Converter<DBObject, Age>() {
|
||||
@Override
|
||||
public Age convert(DBObject dbObject) {
|
||||
return new Age(((Integer) dbObject.get("v")));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
static DBObject getPipelineElementFromAggregationAt(DBObject agg, int index) {
|
||||
return ((List<DBObject>) agg.get("pipeline")).get(index);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
static <T> T getValue(DBObject o, String key) {
|
||||
return (T) o.get(key);
|
||||
}
|
||||
|
||||
private TypeBasedAggregationOperationContext getContext(Class<?> type) {
|
||||
return new TypeBasedAggregationOperationContext(type, context, mapper);
|
||||
}
|
||||
|
||||
@@ -1,3 +1,18 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
@@ -15,6 +30,7 @@ import org.bson.types.Binary;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.Test;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.core.convert.support.GenericConversionService;
|
||||
@@ -26,6 +42,7 @@ import com.mongodb.DBRef;
|
||||
* Unit tests for {@link CustomConversions}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class CustomConversionsUnitTests {
|
||||
|
||||
@@ -197,6 +214,35 @@ public class CustomConversionsUnitTests {
|
||||
assertThat(conversionService.convert(new DateTime(), Date.class), is(new Date(0)));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1001
|
||||
*/
|
||||
@Test
|
||||
public void shouldSelectProperCustomWriteTargetForCglibProxiedType() {
|
||||
|
||||
CustomConversions conversions = new CustomConversions(Arrays.asList(FormatToStringConverter.INSTANCE));
|
||||
assertThat(conversions.getCustomWriteTarget(createProxyTypeFor(Format.class)), is(typeCompatibleWith(String.class)));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1001
|
||||
*/
|
||||
@Test
|
||||
public void shouldSelectProperCustomReadTargetForCglibProxiedType() {
|
||||
|
||||
CustomConversions conversions = new CustomConversions(Arrays.asList(CustomObjectToStringConverter.INSTANCE));
|
||||
assertThat(conversions.hasCustomReadTarget(createProxyTypeFor(Object.class), String.class), is(true));
|
||||
}
|
||||
|
||||
private static Class<?> createProxyTypeFor(Class<?> type) {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
factory.setProxyTargetClass(true);
|
||||
factory.setTargetClass(type);
|
||||
|
||||
return factory.getProxy().getClass();
|
||||
}
|
||||
|
||||
enum FormatToStringConverter implements Converter<Format, String> {
|
||||
INSTANCE;
|
||||
|
||||
@@ -251,4 +297,15 @@ public class CustomConversionsUnitTests {
|
||||
return new Date(0);
|
||||
}
|
||||
}
|
||||
|
||||
enum CustomObjectToStringConverter implements Converter<Object, String> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public String convert(Object source) {
|
||||
return source != null ? source.toString() : null;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -429,6 +429,27 @@ public class DbRefMappingMongoConverterUnitTests {
|
||||
assertProxyIsResolved(result.dbRefEqualsAndHashcodeObjectMethodOverride2, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-987
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotGenerateLazyLoadingProxyForNullValues() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
ClassWithLazyDbRefs lazyDbRefs = new ClassWithLazyDbRefs();
|
||||
lazyDbRefs.id = "42";
|
||||
converter.write(lazyDbRefs, dbo);
|
||||
|
||||
ClassWithLazyDbRefs result = converter.read(ClassWithLazyDbRefs.class, dbo);
|
||||
|
||||
assertThat(result.id, is(lazyDbRefs.id));
|
||||
assertThat(result.dbRefToInterface, is(nullValue()));
|
||||
assertThat(result.dbRefToConcreteCollection, is(nullValue()));
|
||||
assertThat(result.dbRefToConcreteType, is(nullValue()));
|
||||
assertThat(result.dbRefToConcreteTypeWithPersistenceConstructor, is(nullValue()));
|
||||
assertThat(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor, is(nullValue()));
|
||||
}
|
||||
|
||||
private Object transport(Object result) {
|
||||
return SerializationUtils.deserialize(SerializationUtils.serialize(result));
|
||||
}
|
||||
|
||||
@@ -15,10 +15,26 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.springframework.data.mongodb.core.DBObjectTestUtils.*;
|
||||
import static org.hamcrest.Matchers.arrayWithSize;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasItem;
|
||||
import static org.hamcrest.Matchers.hasItems;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertThat;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.times;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
import static org.springframework.data.mongodb.core.DBObjectTestUtils.getAsDBObject;
|
||||
import static org.springframework.data.mongodb.core.DBObjectTestUtils.getTypedValue;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
@@ -45,16 +61,23 @@ import org.hamcrest.Matchers;
|
||||
import org.joda.time.LocalDate;
|
||||
import org.junit.Before;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.beans.ConversionNotSupportedException;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.annotation.PersistenceConstructor;
|
||||
import org.springframework.data.annotation.TypeAlias;
|
||||
import org.springframework.data.convert.ReadingConverter;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.geo.Box;
|
||||
import org.springframework.data.geo.Circle;
|
||||
import org.springframework.data.geo.Distance;
|
||||
@@ -67,6 +90,7 @@ import org.springframework.data.mapping.model.MappingInstantiationException;
|
||||
import org.springframework.data.mongodb.core.DBObjectTestUtils;
|
||||
import org.springframework.data.mongodb.core.convert.DBObjectAccessorUnitTests.NestedType;
|
||||
import org.springframework.data.mongodb.core.convert.DBObjectAccessorUnitTests.ProjectingType;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverterUnitTests.ClassWithMapUsingEnumAsKey.FooBarEnum;
|
||||
import org.springframework.data.mongodb.core.geo.Sphere;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
@@ -98,6 +122,8 @@ public class MappingMongoConverterUnitTests {
|
||||
@Mock ApplicationContext context;
|
||||
@Mock DbRefResolver resolver;
|
||||
|
||||
public @Rule ExpectedException exception = ExpectedException.none();
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
@@ -1398,6 +1424,7 @@ public class MappingMongoConverterUnitTests {
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-812
|
||||
* @see DATAMONGO-893
|
||||
*/
|
||||
@Test
|
||||
public void convertsListToBasicDBListAndRetainsTypeInformationForComplexObjects() {
|
||||
@@ -1407,7 +1434,7 @@ public class MappingMongoConverterUnitTests {
|
||||
address.street = "Foo";
|
||||
|
||||
Object result = converter.convertToMongoType(Collections.singletonList(address),
|
||||
ClassTypeInformation.from(Address.class));
|
||||
ClassTypeInformation.from(InterfaceType.class));
|
||||
|
||||
assertThat(result, is(instanceOf(BasicDBList.class)));
|
||||
|
||||
@@ -1806,6 +1833,138 @@ public class MappingMongoConverterUnitTests {
|
||||
assertThat(result.shape, is((Shape) sphere));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1001
|
||||
*/
|
||||
@Test
|
||||
public void shouldWriteCglibProxiedClassTypeInformationCorrectly() {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
factory.setTargetClass(GenericType.class);
|
||||
factory.setProxyTargetClass(true);
|
||||
|
||||
GenericType<?> proxied = (GenericType<?>) factory.getProxy();
|
||||
BasicDBObject dbo = new BasicDBObject();
|
||||
converter.write(proxied, dbo);
|
||||
|
||||
assertThat(dbo.get("_class"), is((Object) GenericType.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1001
|
||||
*/
|
||||
@Test
|
||||
public void shouldUseTargetObjectOfLazyLoadingProxyWhenWriting() {
|
||||
|
||||
LazyLoadingProxy mock = mock(LazyLoadingProxy.class);
|
||||
|
||||
BasicDBObject dbo = new BasicDBObject();
|
||||
converter.write(mock, dbo);
|
||||
|
||||
verify(mock, times(1)).initialize();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1034
|
||||
*/
|
||||
@Test
|
||||
public void rejectsBasicDbListToBeConvertedIntoComplexType() {
|
||||
|
||||
BasicDBList inner = new BasicDBList();
|
||||
inner.add("key");
|
||||
inner.add("value");
|
||||
|
||||
BasicDBList outer = new BasicDBList();
|
||||
outer.add(inner);
|
||||
outer.add(inner);
|
||||
|
||||
BasicDBObject source = new BasicDBObject("attributes", outer);
|
||||
|
||||
exception.expect(MappingException.class);
|
||||
exception.expectMessage(Item.class.getName());
|
||||
exception.expectMessage(BasicDBList.class.getName());
|
||||
|
||||
converter.read(Item.class, source);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1058
|
||||
*/
|
||||
@Test
|
||||
public void readShouldRespectExplicitFieldNameForDbRef() {
|
||||
|
||||
BasicDBObject source = new BasicDBObject();
|
||||
source.append("explict-name-for-db-ref", new DBRef(mock(DB.class), "foo", "1"));
|
||||
|
||||
converter.read(ClassWithExplicitlyNamedDBRefProperty.class, source);
|
||||
|
||||
verify(resolver, times(1)).resolveDbRef(Mockito.any(MongoPersistentProperty.class), Mockito.any(DBRef.class),
|
||||
Mockito.any(DbRefResolverCallback.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1118
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void convertsMapKeyUsingCustomConverterForAndBackwards() {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext);
|
||||
converter.setCustomConversions(new CustomConversions(Arrays.asList(new FooBarEnumToStringConverter(),
|
||||
new StringToFooNumConverter())));
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
ClassWithMapUsingEnumAsKey source = new ClassWithMapUsingEnumAsKey();
|
||||
source.map = new HashMap<FooBarEnum, String>();
|
||||
source.map.put(FooBarEnum.FOO, "wohoo");
|
||||
|
||||
DBObject target = new BasicDBObject();
|
||||
converter.write(source, target);
|
||||
|
||||
assertThat(converter.read(ClassWithMapUsingEnumAsKey.class, target).map, is(source.map));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1118
|
||||
*/
|
||||
@Test
|
||||
public void writesMapKeyUsingCustomConverter() {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext);
|
||||
converter.setCustomConversions(new CustomConversions(Arrays.asList(new FooBarEnumToStringConverter())));
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
ClassWithMapUsingEnumAsKey source = new ClassWithMapUsingEnumAsKey();
|
||||
source.map = new HashMap<FooBarEnum, String>();
|
||||
source.map.put(FooBarEnum.FOO, "spring");
|
||||
source.map.put(FooBarEnum.BAR, "data");
|
||||
|
||||
DBObject target = new BasicDBObject();
|
||||
converter.write(source, target);
|
||||
|
||||
DBObject map = DBObjectTestUtils.getAsDBObject(target, "map");
|
||||
|
||||
assertThat(map.containsField("foo-enum-value"), is(true));
|
||||
assertThat(map.containsField("bar-enum-value"), is(true));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1118
|
||||
*/
|
||||
@Test
|
||||
public void readsMapKeyUsingCustomConverter() {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext);
|
||||
converter.setCustomConversions(new CustomConversions(Arrays.asList(new StringToFooNumConverter())));
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
DBObject source = new BasicDBObject("map", new BasicDBObject("foo-enum-value", "spring"));
|
||||
|
||||
ClassWithMapUsingEnumAsKey target = converter.read(ClassWithMapUsingEnumAsKey.class, source);
|
||||
|
||||
assertThat(target.map.get(FooBarEnum.FOO), is("spring"));
|
||||
}
|
||||
|
||||
static class GenericType<T> {
|
||||
T content;
|
||||
}
|
||||
@@ -1833,7 +1992,11 @@ public class MappingMongoConverterUnitTests {
|
||||
abstract void method();
|
||||
}
|
||||
|
||||
static class Address {
|
||||
static interface InterfaceType {
|
||||
|
||||
}
|
||||
|
||||
static class Address implements InterfaceType {
|
||||
String street;
|
||||
String city;
|
||||
}
|
||||
@@ -2052,4 +2215,60 @@ public class MappingMongoConverterUnitTests {
|
||||
|
||||
Shape shape;
|
||||
}
|
||||
|
||||
class ClassWithExplicitlyNamedDBRefProperty {
|
||||
|
||||
@Field("explict-name-for-db-ref")//
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef//
|
||||
ClassWithIntId dbRefProperty;
|
||||
|
||||
public ClassWithIntId getDbRefProperty() {
|
||||
return dbRefProperty;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class ClassWithMapUsingEnumAsKey {
|
||||
|
||||
static enum FooBarEnum {
|
||||
FOO, BAR;
|
||||
}
|
||||
|
||||
Map<FooBarEnum, String> map;
|
||||
}
|
||||
|
||||
@WritingConverter
|
||||
static class FooBarEnumToStringConverter implements Converter<FooBarEnum, String> {
|
||||
|
||||
@Override
|
||||
public String convert(FooBarEnum source) {
|
||||
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return FooBarEnum.FOO.equals(source) ? "foo-enum-value" : "bar-enum-value";
|
||||
}
|
||||
}
|
||||
|
||||
@ReadingConverter
|
||||
static class StringToFooNumConverter implements Converter<String, FooBarEnum> {
|
||||
|
||||
@Override
|
||||
public FooBarEnum convert(String source) {
|
||||
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (source.equals("foo-enum-value")) {
|
||||
return FooBarEnum.FOO;
|
||||
}
|
||||
if (source.equals("bar-enum-value")) {
|
||||
return FooBarEnum.BAR;
|
||||
}
|
||||
|
||||
throw new ConversionNotSupportedException(source, String.class, null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,11 +34,14 @@ import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.DBObjectTestUtils;
|
||||
import org.springframework.data.mongodb.core.Person;
|
||||
import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.DBRef;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
@@ -48,6 +51,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.QueryBuilder;
|
||||
|
||||
@@ -568,10 +572,53 @@ public class QueryMapperUnitTests {
|
||||
assertThat(mappedFields, is(notNullValue()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-893
|
||||
*/
|
||||
@Test
|
||||
public void classInformationShouldNotBePresentInDBObjectUsedInFinderMethods() {
|
||||
|
||||
EmbeddedClass embedded = new EmbeddedClass();
|
||||
embedded.id = "1";
|
||||
|
||||
EmbeddedClass embedded2 = new EmbeddedClass();
|
||||
embedded2.id = "2";
|
||||
Query query = query(where("embedded").in(Arrays.asList(embedded, embedded2)));
|
||||
|
||||
DBObject dbo = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class));
|
||||
assertThat(dbo.toString(), equalTo("{ \"embedded\" : { \"$in\" : [ { \"_id\" : \"1\"} , { \"_id\" : \"2\"}]}}"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-647
|
||||
*/
|
||||
@Test
|
||||
public void customizedFieldNameShouldBeMappedCorrectlyWhenApplyingSort() {
|
||||
|
||||
Query query = query(where("field").is("bar")).with(new Sort(Direction.DESC, "field"));
|
||||
DBObject dbo = mapper.getMappedObject(query.getSortObject(), context.getPersistentEntity(CustomizedField.class));
|
||||
assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("foo", -1).get()));
|
||||
}
|
||||
|
||||
@Document
|
||||
public class Foo {
|
||||
@Id private ObjectId id;
|
||||
EmbeddedClass embedded;
|
||||
}
|
||||
|
||||
public class EmbeddedClass {
|
||||
public String id;
|
||||
}
|
||||
|
||||
class IdWrapper {
|
||||
Object id;
|
||||
}
|
||||
|
||||
class ClassWithEmbedded {
|
||||
@Id String id;
|
||||
Sample sample;
|
||||
}
|
||||
|
||||
class ClassWithDefaultId {
|
||||
|
||||
String id;
|
||||
|
||||
@@ -26,6 +26,7 @@ import java.util.List;
|
||||
|
||||
import org.hamcrest.Matcher;
|
||||
import org.hamcrest.collection.IsIterableContainingInOrder;
|
||||
import org.hamcrest.core.IsEqual;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
@@ -37,12 +38,16 @@ import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.DBObjectTestUtils;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
@@ -452,6 +457,115 @@ public class UpdateMapperUnitTests {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-897
|
||||
*/
|
||||
@Test
|
||||
public void updateOnDbrefPropertyOfInterfaceTypeWithoutExplicitGetterForIdShouldBeMappedCorrectly() {
|
||||
|
||||
Update update = new Update().set("referencedDocument", new InterfaceDocumentDefinitionImpl("1", "Foo"));
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(DocumentWithReferenceToInterfaceImpl.class));
|
||||
|
||||
DBObject $set = DBObjectTestUtils.getAsDBObject(mappedObject, "$set");
|
||||
Object model = $set.get("referencedDocument");
|
||||
|
||||
DBRef expectedDBRef = new DBRef(factory.getDb(), "interfaceDocumentDefinitionImpl", "1");
|
||||
assertThat(model, allOf(instanceOf(DBRef.class), IsEqual.<Object> equalTo(expectedDBRef)));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-847
|
||||
*/
|
||||
@Test
|
||||
public void updateMapperConvertsNestedQueryCorrectly() {
|
||||
|
||||
Update update = new Update().pull("list", Query.query(Criteria.where("value").in("foo", "bar")));
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ParentClass.class));
|
||||
|
||||
DBObject $pull = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$pull");
|
||||
DBObject list = DBObjectTestUtils.getAsDBObject($pull, "aliased");
|
||||
DBObject value = DBObjectTestUtils.getAsDBObject(list, "value");
|
||||
BasicDBList $in = DBObjectTestUtils.getAsDBList(value, "$in");
|
||||
|
||||
assertThat($in, IsIterableContainingInOrder.<Object> contains("foo", "bar"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-847
|
||||
*/
|
||||
@Test
|
||||
public void updateMapperConvertsPullWithNestedQueryOnDBRefCorrectly() {
|
||||
|
||||
Update update = new Update().pull("dbRefAnnotatedList", Query.query(Criteria.where("id").is("1")));
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(DocumentWithDBRefCollection.class));
|
||||
|
||||
DBObject $pull = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$pull");
|
||||
DBObject list = DBObjectTestUtils.getAsDBObject($pull, "dbRefAnnotatedList");
|
||||
|
||||
assertThat(list, equalTo(new BasicDBObjectBuilder().add("_id", "1").get()));
|
||||
}

    @org.springframework.data.mongodb.core.mapping.Document(collection = "DocumentWithReferenceToInterface")
    static interface DocumentWithReferenceToInterface {

        String getId();

        InterfaceDocumentDefinitionWithoutId getReferencedDocument();

    }

    static interface InterfaceDocumentDefinitionWithoutId {

        String getValue();
    }

    static class InterfaceDocumentDefinitionImpl implements InterfaceDocumentDefinitionWithoutId {

        @Id String id;
        String value;

        public InterfaceDocumentDefinitionImpl(String id, String value) {

            this.id = id;
            this.value = value;
        }

        @Override
        public String getValue() {
            return this.value;
        }

    }

    static class DocumentWithReferenceToInterfaceImpl implements DocumentWithReferenceToInterface {

        private @Id String id;

        @org.springframework.data.mongodb.core.mapping.DBRef//
        private InterfaceDocumentDefinitionWithoutId referencedDocument;

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }

        public void setModel(InterfaceDocumentDefinitionWithoutId referencedDocument) {
            this.referencedDocument = referencedDocument;
        }

        @Override
        public InterfaceDocumentDefinitionWithoutId getReferencedDocument() {
            return this.referencedDocument;
        }

    }

    static interface Model {}

    static class ModelImpl implements Model {

@@ -15,9 +15,10 @@
 */
package org.springframework.data.mongodb.core.geo;

import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.util.List;
import java.util.Map;

import org.junit.Before;
@@ -27,10 +28,13 @@ import org.springframework.dao.DataAccessException;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.config.AbstractIntegrationTests;
import org.springframework.data.mongodb.core.CollectionCallback;
import org.springframework.data.mongodb.core.IndexOperations;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.WriteResultChecking;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexed;
import org.springframework.data.mongodb.core.index.IndexInfo;
import org.springframework.data.mongodb.core.mapping.Document;

import com.mongodb.DBCollection;
import com.mongodb.DBObject;
@@ -43,6 +47,7 @@ import com.mongodb.WriteConcern;
 * @author Laurent Canet
 * @author Oliver Gierke
 * @author Thomas Darimont
 * @author Christoph Strobl
 */
public class GeoSpatialIndexTests extends AbstractIntegrationTests {

@@ -97,6 +102,29 @@ public class GeoSpatialIndexTests extends AbstractIntegrationTests {
        }
    }

    /**
     * @see DATAMONGO-827
     */
    @Test
    public void useGeneratedNameShouldGenerateAnIndexName() {

        try {

            GeoSpatialEntity2dWithGeneratedIndex geo = new GeoSpatialEntity2dWithGeneratedIndex(45.2, 4.6);
            template.save(geo);

            IndexOperations indexOps = template.indexOps(GeoSpatialEntity2dWithGeneratedIndex.class);
            List<IndexInfo> indexInfo = indexOps.getIndexInfo();

            assertThat(indexInfo, hasSize(2));
            assertThat(indexInfo.get(1), is(notNullValue()));
            assertThat(indexInfo.get(1).getName(), is("location_2d"));

        } finally {
            template.dropCollection(GeoSpatialEntity2D.class);
        }
    }

    /**
     * Returns whether an index with the given name exists for the given entity type.
     *
@@ -129,6 +157,7 @@ public class GeoSpatialIndexTests extends AbstractIntegrationTests {
        });
    }

    @Document
    static class GeoSpatialEntity2D {
        public String id;
        @GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2D) public org.springframework.data.geo.Point location;
@@ -138,6 +167,7 @@ public class GeoSpatialIndexTests extends AbstractIntegrationTests {
        }
    }

    @Document
    static class GeoSpatialEntityHaystack {
        public String id;
        public String name;
@@ -154,6 +184,7 @@ public class GeoSpatialIndexTests extends AbstractIntegrationTests {
        double coordinates[];
    }

    @Document
    static class GeoSpatialEntity2DSphere {
        public String id;
        @GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE) public GeoJsonPoint location;
@@ -163,4 +194,15 @@ public class GeoSpatialIndexTests extends AbstractIntegrationTests {
            this.location.coordinates = new double[] { x, y };
        }
    }

    @Document
    static class GeoSpatialEntity2dWithGeneratedIndex {

        public String id;
        @GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2D, useGeneratedName = true) public Point location;

        public GeoSpatialEntity2dWithGeneratedIndex(double x, double y) {
            this.location = new Point(x, y);
        }
    }
}

@@ -1,5 +1,5 @@
/*
 * Copyright (c) 2011 by the original author(s).
 * Copyright (c) 2011-2014 by the original author(s).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,8 +15,8 @@
 */
package org.springframework.data.mongodb.core.index;

import static org.junit.Assert.*;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;

import org.junit.After;
import org.junit.Test;
@@ -25,7 +25,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.core.CollectionCallback;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@@ -38,13 +38,13 @@ import com.mongodb.MongoException;
 * Integration tests for index handling.
 *
 * @author Oliver Gierke
 * @author Christoph Strobl
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:infrastructure.xml")
public class IndexingIntegrationTests {

    @Autowired
    MongoOperations operations;
    @Autowired MongoOperations operations;

    @After
    public void tearDown() {
@@ -61,11 +61,10 @@ public class IndexingIntegrationTests {
        assertThat(hasIndex("_firstname", IndexedPerson.class), is(true));
    }

    @Document
    class IndexedPerson {

        @Field("_firstname")
        @Indexed
        String firstname;
        @Field("_firstname") @Indexed String firstname;
    }

    /**

@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012-2013 the original author or authors.
|
||||
* Copyright 2012-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,22 +17,31 @@ package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
|
||||
import org.hamcrest.core.IsEqual;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mapping.context.MappingContextEvent;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
@@ -41,24 +50,46 @@ import com.mongodb.DBObject;
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Johno Crawford
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class MongoPersistentEntityIndexCreatorUnitTests {
|
||||
|
||||
@Mock MongoDbFactory factory;
|
||||
@Mock ApplicationContext context;
|
||||
private @Mock MongoDbFactory factory;
|
||||
private @Mock ApplicationContext context;
|
||||
private @Mock DB db;
|
||||
private @Mock DBCollection collection;
|
||||
|
||||
ArgumentCaptor<DBObject> keysCaptor;
|
||||
ArgumentCaptor<DBObject> optionsCaptor;
|
||||
ArgumentCaptor<String> collectionCaptor;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
keysCaptor = ArgumentCaptor.forClass(DBObject.class);
|
||||
optionsCaptor = ArgumentCaptor.forClass(DBObject.class);
|
||||
collectionCaptor = ArgumentCaptor.forClass(String.class);
|
||||
|
||||
when(factory.getDb()).thenReturn(db);
|
||||
when(db.getCollection(collectionCaptor.capture())).thenReturn(collection);
|
||||
|
||||
doNothing().when(collection).createIndex(keysCaptor.capture(), optionsCaptor.capture());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void buildsIndexDefinitionUsingFieldName() {
|
||||
|
||||
MongoMappingContext mappingContext = prepareMappingContext(Person.class);
|
||||
DummyMongoPersistentEntityIndexCreator creator = new DummyMongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
|
||||
assertThat(creator.indexDefinition, is(notNullValue()));
|
||||
assertThat(creator.indexDefinition.keySet(), hasItem("fieldname"));
|
||||
assertThat(creator.name, is("indexName"));
|
||||
assertThat(creator.background, is(false));
|
||||
assertThat(creator.expireAfterSeconds, is(-1));
|
||||
new MongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
|
||||
assertThat(keysCaptor.getValue(), is(notNullValue()));
|
||||
assertThat(keysCaptor.getValue().keySet(), hasItem("fieldname"));
|
||||
assertThat(optionsCaptor.getValue().get("name").toString(), is("indexName"));
|
||||
assertThat(optionsCaptor.getValue().get("background"), nullValue());
|
||||
assertThat(optionsCaptor.getValue().get("expireAfterSeconds"), nullValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -67,7 +98,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
|
||||
MongoMappingContext mappingContext = new MongoMappingContext();
|
||||
MongoMappingContext personMappingContext = prepareMappingContext(Person.class);
|
||||
|
||||
DummyMongoPersistentEntityIndexCreator creator = new DummyMongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
MongoPersistentEntityIndexCreator creator = new MongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
|
||||
MongoPersistentEntity<?> entity = personMappingContext.getPersistentEntity(Person.class);
|
||||
MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty> event = new MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>(
|
||||
@@ -75,7 +106,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
|
||||
|
||||
creator.onApplicationEvent(event);
|
||||
|
||||
assertThat(creator.indexDefinition, is(nullValue()));
|
||||
verifyZeroInteractions(collection);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -87,7 +118,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
|
||||
MongoMappingContext mappingContext = new MongoMappingContext();
|
||||
mappingContext.initialize();
|
||||
|
||||
MongoPersistentEntityIndexCreator creator = new DummyMongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
MongoPersistentEntityIndexCreator creator = new MongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
assertThat(creator.isIndexCreatorFor(mappingContext), is(true));
|
||||
assertThat(creator.isIndexCreatorFor(new MongoMappingContext()), is(false));
|
||||
}
|
||||
@@ -99,12 +130,13 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
|
||||
public void triggersBackgroundIndexingIfConfigured() {
|
||||
|
||||
MongoMappingContext mappingContext = prepareMappingContext(AnotherPerson.class);
|
||||
DummyMongoPersistentEntityIndexCreator creator = new DummyMongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
new MongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
|
||||
assertThat(creator.indexDefinition, is(notNullValue()));
|
||||
assertThat(creator.indexDefinition.keySet(), hasItem("lastname"));
|
||||
assertThat(creator.name, is("lastname"));
|
||||
assertThat(creator.background, is(true));
|
||||
assertThat(keysCaptor.getValue(), is(notNullValue()));
|
||||
assertThat(keysCaptor.getValue().keySet(), hasItem("lastname"));
|
||||
assertThat(optionsCaptor.getValue().get("name").toString(), is("lastname"));
|
||||
assertThat(optionsCaptor.getValue().get("background"), IsEqual.<Object> equalTo(true));
|
||||
assertThat(optionsCaptor.getValue().get("expireAfterSeconds"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -114,11 +146,69 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
|
||||
public void expireAfterSecondsIfConfigured() {
|
||||
|
||||
MongoMappingContext mappingContext = prepareMappingContext(Milk.class);
|
||||
DummyMongoPersistentEntityIndexCreator creator = new DummyMongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
new MongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
|
||||
assertThat(creator.indexDefinition, is(notNullValue()));
|
||||
assertThat(creator.indexDefinition.keySet(), hasItem("expiry"));
|
||||
assertThat(creator.expireAfterSeconds, is(60));
|
||||
assertThat(keysCaptor.getValue(), is(notNullValue()));
|
||||
assertThat(keysCaptor.getValue().keySet(), hasItem("expiry"));
|
||||
assertThat(optionsCaptor.getValue().get("expireAfterSeconds"), IsEqual.<Object> equalTo(60L));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void createsNotNestedGeoSpatialIndexCorrectly() {
|
||||
|
||||
MongoMappingContext mappingContext = prepareMappingContext(Wrapper.class);
|
||||
new MongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
|
||||
assertThat(keysCaptor.getValue(), equalTo(new BasicDBObjectBuilder().add("company.address.location", "2d").get()));
|
||||
assertThat(optionsCaptor.getValue(), equalTo(new BasicDBObjectBuilder().add("name", "company.address.location")
|
||||
.add("min", -180).add("max", 180).add("bits", 26).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-827
|
||||
*/
|
||||
@Test
|
||||
public void autoGeneratedIndexNameShouldGenerateNoName() {
|
||||
|
||||
MongoMappingContext mappingContext = prepareMappingContext(EntityWithGeneratedIndexName.class);
|
||||
new MongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
|
||||
assertThat(keysCaptor.getValue().containsField("name"), is(false));
|
||||
assertThat(keysCaptor.getValue().keySet(), hasItem("lastname"));
|
||||
assertThat(optionsCaptor.getValue(), is(new BasicDBObjectBuilder().get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-367
|
||||
*/
|
||||
@Test
|
||||
public void indexCreationShouldNotCreateNewCollectionForNestedGeoSpatialIndexStructures() {
|
||||
|
||||
MongoMappingContext mappingContext = prepareMappingContext(Wrapper.class);
|
||||
new MongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
|
||||
ArgumentCaptor<String> collectionNameCapturer = ArgumentCaptor.forClass(String.class);
|
||||
|
||||
verify(db, times(1)).getCollection(collectionNameCapturer.capture());
|
||||
assertThat(collectionNameCapturer.getValue(), equalTo("wrapper"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-367
|
||||
*/
|
||||
@Test
|
||||
public void indexCreationShouldNotCreateNewCollectionForNestedIndexStructures() {
|
||||
|
||||
MongoMappingContext mappingContext = prepareMappingContext(IndexedDocumentWrapper.class);
|
||||
new MongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
|
||||
ArgumentCaptor<String> collectionNameCapturer = ArgumentCaptor.forClass(String.class);
|
||||
|
||||
verify(db, times(1)).getCollection(collectionNameCapturer.capture());
|
||||
assertThat(collectionNameCapturer.getValue(), equalTo("indexedDocumentWrapper"));
|
||||
}
|
||||
|
||||
private static MongoMappingContext prepareMappingContext(Class<?> type) {
|
||||
@@ -130,41 +220,63 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
|
||||
return mappingContext;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class Person {
|
||||
|
||||
@Indexed(name = "indexName") @Field("fieldname") String field;
|
||||
@Indexed(name = "indexName")//
|
||||
@Field("fieldname")//
|
||||
String field;
|
||||
|
||||
}
|
||||
|
||||
@Document
|
||||
static class AnotherPerson {
|
||||
|
||||
@Indexed(background = true) String lastname;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class Milk {
|
||||
|
||||
@Indexed(expireAfterSeconds = 60) Date expiry;
|
||||
}
|
||||
|
||||
static class DummyMongoPersistentEntityIndexCreator extends MongoPersistentEntityIndexCreator {
|
||||
@Document
|
||||
static class Wrapper {
|
||||
|
||||
String id;
|
||||
Company company;
|
||||
|
||||
}
|
||||
|
||||
static class Company {
|
||||
|
||||
DBObject indexDefinition;
|
||||
String name;
|
||||
boolean background;
|
||||
int expireAfterSeconds;
|
||||
Address address;
|
||||
}
|
||||
|
||||
public DummyMongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, MongoDbFactory mongoDbFactory) {
|
||||
super(mappingContext, mongoDbFactory);
|
||||
}
|
||||
static class Address {
|
||||
|
||||
@Override
|
||||
protected void ensureIndex(String collection, String name, DBObject indexDefinition, boolean unique,
|
||||
boolean dropDups, boolean sparse, boolean background, int expireAfterSeconds) {
|
||||
String street;
|
||||
String city;
|
||||
|
||||
this.name = name;
|
||||
this.indexDefinition = indexDefinition;
|
||||
this.background = background;
|
||||
this.expireAfterSeconds = expireAfterSeconds;
|
||||
}
|
||||
@GeoSpatialIndexed Point location;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class IndexedDocumentWrapper {
|
||||
|
||||
IndexedDocument indexedDocument;
|
||||
}
|
||||
|
||||
static class IndexedDocument {
|
||||
|
||||
@Indexed String indexedValue;
|
||||
}
|
||||
|
||||
@Document
|
||||
class EntityWithGeneratedIndexName {
|
||||
|
||||
@Indexed(useGeneratedName = true, name = "ignored") String lastname;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,882 @@
|
||||
/*
|
||||
* Copyright 2014-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import static org.hamcrest.collection.IsCollectionWithSize.*;
|
||||
import static org.hamcrest.collection.IsEmptyCollection.*;
|
||||
import static org.hamcrest.core.IsEqual.*;
|
||||
import static org.hamcrest.core.IsInstanceOf.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.hamcrest.collection.IsEmptyCollection;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Suite;
|
||||
import org.junit.runners.Suite.SuiteClasses;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolverUnitTests.CompoundIndexResolutionTests;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolverUnitTests.GeoSpatialIndexResolutionTests;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolverUnitTests.IndexResolutionTests;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolverUnitTests.MixedIndexResolutionTests;
|
||||
import org.springframework.data.mongodb.core.mapping.DBRef;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntityTestDummy.MongoPersistentEntityDummyBuilder;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(Suite.class)
|
||||
@SuiteClasses({ IndexResolutionTests.class, GeoSpatialIndexResolutionTests.class, CompoundIndexResolutionTests.class,
|
||||
MixedIndexResolutionTests.class })
|
||||
public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
|
||||
/**
|
||||
* Test resolution of {@link Indexed}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class IndexResolutionTests {
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void indexPathOnLevelZeroIsResolvedCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(IndexOnLevelZero.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertIndexPathAndCollection("indexedProperty", "Zero", indexDefinitions.get(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void indexPathOnLevelOneIsResolvedCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(IndexOnLevelOne.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertIndexPathAndCollection("zero.indexedProperty", "One", indexDefinitions.get(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void depplyNestedIndexPathIsResolvedCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(IndexOnLevelTwo.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertIndexPathAndCollection("one.zero.indexedProperty", "Two", indexDefinitions.get(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void resolvesIndexPathNameForNamedPropertiesCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(IndexOnLevelOneWithExplicitlyNamedField.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertIndexPathAndCollection("customZero.customFieldName", "indexOnLevelOneWithExplicitlyNamedField",
|
||||
indexDefinitions.get(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void resolvesIndexDefinitionCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(IndexOnLevelZero.class);
|
||||
|
||||
IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition();
|
||||
assertThat(indexDefinition.getIndexOptions(), equalTo(new BasicDBObjectBuilder().add("name", "indexedProperty")
|
||||
.get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void resolvesIndexDefinitionOptionsCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(WithOptionsOnIndexedProperty.class);
|
||||
|
||||
IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition();
|
||||
assertThat(indexDefinition.getIndexOptions(),
|
||||
equalTo(new BasicDBObjectBuilder().add("name", "indexedProperty").add("unique", true).add("dropDups", true)
|
||||
.add("sparse", true).add("background", true).add("expireAfterSeconds", 10L).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void resolvesIndexCollectionNameCorrectlyWhenDefinedInAnnotation() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(WithOptionsOnIndexedProperty.class);
|
||||
assertThat(indexDefinitions.get(0).getCollection(), equalTo("CollectionOverride"));
|
||||
}
|
||||
|
||||
@Document(collection = "Zero")
|
||||
static class IndexOnLevelZero {
|
||||
@Indexed String indexedProperty;
|
||||
}
|
||||
|
||||
@Document(collection = "One")
|
||||
static class IndexOnLevelOne {
|
||||
IndexOnLevelZero zero;
|
||||
}
|
||||
|
||||
@Document(collection = "Two")
|
||||
static class IndexOnLevelTwo {
|
||||
IndexOnLevelOne one;
|
||||
}
|
||||
|
||||
@Document(collection = "WithOptionsOnIndexedProperty")
|
||||
static class WithOptionsOnIndexedProperty {
|
||||
|
||||
@Indexed(background = true, collection = "CollectionOverride", direction = IndexDirection.DESCENDING,
|
||||
dropDups = true, expireAfterSeconds = 10, sparse = true, unique = true)//
|
||||
String indexedProperty;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class IndexOnLevelOneWithExplicitlyNamedField {
|
||||
|
||||
@Field("customZero") IndexOnLevelZeroWithExplicityNamedField zero;
|
||||
}
|
||||
|
||||
static class IndexOnLevelZeroWithExplicityNamedField {
|
||||
|
||||
@Indexed @Field("customFieldName") String namedProperty;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Test resolution of {@link GeoSpatialIndexed}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class GeoSpatialIndexResolutionTests {
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void geoSpatialIndexPathOnLevelZeroIsResolvedCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(GeoSpatialIndexOnLevelZero.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertIndexPathAndCollection("geoIndexedProperty", "Zero", indexDefinitions.get(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void geoSpatialIndexPathOnLevelOneIsResolvedCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(GeoSpatialIndexOnLevelOne.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertIndexPathAndCollection("zero.geoIndexedProperty", "One", indexDefinitions.get(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void depplyNestedGeoSpatialIndexPathIsResolvedCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(GeoSpatialIndexOnLevelTwo.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertIndexPathAndCollection("one.zero.geoIndexedProperty", "Two", indexDefinitions.get(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void resolvesIndexDefinitionOptionsCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(WithOptionsOnGeoSpatialIndexProperty.class);
|
||||
|
||||
IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition();
|
||||
|
||||
assertThat(
|
||||
indexDefinition.getIndexOptions(),
|
||||
equalTo(new BasicDBObjectBuilder().add("name", "location").add("min", 1).add("max", 100).add("bits", 2).get()));
|
||||
}
|
||||
|
||||
@Document(collection = "Zero")
|
||||
static class GeoSpatialIndexOnLevelZero {
|
||||
@GeoSpatialIndexed Point geoIndexedProperty;
|
||||
}
|
||||
|
||||
@Document(collection = "One")
|
||||
static class GeoSpatialIndexOnLevelOne {
|
||||
GeoSpatialIndexOnLevelZero zero;
|
||||
}
|
||||
|
||||
@Document(collection = "Two")
|
||||
static class GeoSpatialIndexOnLevelTwo {
|
||||
GeoSpatialIndexOnLevelOne one;
|
||||
}
|
||||
|
||||
@Document(collection = "WithOptionsOnGeoSpatialIndexProperty")
|
||||
static class WithOptionsOnGeoSpatialIndexProperty {
|
||||
|
||||
@GeoSpatialIndexed(collection = "CollectionOverride", bits = 2, max = 100, min = 1,
|
||||
type = GeoSpatialIndexType.GEO_2D)//
|
||||
Point location;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Test resolution of {@link CompoundIndexes}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class CompoundIndexResolutionTests {
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void compoundIndexPathOnLevelZeroIsResolvedCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(CompoundIndexOnLevelZero.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertIndexPathAndCollection(new String[] { "foo", "bar" }, "CompoundIndexOnLevelZero", indexDefinitions.get(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void compoundIndexOptionsResolvedCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(CompoundIndexOnLevelZero.class);
|
||||
|
||||
IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition();
|
||||
assertThat(indexDefinition.getIndexOptions(), equalTo(new BasicDBObjectBuilder().add("name", "compound_index")
|
||||
.add("unique", true).add("dropDups", true).add("sparse", true).add("background", true).get()));
|
||||
assertThat(indexDefinition.getIndexKeys(), equalTo(new BasicDBObjectBuilder().add("foo", 1).add("bar", -1).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-909
|
||||
*/
|
||||
@Test
|
||||
public void compoundIndexOnSuperClassResolvedCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(IndexDefinedOnSuperClass.class);
|
||||
|
||||
IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition();
|
||||
assertThat(indexDefinition.getIndexOptions(), equalTo(new BasicDBObjectBuilder().add("name", "compound_index")
|
||||
.add("unique", true).add("dropDups", true).add("sparse", true).add("background", true).get()));
|
||||
assertThat(indexDefinition.getIndexKeys(), equalTo(new BasicDBObjectBuilder().add("foo", 1).add("bar", -1).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-827
|
||||
*/
|
||||
@Test
|
||||
public void compoundIndexDoesNotSpecifyNameWhenUsingGenerateName() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(ComountIndexWithAutogeneratedName.class);
|
||||
|
||||
IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition();
|
||||
assertThat(
|
||||
indexDefinition.getIndexOptions(),
|
||||
equalTo(new BasicDBObjectBuilder().add("unique", true).add("dropDups", true).add("sparse", true)
|
||||
.add("background", true).get()));
|
||||
assertThat(indexDefinition.getIndexKeys(), equalTo(new BasicDBObjectBuilder().add("foo", 1).add("bar", -1).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-929
|
||||
*/
|
||||
@Test
|
||||
public void compoundIndexPathOnLevelOneIsResolvedCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(CompoundIndexOnLevelOne.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertIndexPathAndCollection(new String[] { "zero.foo", "zero.bar" }, "CompoundIndexOnLevelOne",
|
||||
indexDefinitions.get(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-929
|
||||
*/
|
||||
@Test
|
||||
public void emptyCompoundIndexPathOnLevelOneIsResolvedCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(CompoundIndexOnLevelOneWithEmptyIndexDefinition.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertIndexPathAndCollection(new String[] { "zero" }, "CompoundIndexOnLevelZeroWithEmptyIndexDef",
|
||||
indexDefinitions.get(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-929
|
||||
*/
|
||||
@Test
|
||||
public void singleCompoundIndexPathOnLevelZeroIsResolvedCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(SingleCompoundIndex.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertIndexPathAndCollection(new String[] { "foo", "bar" }, "CompoundIndexOnLevelZero", indexDefinitions.get(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-963
|
||||
*/
|
||||
@Test
|
||||
public void compoundIndexShouldIncludeTTLWhenConsistingOfOnlyOneKey() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(CompoundIndexWithOnlyOneKeyAndTTL.class);
|
||||
|
||||
IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition();
|
||||
assertThat(
|
||||
indexDefinition.getIndexOptions(),
|
||||
equalTo(new BasicDBObjectBuilder().add("unique", true).add("dropDups", true).add("sparse", true)
|
||||
.add("background", true).add("expireAfterSeconds", 10L).get()));
|
||||
assertThat(indexDefinition.getIndexKeys(), equalTo(new BasicDBObjectBuilder().add("foo", 1).get()));
|
||||
}
|
||||
|
||||
@Document(collection = "CompoundIndexOnLevelOne")
|
||||
static class CompoundIndexOnLevelOne {
|
||||
|
||||
CompoundIndexOnLevelZero zero;
|
||||
}
|
||||
|
||||
@Document(collection = "CompoundIndexOnLevelZeroWithEmptyIndexDef")
|
||||
static class CompoundIndexOnLevelOneWithEmptyIndexDefinition {
|
||||
|
||||
CompoundIndexOnLevelZeroWithEmptyIndexDef zero;
|
||||
}
|
||||
|
||||
@Document(collection = "CompoundIndexOnLevelZero")
|
||||
@CompoundIndexes({ @CompoundIndex(name = "compound_index", def = "{'foo': 1, 'bar': -1}", background = true,
|
||||
dropDups = true, expireAfterSeconds = 10, sparse = true, unique = true) })
|
||||
static class CompoundIndexOnLevelZero {}
|
||||
|
||||
@CompoundIndexes({ @CompoundIndex(name = "compound_index", background = true, dropDups = true,
|
||||
expireAfterSeconds = 10, sparse = true, unique = true) })
|
||||
static class CompoundIndexOnLevelZeroWithEmptyIndexDef {}
|
||||
|
||||
@Document(collection = "CompoundIndexOnLevelZero")
|
||||
@CompoundIndex(name = "compound_index", def = "{'foo': 1, 'bar': -1}", background = true, dropDups = true,
|
||||
expireAfterSeconds = 10, sparse = true, unique = true)
|
||||
static class SingleCompoundIndex {}
|
||||
|
||||
static class IndexDefinedOnSuperClass extends CompoundIndexOnLevelZero {
|
||||
|
||||
}
|
||||
|
||||
@Document(collection = "ComountIndexWithAutogeneratedName")
|
||||
@CompoundIndexes({ @CompoundIndex(useGeneratedName = true, def = "{'foo': 1, 'bar': -1}", background = true,
|
||||
dropDups = true, expireAfterSeconds = 10, sparse = true, unique = true) })
|
||||
static class ComountIndexWithAutogeneratedName {
|
||||
|
||||
}
|
||||
|
||||
@Document(collection = "CompoundIndexWithOnlyOneKeyAndTTL")
|
||||
@CompoundIndex(def = "{'foo': 1}", background = true, dropDups = true, expireAfterSeconds = 10, sparse = true,
|
||||
unique = true)
|
||||
static class CompoundIndexWithOnlyOneKeyAndTTL {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
public static class MixedIndexResolutionTests {
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void multipleIndexesResolvedCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(MixedIndexRoot.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(2));
|
||||
assertThat(indexDefinitions.get(0).getIndexDefinition(), instanceOf(Index.class));
|
||||
assertThat(indexDefinitions.get(1).getIndexDefinition(), instanceOf(GeospatialIndex.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void cyclicPropertyReferenceOverDBRefShouldNotBeTraversed() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(Inner.class);
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertThat(indexDefinitions.get(0).getIndexDefinition().getIndexKeys(),
|
||||
equalTo(new BasicDBObjectBuilder().add("outer", 1).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-899
|
||||
*/
|
||||
@Test
|
||||
public void associationsShouldNotBeTraversed() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(Outer.class);
|
||||
assertThat(indexDefinitions, empty());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-926
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotRunIntoStackOverflow() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(CycleStartingInBetween.class);
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-926
|
||||
*/
|
||||
@Test
|
||||
public void indexShouldBeFoundEvenForCyclePropertyReferenceOnLevelZero() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(CycleLevelZero.class);
|
||||
assertIndexPathAndCollection("indexedProperty", "cycleLevelZero", indexDefinitions.get(0));
|
||||
assertIndexPathAndCollection("cyclicReference.indexedProperty", "cycleLevelZero", indexDefinitions.get(1));
|
||||
assertThat(indexDefinitions, hasSize(2));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-926
|
||||
*/
|
||||
@Test
|
||||
public void indexShouldBeFoundEvenForCyclePropertyReferenceOnLevelOne() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(CycleOnLevelOne.class);
|
||||
assertIndexPathAndCollection("reference.indexedProperty", "cycleOnLevelOne", indexDefinitions.get(0));
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-926
|
||||
*/
|
||||
@Test
|
||||
public void indexBeResolvedCorrectlyWhenPropertiesOfDifferentTypesAreNamedEqually() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(NoCycleButIdenticallyNamedProperties.class);
|
||||
assertIndexPathAndCollection("foo", "noCycleButIdenticallyNamedProperties", indexDefinitions.get(0));
|
||||
assertIndexPathAndCollection("reference.foo", "noCycleButIdenticallyNamedProperties", indexDefinitions.get(1));
|
||||
assertIndexPathAndCollection("reference.deep.foo", "noCycleButIdenticallyNamedProperties",
|
||||
indexDefinitions.get(2));
|
||||
assertThat(indexDefinitions, hasSize(3));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-949
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotDetectCycleInSimilarlyNamedProperties() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(SimilarityHolingBean.class);
|
||||
assertIndexPathAndCollection("norm", "similarityHolingBean", indexDefinitions.get(0));
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-962
|
||||
*/
|
||||
@Test
|
||||
public void shouldDetectSelfCycleViaCollectionTypeCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(SelfCyclingViaCollectionType.class);
|
||||
assertThat(indexDefinitions, IsEmptyCollection.empty());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-962
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotDetectCycleWhenTypeIsUsedMoreThanOnce() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(MultipleObjectsOfSameType.class);
|
||||
assertThat(indexDefinitions, IsEmptyCollection.empty());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-962
|
||||
*/
|
||||
@Test
|
||||
public void shouldCatchCyclicReferenceExceptionOnRoot() {
|
||||
|
||||
Document documentDummy = new Document() {
|
||||
|
||||
@Override
|
||||
public Class<? extends Annotation> annotationType() {
|
||||
return Document.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String collection() {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
MongoPersistentProperty propertyMock = mock(MongoPersistentProperty.class);
|
||||
when(propertyMock.isEntity()).thenReturn(true);
|
||||
when(propertyMock.getActualType()).thenThrow(
|
||||
new MongoPersistentEntityIndexResolver.CyclicPropertyReferenceException("foo", Object.class, "bar"));
|
||||
|
||||
MongoPersistentEntity<SelfCyclingViaCollectionType> dummy = MongoPersistentEntityDummyBuilder
|
||||
.forClass(SelfCyclingViaCollectionType.class).withCollection("foo").and(propertyMock)
|
||||
.and(documentDummy).build();
|
||||
|
||||
new MongoPersistentEntityIndexResolver(prepareMappingContext(SelfCyclingViaCollectionType.class))
|
||||
.resolveIndexForEntity(dummy);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1025
|
||||
*/
|
||||
@Test
|
||||
public void shouldUsePathIndexAsIndexNameForDocumentsHavingNamedNestedCompoundIndexFixedOnCollection() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithNestedDocumentHavingNamedCompoundIndex.class);
|
||||
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"),
|
||||
equalTo("propertyOfTypeHavingNamedCompoundIndex.c_index"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1025
|
||||
*/
|
||||
@Test
|
||||
public void shouldUseIndexNameForNestedTypesWithNamedCompoundIndexDefinition() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithNestedTypeHavingNamedCompoundIndex.class);
|
||||
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"),
|
||||
equalTo("propertyOfTypeHavingNamedCompoundIndex.c_index"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1025
|
||||
*/
|
||||
@Test
|
||||
public void shouldUsePathIndexAsIndexNameForDocumentsHavingNamedNestedIndexFixedOnCollection() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithNestedDocumentHavingNamedIndex.class);
|
||||
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"),
|
||||
equalTo("propertyOfTypeHavingNamedIndex.property_index"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1025
|
||||
*/
|
||||
@Test
|
||||
public void shouldUseIndexNameForNestedTypesWithNamedIndexDefinition() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithNestedTypeHavingNamedIndex.class);
|
||||
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"),
|
||||
equalTo("propertyOfTypeHavingNamedIndex.property_index"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1025
|
||||
*/
|
||||
@Test
|
||||
public void shouldUseIndexNameOnRootLevel() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithNamedIndex.class);
|
||||
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("property_index"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1087
|
||||
*/
|
||||
@Test
|
||||
public void shouldAllowMultiplePropertiesOfSameTypeWithMatchingStartLettersOnRoot() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(MultiplePropertiesOfSameTypeWithMatchingStartLetters.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(2));
|
||||
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("name.component"));
|
||||
assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"), equalTo("nameLast.component"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1087
|
||||
*/
|
||||
@Test
|
||||
public void shouldAllowMultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(MultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(2));
|
||||
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("component.nameLast"));
|
||||
assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"), equalTo("component.name"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1121
|
||||
*/
|
||||
@Test
|
||||
public void shouldOnlyConsiderEntitiesAsPotentialCycleCandidates() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(OuterDocumentReferingToIndexedPropertyViaDifferentNonCyclingPaths.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(2));
|
||||
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("path1.foo"));
|
||||
assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"),
|
||||
equalTo("path2.propertyWithIndexedStructure.foo"));
|
||||
|
||||
}
|
||||
|
||||
@Document
|
||||
static class MixedIndexRoot {
|
||||
|
||||
@Indexed String first;
|
||||
NestedGeoIndex nestedGeo;
|
||||
}
|
||||
|
||||
static class NestedGeoIndex {
|
||||
|
||||
@GeoSpatialIndexed Point location;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class Outer {
|
||||
|
||||
@DBRef Inner inner;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class Inner {
|
||||
|
||||
@Indexed Outer outer;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class CycleLevelZero {
|
||||
|
||||
@Indexed String indexedProperty;
|
||||
CycleLevelZero cyclicReference;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class CycleOnLevelOne {
|
||||
|
||||
CycleOnLevelOneReferenced reference;
|
||||
}
|
||||
|
||||
static class CycleOnLevelOneReferenced {
|
||||
|
||||
@Indexed String indexedProperty;
|
||||
CycleOnLevelOne cyclicReference;
|
||||
}
|
||||
|
||||
@Document
|
||||
public static class CycleStartingInBetween {
|
||||
|
||||
CycleOnLevelOne referenceToCycleStart;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class NoCycleButIdenticallyNamedProperties {
|
||||
|
||||
@Indexed String foo;
|
||||
NoCycleButIdenticallyNamedPropertiesNested reference;
|
||||
}
|
||||
|
||||
static class NoCycleButIdenticallyNamedPropertiesNested {
|
||||
|
||||
@Indexed String foo;
|
||||
NoCycleButIndenticallNamedPropertiesDeeplyNested deep;
|
||||
}
|
||||
|
||||
static class NoCycleButIndenticallNamedPropertiesDeeplyNested {
|
||||
|
||||
@Indexed String foo;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class SimilarityHolingBean {
|
||||
|
||||
@Indexed @Field("norm") String normalProperty;
|
||||
@Field("similarityL") private List<SimilaritySibling> listOfSimilarilyNamedEntities = null;
|
||||
}
|
||||
|
||||
static class SimilaritySibling {
|
||||
@Field("similarity") private String similarThoughNotEqualNamedProperty;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class MultipleObjectsOfSameType {
|
||||
|
||||
SelfCyclingViaCollectionType cycleOne;
|
||||
|
||||
SelfCyclingViaCollectionType cycleTwo;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class SelfCyclingViaCollectionType {
|
||||
|
||||
List<SelfCyclingViaCollectionType> cyclic;
|
||||
|
||||
}
|
||||
|
||||
@Document
|
||||
@CompoundIndex(name = "c_index", def = "{ foo:1, bar:1 }")
|
||||
static class DocumentWithNamedCompoundIndex {
|
||||
|
||||
String property;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class DocumentWithNamedIndex {
|
||||
|
||||
@Indexed(name = "property_index") String property;
|
||||
}
|
||||
|
||||
static class TypeWithNamedIndex {
|
||||
|
||||
@Indexed(name = "property_index") String property;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class DocumentWithNestedDocumentHavingNamedCompoundIndex {
|
||||
|
||||
DocumentWithNamedCompoundIndex propertyOfTypeHavingNamedCompoundIndex;
|
||||
}
|
||||
|
||||
@CompoundIndex(name = "c_index", def = "{ foo:1, bar:1 }")
|
||||
static class TypeWithNamedCompoundIndex {
|
||||
String property;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class DocumentWithNestedTypeHavingNamedCompoundIndex {
|
||||
|
||||
TypeWithNamedCompoundIndex propertyOfTypeHavingNamedCompoundIndex;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class DocumentWithNestedDocumentHavingNamedIndex {
|
||||
|
||||
DocumentWithNamedIndex propertyOfTypeHavingNamedIndex;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class DocumentWithNestedTypeHavingNamedIndex {
|
||||
|
||||
TypeWithNamedIndex propertyOfTypeHavingNamedIndex;
|
||||
}
|
||||
|
||||
@Document
|
||||
public class MultiplePropertiesOfSameTypeWithMatchingStartLetters {
|
||||
|
||||
public class NameComponent {
|
||||
|
||||
@Indexed String component;
|
||||
}
|
||||
|
||||
NameComponent name;
|
||||
NameComponent nameLast;
|
||||
}
|
||||
|
||||
@Document
|
||||
public class MultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty {
|
||||
|
||||
public class NameComponent {
|
||||
|
||||
@Indexed String nameLast;
|
||||
@Indexed String name;
|
||||
}
|
||||
|
||||
NameComponent component;
|
||||
}
|
||||
|
||||
@Document
|
||||
public static class OuterDocumentReferingToIndexedPropertyViaDifferentNonCyclingPaths {
|
||||
|
||||
NoCycleButIndenticallNamedPropertiesDeeplyNested path1;
|
||||
AlternatePathToNoCycleButIndenticallNamedPropertiesDeeplyNestedDocument path2;
|
||||
}
|
||||
|
||||
public static class AlternatePathToNoCycleButIndenticallNamedPropertiesDeeplyNestedDocument {
|
||||
NoCycleButIndenticallNamedPropertiesDeeplyNested propertyWithIndexedStructure;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static List<IndexDefinitionHolder> prepareMappingContextAndResolveIndexForType(Class<?> type) {
|
||||
|
||||
MongoMappingContext mappingContext = prepareMappingContext(type);
|
||||
MongoPersistentEntityIndexResolver resolver = new MongoPersistentEntityIndexResolver(mappingContext);
|
||||
return resolver.resolveIndexForEntity(mappingContext.getPersistentEntity(type));
|
||||
}
|
||||
|
||||
private static MongoMappingContext prepareMappingContext(Class<?> type) {
|
||||
|
||||
MongoMappingContext mappingContext = new MongoMappingContext();
|
||||
mappingContext.setInitialEntitySet(Collections.singleton(type));
|
||||
mappingContext.initialize();
|
||||
|
||||
return mappingContext;
|
||||
}
|
||||
|
||||
private static void assertIndexPathAndCollection(String expectedPath, String expectedCollection,
|
||||
IndexDefinitionHolder holder) {
|
||||
|
||||
assertIndexPathAndCollection(new String[] { expectedPath }, expectedCollection, holder);
|
||||
}
|
||||
|
||||
private static void assertIndexPathAndCollection(String[] expectedPaths, String expectedCollection,
|
||||
IndexDefinitionHolder holder) {
|
||||
|
||||
for (String expectedPath : expectedPaths) {
|
||||
assertThat(holder.getIndexDefinition().getIndexKeys().containsField(expectedPath), equalTo(true));
|
||||
}
|
||||
assertThat(holder.getCollection(), equalTo(expectedCollection));
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,92 @@
|
||||
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.index;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.CycleGuard.Path;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;

/**
 * Unit tests for {@link Path}.
 *
 * @author Christoph Strobl
 */
@RunWith(MockitoJUnitRunner.class)
public class PathUnitTests {

    @Mock MongoPersistentEntity<?> entityMock;

    @Before
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public void setUp() {
        when(entityMock.getType()).thenReturn((Class) Object.class);
    }

    /**
     * @see DATAMONGO-962
     */
    @Test
    public void shouldIdentifyCycleForOwnerOfSameTypeAndMatchingPath() {

        MongoPersistentProperty property = createPersistentPropertyMock(entityMock, "foo");
        assertThat(new Path(property, "foo.bar").cycles(property, "foo.bar.bar"), is(true));
    }

    /**
     * @see DATAMONGO-962
     */
    @Test
    @SuppressWarnings("rawtypes")
    public void shouldAllowMatchingPathForDifferentOwners() {

        MongoPersistentProperty existing = createPersistentPropertyMock(entityMock, "foo");

        MongoPersistentEntity entityOfDifferentType = Mockito.mock(MongoPersistentEntity.class);
        when(entityOfDifferentType.getType()).thenReturn(String.class);
        MongoPersistentProperty toBeVerified = createPersistentPropertyMock(entityOfDifferentType, "foo");

        assertThat(new Path(existing, "foo.bar").cycles(toBeVerified, "foo.bar.bar"), is(false));
    }

    /**
     * @see DATAMONGO-962
     */
    @Test
    public void shouldAllowEqaulPropertiesOnDifferentPaths() {

        MongoPersistentProperty property = createPersistentPropertyMock(entityMock, "foo");
        assertThat(new Path(property, "foo.bar").cycles(property, "foo2.bar.bar"), is(false));
    }

    @SuppressWarnings({ "rawtypes", "unchecked" })
    private MongoPersistentProperty createPersistentPropertyMock(MongoPersistentEntity owner, String fieldname) {

        MongoPersistentProperty property = Mockito.mock(MongoPersistentProperty.class);
        when(property.getOwner()).thenReturn(owner);
        when(property.getFieldName()).thenReturn(fieldname);
        return property;
    }
}
@@ -103,6 +103,7 @@ public class MongoMappingContextUnitTests {
        exception.expectMessage("firstname");
        exception.expectMessage("lastname");
        exception.expectMessage("foo");
        exception.expectMessage("@Field");

        MongoMappingContext context = new MongoMappingContext();
        context.setApplicationContext(applicationContext);

@@ -0,0 +1,212 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.mapping;

import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;
import org.springframework.data.mapping.AssociationHandler;
import org.springframework.data.mapping.PersistentProperty;
import org.springframework.data.mapping.PreferredConstructor;
import org.springframework.data.mapping.PropertyHandler;
import org.springframework.data.mapping.SimpleAssociationHandler;
import org.springframework.data.mapping.SimplePropertyHandler;
import org.springframework.data.util.TypeInformation;

/**
 * Trivial dummy implementation of {@link MongoPersistentEntity} to be used in tests.
 *
 * @author Christoph Strobl
 * @param <T>
 */
public class MongoPersistentEntityTestDummy<T> implements MongoPersistentEntity<T> {

    private Map<Class<?>, Annotation> annotations = new HashMap<Class<?>, Annotation>();
    private Collection<MongoPersistentProperty> properties = new ArrayList<MongoPersistentProperty>();
    private String collection;
    private String name;
    private Class<T> type;

    @Override
    public String getName() {
        return name;
    }

    @Override
    public PreferredConstructor<T, MongoPersistentProperty> getPersistenceConstructor() {
        return null;
    }

    @Override
    public boolean isConstructorArgument(PersistentProperty<?> property) {
        return false;
    }

    @Override
    public boolean isIdProperty(PersistentProperty<?> property) {
        return property != null ? property.isIdProperty() : false;
    }

    @Override
    public boolean isVersionProperty(PersistentProperty<?> property) {
        return property != null ? property.isVersionProperty() : false;
    }

    @Override
    public MongoPersistentProperty getIdProperty() {
        return getPersistentProperty(Id.class);
    }

    @Override
    public MongoPersistentProperty getVersionProperty() {
        return getPersistentProperty(Version.class);
    }

    @Override
    public MongoPersistentProperty getPersistentProperty(String name) {

        for (MongoPersistentProperty p : this.properties) {
            if (p.getName().equals(name)) {
                return p;
            }
        }
        return null;
    }

    @Override
    public MongoPersistentProperty getPersistentProperty(Class<? extends Annotation> annotationType) {

        for (MongoPersistentProperty p : this.properties) {
            if (p.isAnnotationPresent(annotationType)) {
                return p;
            }
        }
        return null;
    }

    @Override
    public boolean hasIdProperty() {
        return false;
    }

    @Override
    public boolean hasVersionProperty() {
        return getVersionProperty() != null;
    }

    @Override
    public Class<T> getType() {
        return this.type;
    }

    @Override
    public Object getTypeAlias() {
        return null;
    }

    @Override
    public TypeInformation<T> getTypeInformation() {
        return null;
    }

    @Override
    public void doWithProperties(PropertyHandler<MongoPersistentProperty> handler) {

        for (MongoPersistentProperty p : this.properties) {
            handler.doWithPersistentProperty(p);
        }
    }

    @Override
    public void doWithProperties(SimplePropertyHandler handler) {

        for (MongoPersistentProperty p : this.properties) {
            handler.doWithPersistentProperty(p);
        }
    }

    @Override
    public void doWithAssociations(AssociationHandler<MongoPersistentProperty> handler) {

    }

    @Override
    public void doWithAssociations(SimpleAssociationHandler handler) {

    }

    @SuppressWarnings("unchecked")
    @Override
    public <A extends Annotation> A findAnnotation(Class<A> annotationType) {
        return (A) this.annotations.get(annotationType);
    }

    @Override
    public String getCollection() {
        return this.collection;
    }

    /**
     * Simple builder to create {@link MongoPersistentEntityTestDummy} with defined properties.
     *
     * @author Christoph Strobl
     * @param <T>
     */
    public static class MongoPersistentEntityDummyBuilder<T> {

        private MongoPersistentEntityTestDummy<T> instance;

        private MongoPersistentEntityDummyBuilder(Class<T> type) {
            this.instance = new MongoPersistentEntityTestDummy<T>();
            this.instance.type = type;
        }

        @SuppressWarnings({ "rawtypes", "unchecked" })
        public static <T> MongoPersistentEntityDummyBuilder<T> forClass(Class<T> type) {
            return new MongoPersistentEntityDummyBuilder(type);
        }

        public MongoPersistentEntityDummyBuilder<T> withName(String name) {
            this.instance.name = name;
            return this;
        }

        public MongoPersistentEntityDummyBuilder<T> and(MongoPersistentProperty property) {
            this.instance.properties.add(property);
            return this;
        }

        public MongoPersistentEntityDummyBuilder<T> withCollection(String collection) {
            this.instance.collection = collection;
            return this;
        }

        public MongoPersistentEntityDummyBuilder<T> and(Annotation annotation) {
            this.instance.annotations.put(annotation.annotationType(), annotation);
            return this;
        }

        public MongoPersistentEntityTestDummy<T> build() {
            return this.instance;
        }

    }
}
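For orientation, a brief usage sketch of the builder above; the Person type and the mocked property are illustrative placeholders, not part of this change:

    // Hypothetical test setup using the builder shipped with the dummy entity
    MongoPersistentEntity<Person> entity = MongoPersistentEntityDummyBuilder.forClass(Person.class) //
            .withName("person") //
            .withCollection("people") //
            .and(Mockito.mock(MongoPersistentProperty.class)) //
            .build();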
@@ -0,0 +1,57 @@
/*
 * Copyright 2013-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.mapping;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;

/**
 * Unit tests for {@link SnakeCaseFieldNamingStrategy}.
 *
 * @author Ryan Tenney
 * @author Oliver Gierke
 */
@RunWith(MockitoJUnitRunner.class)
public class SnakeCaseFieldNamingStrategyUnitTests {

    FieldNamingStrategy strategy = new SnakeCaseFieldNamingStrategy();

    @Mock MongoPersistentProperty property;

    /**
     * @see DATAMONGO-866
     */
    @Test
    public void rendersSnakeCaseFieldNames() {

        assertFieldNameForPropertyName("fooBar", "foo_bar");
        assertFieldNameForPropertyName("FooBar", "foo_bar");
        assertFieldNameForPropertyName("foo_bar", "foo_bar");
        assertFieldNameForPropertyName("FOO_BAR", "foo_bar");
    }

    private void assertFieldNameForPropertyName(String propertyName, String fieldName) {

        when(property.getName()).thenReturn(propertyName);
        assertThat(strategy.getFieldName(property), is(fieldName));
    }
}
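A hedged configuration sketch for the strategy under test; whether this version of the mapping context exposes setFieldNamingStrategy(..) is an assumption, so treat the snippet as illustrative only:

    // Sketch: plug the snake_case strategy into the mapping subsystem (assumed setter)
    MongoMappingContext mappingContext = new MongoMappingContext();
    mappingContext.setFieldNamingStrategy(new SnakeCaseFieldNamingStrategy());
    // a Java property "fooBar" would then map to the MongoDB field "foo_bar"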
@@ -1,5 +1,5 @@
/*
 * Copyright (c) 2011 by the original author(s).
 * Copyright 2011-2015 by the original author(s).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -16,17 +16,23 @@
package org.springframework.data.mongodb.core.mapping.event;

import java.util.ArrayList;
import java.util.List;

import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.data.mongodb.core.mapping.PersonPojoStringId;

public class PersonBeforeSaveListener implements ApplicationListener<BeforeSaveEvent<PersonPojoStringId>> {
import com.mongodb.DBObject;

public final ArrayList<ApplicationEvent> seenEvents = new ArrayList<ApplicationEvent>();
public class PersonBeforeSaveListener extends AbstractMongoEventListener<PersonPojoStringId> {

    public void onApplicationEvent(BeforeSaveEvent<PersonPojoStringId> event) {
        this.seenEvents.add(event);
    public final List<ApplicationEvent> seenEvents = new ArrayList<ApplicationEvent>();

    /*
     * (non-Javadoc)
     * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeSave(java.lang.Object, com.mongodb.DBObject)
     */
    @Override
    public void onBeforeSave(PersonPojoStringId source, DBObject dbo) {
        seenEvents.add(new BeforeSaveEvent<PersonPojoStringId>(source, dbo));
    }

}

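A minimal sketch, assuming a Java-config test context, of how such a listener is typically registered so the template publishes BeforeSaveEvents to it; the configuration class name is hypothetical:

    // Hypothetical registration: any AbstractMongoEventListener bean in the
    // ApplicationContext receives the mapping events emitted by MongoTemplate.
    @Configuration
    class EventListenerConfig {

        @Bean
        PersonBeforeSaveListener personBeforeSaveListener() {
            return new PersonBeforeSaveListener();
        }
    }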
@@ -1,5 +1,5 @@
/*
 * Copyright 2011 the original author or authors.
 * Copyright 2011-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -32,6 +32,7 @@ import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.geo.Box;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
@@ -50,6 +51,7 @@ import com.mongodb.Mongo;
 * Integration test for {@link MongoTemplate}'s Map-Reduce operations
 *
 * @author Mark Pollack
 * @author Thomas Darimont
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:infrastructure.xml")
@@ -276,6 +278,31 @@ public class MapReduceTests {

    }

    /**
     * @see DATAMONGO-938
     */
    @Test
    public void mapReduceShouldUseQueryMapper() {

        DBCollection c = mongoTemplate.getDb().getCollection("jmrWithGeo");

        c.save(new BasicDBObject("x", new String[] { "a", "b" }).append("loc", new double[] { 0, 0 }));
        c.save(new BasicDBObject("x", new String[] { "b", "c" }).append("loc", new double[] { 0, 0 }));
        c.save(new BasicDBObject("x", new String[] { "c", "d" }).append("loc", new double[] { 0, 0 }));

        Query query = new Query(where("x").ne(new String[] { "a", "b" }).and("loc")
                .within(new Box(new double[] { 0, 0 }, new double[] { 1, 1 })));

        MapReduceResults<ValueObject> results = template.mapReduce(query, "jmrWithGeo", mapFunction, reduceFunction,
                ValueObject.class);

        Map<String, Float> m = copyToMap(results);
        assertEquals(3, m.size());
        assertEquals(1, m.get("b").intValue());
        assertEquals(2, m.get("c").intValue());
        assertEquals(1, m.get("d").intValue());
    }

    private void performMapReduce(boolean inline, boolean withQuery) {
        createMapReduceData();
        MapReduceResults<ValueObject> results;

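For context, a hedged sketch of the query-scoped map-reduce call exercised by the new test; the collection name, the template reference, and the JavaScript functions are placeholders:

    // Illustrative only: count tag occurrences, restricted by a mapped query
    String map = "function () { for (var i = 0; i < this.x.length; i++) { emit(this.x[i], 1); } }";
    String reduce = "function (key, values) { return Array.sum(values); }";

    Query query = new Query(where("x").ne("a"));
    MapReduceResults<ValueObject> results =
            mongoTemplate.mapReduce(query, "jmrWithGeo", map, reduce, ValueObject.class);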
@@ -1,5 +1,5 @@
/*
 * Copyright 2010-2013 the original author or authors.
 * Copyright 2010-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -22,12 +22,14 @@ import org.junit.Test;
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;

import com.mongodb.BasicDBObject;
import com.mongodb.BasicDBObjectBuilder;
import com.mongodb.DBObject;

/**
 * @author Oliver Gierke
 * @author Thomas Darimont
 */
/**
 * @author Oliver Gierke
 * @author Thomas Darimont
 * @author Christoph Strobl
 */
public class CriteriaTests {

    @Test
@@ -72,50 +74,94 @@ public class CriteriaTests {
        assertThat(left, is(not(right)));
        assertThat(right, is(not(left)));
    }

    /**
     * @see DATAMONGO-507
     */
    @Test(expected = IllegalArgumentException.class)
    public void shouldThrowExceptionWhenTryingToNegateAndOperation() {

        new Criteria() //
                .not() //
                .andOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
    }

    /**
     * @see DATAMONGO-507
     */
    @Test(expected = IllegalArgumentException.class)
    public void shouldThrowExceptionWhenTryingToNegateOrOperation() {

        new Criteria() //
                .not() //
                .orOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
    }

    /**
     * @see DATAMONGO-507
     */
    @Test(expected = IllegalArgumentException.class)
    public void shouldThrowExceptionWhenTryingToNegateNorOperation() {

        new Criteria() //
                .not() //
                .norOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
    }

    /**
     * @see DATAMONGO-507
     */
    @Test
    public void shouldNegateFollowingSimpleExpression() {

        Criteria c = Criteria.where("age").not().gt(18).and("status").is("student");
        DBObject co = c.getCriteriaObject();

        assertThat(co, is(notNullValue()));
        assertThat(co.toString(), is("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}"));
    }

    /**
     * @see DATAMONGO-507
     */
    @Test(expected = IllegalArgumentException.class)
    public void shouldThrowExceptionWhenTryingToNegateAndOperation() {

        new Criteria() //
                .not() //
                .andOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
    }

    /**
     * @see DATAMONGO-507
     */
    @Test(expected = IllegalArgumentException.class)
    public void shouldThrowExceptionWhenTryingToNegateOrOperation() {

        new Criteria() //
                .not() //
                .orOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
    }

    /**
     * @see DATAMONGO-507
     */
    @Test(expected = IllegalArgumentException.class)
    public void shouldThrowExceptionWhenTryingToNegateNorOperation() {

        new Criteria() //
                .not() //
                .norOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
    }

    /**
     * @see DATAMONGO-507
     */
    @Test
    public void shouldNegateFollowingSimpleExpression() {

        Criteria c = Criteria.where("age").not().gt(18).and("status").is("student");
        DBObject co = c.getCriteriaObject();

        assertThat(co, is(notNullValue()));
        assertThat(co.toString(), is("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}"));
    }

    /**
     * @see DATAMONGO-1068
     */
    @Test
    public void getCriteriaObjectShouldReturnEmptyDBOWhenNoCriteriaSpecified() {

        DBObject dbo = new Criteria().getCriteriaObject();

        assertThat(dbo, equalTo(new BasicDBObjectBuilder().get()));
    }

    /**
     * @see DATAMONGO-1068
     */
    @Test
    public void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresent() {

        DBObject dbo = new Criteria().lt("foo").getCriteriaObject();

        assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("$lt", "foo").get()));
    }

    /**
     * @see DATAMONGO-1068
     */
    @Test
    public void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresentButMultipleCriteriasPresent() {

        DBObject dbo = new Criteria().lt("foo").gt("bar").getCriteriaObject();

        assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("$lt", "foo").add("$gt", "bar").get()));
    }

    /**
     * @see DATAMONGO-1068
     */
    @Test
    public void getCriteriaObjectShouldRespectNotWhenNoKeyPresent() {

        DBObject dbo = new Criteria().lt("foo").not().getCriteriaObject();

        assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("$not", new BasicDBObject("$lt", "foo")).get()));
    }
}

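A short usage sketch of the negation rules these tests pin down; the field names are illustrative:

    // not() negates only the next simple expression on the same field, yielding
    // { "age" : { "$not" : { "$gt" : 18}} , "status" : "student" }
    Criteria criteria = Criteria.where("age").not().gt(18).and("status").is("student");
    Query query = new Query(criteria);
    // calling not() directly before andOperator/orOperator/norOperator is rejected
    // with an IllegalArgumentException, as asserted above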
@@ -21,6 +21,7 @@ import static org.junit.Assert.*;
import java.util.Collections;
import java.util.Map;

import org.joda.time.DateTime;
import org.junit.Test;

/**
@@ -30,6 +31,7 @@ import org.junit.Test;
 * @author Thomas Risberg
 * @author Becca Gaspard
 * @author Christoph Strobl
 * @author Thomas Darimont
 */
public class UpdateTests {

@@ -284,4 +286,71 @@ public class UpdateTests {
    public void testCreatingUpdateWithNullKeyThrowsException() {
        Update.update(null, "value");
    }

    /**
     * @see DATAMONGO-953
     */
    @Test
    public void testEquality() {

        Update actualUpdate = new Update() //
                .inc("size", 1) //
                .set("nl", null) //
                .set("directory", "/Users/Test/Desktop") //
                .push("authors", Collections.singletonMap("name", "Sven")) //
                .pop("authors", Update.Position.FIRST) //
                .set("foo", "bar");

        Update expectedUpdate = new Update() //
                .inc("size", 1) //
                .set("nl", null) //
                .set("directory", "/Users/Test/Desktop") //
                .push("authors", Collections.singletonMap("name", "Sven")) //
                .pop("authors", Update.Position.FIRST) //
                .set("foo", "bar");

        assertThat(actualUpdate, is(equalTo(actualUpdate)));
        assertThat(actualUpdate.hashCode(), is(equalTo(actualUpdate.hashCode())));
        assertThat(actualUpdate, is(equalTo(expectedUpdate)));
        assertThat(actualUpdate.hashCode(), is(equalTo(expectedUpdate.hashCode())));
    }

    /**
     * @see DATAMONGO-953
     */
    @Test
    public void testToString() {

        Update actualUpdate = new Update() //
                .inc("size", 1) //
                .set("nl", null) //
                .set("directory", "/Users/Test/Desktop") //
                .push("authors", Collections.singletonMap("name", "Sven")) //
                .pop("authors", Update.Position.FIRST) //
                .set("foo", "bar");

        Update expectedUpdate = new Update() //
                .inc("size", 1) //
                .set("nl", null) //
                .set("directory", "/Users/Test/Desktop") //
                .push("authors", Collections.singletonMap("name", "Sven")) //
                .pop("authors", Update.Position.FIRST) //
                .set("foo", "bar");

        assertThat(actualUpdate.toString(), is(equalTo(expectedUpdate.toString())));
        assertThat(actualUpdate.toString(), is("{ \"$inc\" : { \"size\" : 1} ," //
                + " \"$set\" : { \"nl\" : null , \"directory\" : \"/Users/Test/Desktop\" , \"foo\" : \"bar\"} , " //
                + "\"$push\" : { \"authors\" : { \"name\" : \"Sven\"}} " //
                + ", \"$pop\" : { \"authors\" : -1}}")); //
    }

    /**
     * @see DATAMONGO-1002
     */
    @Test
    public void toStringWorksForUpdateWithComplexObject() {

        Update update = new Update().addToSet("key", new DateTime());
        assertThat(update.toString(), is(notNullValue()));
    }
}

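A hedged sketch of applying such an Update through the template; the query, the collection name, and the mongoTemplate reference are assumptions made for illustration:

    // Illustrative only: the Update DSL renders to a modifier document such as
    // { "$inc" : { "size" : 1} , "$set" : { "foo" : "bar"} } when executed
    Update update = new Update().inc("size", 1).set("foo", "bar");
    Query query = new Query(Criteria.where("_id").is(documentId)); // documentId is a placeholder
    mongoTemplate.updateFirst(query, update, "documents"); // collection name is a placeholder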
@@ -24,6 +24,7 @@ import java.util.HashSet;
import java.util.List;

import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
@@ -863,4 +864,115 @@ public abstract class AbstractPersonRepositoryIntegrationTests {
    public void deleteByUsingAnnotatedQueryShouldReturnNumberOfDocumentsRemovedIfReturnTypeIsLong() {
        assertThat(repository.removePersonByLastnameUsingAnnotatedQuery("Beauford"), is(1L));
    }

    /**
     * @see DATAMONGO-893
     */
    @Test
    public void findByNestedPropertyInCollectionShouldFindMatchingDocuments() {

        Person p = new Person("Mary", "Poppins");
        Address adr = new Address("some", "2", "where");
        p.setAddress(adr);

        repository.save(p);

        Page<Person> result = repository.findByAddressIn(Arrays.asList(adr), new PageRequest(0, 10));

        assertThat(result.getContent(), hasSize(1));
    }

    /**
     * @see DATAMONGO-745
     */
    @Test
    public void findByCustomQueryFirstnamesInListAndLastname() {

        repository.save(new Person("foo", "bar"));
        repository.save(new Person("bar", "bar"));
        repository.save(new Person("fuu", "bar"));
        repository.save(new Person("notfound", "bar"));

        Page<Person> result = repository.findByCustomQueryFirstnamesAndLastname(Arrays.asList("bar", "foo", "fuu"), "bar",
                new PageRequest(0, 2));

        assertThat(result.getContent(), hasSize(2));
        assertThat(result.getTotalPages(), is(2));
        assertThat(result.getTotalElements(), is(3L));
    }

    /**
     * @see DATAMONGO-745
     */
    @Test
    public void findByCustomQueryLastnameAndStreetInList() {

        repository.save(new Person("foo", "bar").withAddress(new Address("street1", "1", "SB")));
        repository.save(new Person("bar", "bar").withAddress(new Address("street2", "1", "SB")));
        repository.save(new Person("fuu", "bar").withAddress(new Address("street1", "2", "RGB")));
        repository.save(new Person("notfound", "notfound"));

        Page<Person> result = repository.findByCustomQueryLastnameAndAddressStreetInList("bar",
                Arrays.asList("street1", "street2"), new PageRequest(0, 2));

        assertThat(result.getContent(), hasSize(2));
        assertThat(result.getTotalPages(), is(2));
        assertThat(result.getTotalElements(), is(3L));
    }

    /**
     * Ignored for now as this requires Querydsl 3.4.1 to succeed.
     *
     * @see DATAMONGO-972
     */
    @Test
    @Ignore
    public void shouldExecuteFindOnDbRefCorrectly() {

        operations.remove(new org.springframework.data.mongodb.core.query.Query(), User.class);

        User user = new User();
        user.setUsername("Valerie Matthews");

        operations.save(user);

        dave.setCreator(user);
        operations.save(dave);

        assertThat(repository.findOne(QPerson.person.creator.eq(user)), is(dave));
    }

    /**
     * @see DATAMONGO-969
     */
    @Test
    public void shouldFindPersonsWhenUsingQueryDslPerdicatedOnIdProperty() {
        assertThat(repository.findAll(person.id.in(Arrays.asList(dave.id, carter.id))), containsInAnyOrder(dave, carter));
    }

    /**
     * @see DATAMONGO-1030
     */
    @Test
    public void executesSingleEntityQueryWithProjectionCorrectly() {

        PersonSummary result = repository.findSummaryByLastname("Beauford");

        assertThat(result, is(notNullValue()));
        assertThat(result.firstname, is("Carter"));
        assertThat(result.lastname, is("Beauford"));

    }

    /**
     * @see DATAMONGO-1072
     */
    @Test
    public void shouldBindPlaceholdersUsedAsKeysCorrectly() {

        List<Person> persons = repository.findByKeyValue("firstname", alicia.getFirstname());

        assertThat(persons, hasSize(1));
        assertThat(persons, hasItem(alicia));
    }
}

@@ -0,0 +1,132 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.repository;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.util.Collections;
import java.util.List;

import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import com.mongodb.Mongo;
import com.mongodb.MongoClient;

/**
 * @author Christoph Strobl
 * @author Oliver Gierke
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration
public class ComplexIdRepositoryIntegrationTests {

    @Configuration
    @EnableMongoRepositories
    static class Config extends AbstractMongoConfiguration {

        @Override
        protected String getDatabaseName() {
            return "complexIdTest";
        }

        @Override
        public Mongo mongo() throws Exception {
            return new MongoClient();
        }

    }

    @Autowired UserWithComplexIdRepository repo;
    @Autowired MongoTemplate template;

    MyId id;
    UserWithComplexId userWithId;

    @Before
    public void setUp() {

        repo.deleteAll();

        id = new MyId();
        id.val1 = "v1";
        id.val2 = "v2";

        userWithId = new UserWithComplexId();
        userWithId.firstname = "foo";
        userWithId.id = id;
    }

    /**
     * @see DATAMONGO-1078
     */
    @Test
    public void annotatedFindQueryShouldWorkWhenUsingComplexId() {

        repo.save(userWithId);

        assertThat(repo.getUserByComplexId(id), is(userWithId));
    }

    /**
     * @see DATAMONGO-1078
     */
    @Test
    public void annotatedFindQueryShouldWorkWhenUsingComplexIdWithinCollection() {

        repo.save(userWithId);

        List<UserWithComplexId> loaded = repo.findByUserIds(Collections.singleton(id));

        assertThat(loaded, hasSize(1));
        assertThat(loaded, contains(userWithId));
    }

    /**
     * @see DATAMONGO-1078
     */
    @Test
    public void findOneShouldWorkWhenUsingComplexId() {

        repo.save(userWithId);

        assertThat(repo.findOne(id), is(userWithId));
    }

    /**
     * @see DATAMONGO-1078
     */
    @Test
    public void findAllShouldWorkWhenUsingComplexId() {

        repo.save(userWithId);

        Iterable<UserWithComplexId> loaded = repo.findAll(Collections.singleton(id));

        assertThat(loaded, is(Matchers.<UserWithComplexId> iterableWithSize(1)));
        assertThat(loaded, contains(userWithId));
    }
}
@@ -20,7 +20,7 @@ import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;

/**
 * Sample contactt domain class.
 * Sample contact domain class.
 *
 * @author Oliver Gierke
 */

@@ -0,0 +1,59 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.repository;

import java.io.Serializable;

import org.springframework.util.ObjectUtils;

/**
 * @author Christoph Strobl
 * @author Oliver Gierke
 */
public class MyId implements Serializable {

    private static final long serialVersionUID = -7129201311241750831L;

    String val1;
    String val2;

    @Override
    public int hashCode() {

        int result = 31;

        result += 17 * ObjectUtils.nullSafeHashCode(val1);
        result += 17 * ObjectUtils.nullSafeHashCode(val2);

        return result;
    }

    @Override
    public boolean equals(Object obj) {

        if (obj == this) {
            return true;
        }

        if (!(obj instanceof MyId)) {
            return false;
        }

        MyId that = (MyId) obj;

        return ObjectUtils.nullSafeEquals(this.val1, that.val1) && ObjectUtils.nullSafeEquals(this.val2, that.val2);
    }
}
@@ -46,6 +46,8 @@ public class Person extends Contact {
    @SuppressWarnings("unused") private Sex sex;
    Date createdAt;

    List<String> skills;

    @GeoSpatialIndexed private Point location;

    private Address address;
@@ -261,6 +263,24 @@ public class Person extends Contact {
        return this.getId().equals(that.getId());
    }

    public Person withAddress(Address address) {

        this.address = address;
        return this;
    }

    public void setCreator(User creator) {
        this.creator = creator;
    }

    public void setSkills(List<String> skills) {
        this.skills = skills;
    }

    public List<String> getSkills() {
        return skills;
    }

    /*
     * (non-Javadoc)
     *

@@ -286,4 +286,28 @@ public interface PersonRepository extends MongoRepository<Person, String>, Query
    @Query(value = "{ 'lastname' : ?0 }", delete = true)
    Long removePersonByLastnameUsingAnnotatedQuery(String lastname);

    /**
     * @see DATAMONGO-893
     */
    Page<Person> findByAddressIn(List<Address> address, Pageable page);

    /**
     * @see DATAMONGO-745
     */
    @Query("{firstname:{$in:?0}, lastname:?1}")
    Page<Person> findByCustomQueryFirstnamesAndLastname(List<String> firstnames, String lastname, Pageable page);

    /**
     * @see DATAMONGO-745
     */
    @Query("{lastname:?0, address.street:{$in:?1}}")
    Page<Person> findByCustomQueryLastnameAndAddressStreetInList(String lastname, List<String> streetNames, Pageable page);

    /**
     * @see DATAMONGO-1030
     */
    PersonSummary findSummaryByLastname(String lastname);

    @Query("{ ?0 : ?1 }")
    List<Person> findByKeyValue(String key, String value);
}

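A brief, hypothetical call-site sketch for the query methods added above; the repository instance and the data are assumptions:

    // Illustrative only: paging over a custom @Query method and the dynamic
    // key/value binding introduced for DATAMONGO-1072
    Page<Person> page = repository.findByCustomQueryFirstnamesAndLastname(
            Arrays.asList("Dave", "Carter"), "Matthews", new PageRequest(0, 10));
    List<Person> result = repository.findByKeyValue("firstname", "Alicia");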
@@ -0,0 +1,25 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.repository;

/**
 * @author Oliver Gierke
 */
public class PersonSummary {

    String firstname;
    String lastname;
}
@@ -0,0 +1,57 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.repository;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.util.ObjectUtils;

/**
 * @author Christoph Strobl
 * @author Oliver Gierke
 */
@Document
public class UserWithComplexId {

    @Id MyId id;
    String firstname;

    @Override
    public int hashCode() {

        int result = 31;

        result += 17 * ObjectUtils.nullSafeHashCode(id);

        return result;
    }

    @Override
    public boolean equals(Object obj) {

        if (obj == this) {
            return true;
        }

        if (!(obj instanceof UserWithComplexId)) {
            return false;
        }

        UserWithComplexId that = (UserWithComplexId) obj;

        return ObjectUtils.nullSafeEquals(this.id, that.id);
    }
}
@@ -0,0 +1,33 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.repository;

import java.util.Collection;
import java.util.List;

import org.springframework.data.repository.CrudRepository;

/**
 * @author Christoph Strobl
 */
public interface UserWithComplexIdRepository extends CrudRepository<UserWithComplexId, MyId> {

    @Query("{'_id': {$in: ?0}}")
    List<UserWithComplexId> findByUserIds(Collection<MyId> ids);

    @Query("{'_id': ?0}")
    UserWithComplexId getUserByComplexId(MyId id);
}
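For illustration, a sketch of exercising this repository with a composite id, mirroring the integration test above; the concrete values are placeholders:

    // Illustrative only: a MyId value object serves as the _id of UserWithComplexId
    MyId id = new MyId();
    id.val1 = "v1";
    id.val2 = "v2";

    UserWithComplexId user = new UserWithComplexId();
    user.id = id;
    user.firstname = "foo";

    repo.save(user); // 'repo' is an injected UserWithComplexIdRepository
    UserWithComplexId loaded = repo.findOne(id);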