Compare commits
103 Commits
issue/4426...1.5.x
| Author | SHA1 | Date |
|---|---|---|
| | 13823690fc | |
| | fc0187eb8e | |
| | 07bf5a4f7e | |
| | 274adf320d | |
| | aed983462f | |
| | 0f5179eb26 | |
| | df40adb618 | |
| | 339d8e6f14 | |
| | 292e645387 | |
| | df7c007e9e | |
| | 9f3465f63c | |
| | 2e7c166d38 | |
| | ac7e3a708a | |
| | fa50f74729 | |
| | 2aba831389 | |
| | 33c665fb98 | |
| | 55f83fa0f2 | |
| | 13f2472e2f | |
| | 5de9994093 | |
| | 4d24742181 | |
| | 4af876152c | |
| | 872e706d2a | |
| | 51c66723e9 | |
| | ccfab5d91a | |
| | c94c3f3791 | |
| | 4924ff54b6 | |
| | edd9b58d1c | |
| | dc6cbde842 | |
| | aaaf7e57de | |
| | 76538aeff1 | |
| | 1181a21e51 | |
| | c7f07d2386 | |
| | 054ad6fc12 | |
| | 1b4ecac732 | |
| | 1ef03bf707 | |
| | a1febdb541 | |
| | 79fdf44d24 | |
| | d8bb644b30 | |
| | 73db7b06b9 | |
| | 8483f36f48 | |
| | b426bdd64a | |
| | 3349454a51 | |
| | c04343a764 | |
| | 05311ea118 | |
| | a18633f847 | |
| | 29e2f87ee1 | |
| | 5c5accd12d | |
| | 08e534adcd | |
| | 914acfea16 | |
| | a6728f851b | |
| | 81a7515cb7 | |
| | 3f4087dc13 | |
| | 0430962861 | |
| | 8bb62a65db | |
| | d6defbcb56 | |
| | ed47850f71 | |
| | ca1cfdf659 | |
| | 95d31d5bb7 | |
| | a7c3ef2aa8 | |
| | 3320e49c0b | |
| | 286efca52d | |
| | 92926befc9 | |
| | 703f24ae1c | |
| | febe703954 | |
| | 440d16ebc6 | |
| | 11c2e90736 | |
| | 816a567f29 | |
| | e35486759b | |
| | b80c81f861 | |
| | 005d21c0b6 | |
| | 3c8b7a54d6 | |
| | b2d59f2539 | |
| | 1cc2830e95 | |
| | 9a4d6f6fb7 | |
| | e96db8b69b | |
| | 13ce75779f | |
| | cc785ecf4f | |
| | 41fe1809da | |
| | fd5d39f4d9 | |
| | 71d97ff53a | |
| | f2f3faef08 | |
| | b1a488098b | |
| | fd18a8b82c | |
| | 761d2748c5 | |
| | e2f3966763 | |
| | 2b3e5461c2 | |
| | ca4db459c4 | |
| | e0d0a5dc31 | |
| | 18761a39c4 | |
| | ad25751dbb | |
| | 879ca6d149 | |
| | 8a40cf421c | |
| | fbbb7b6bf7 | |
| | 0596403081 | |
| | d9eb18df4d | |
| | 132e4a9839 | |
| | 4acf8caac1 | |
| | 9de3c88e6b | |
| | 6115c9562b | |
| | 963a222616 | |
| | 7a2de49ac1 | |
| | 8380806d7a | |
| | e60479e47a | |
@@ -11,7 +11,7 @@ For a comprehensive treatment of all the Spring Data MongoDB features, please re
* the [User Guide](http://docs.spring.io/spring-data/mongodb/docs/current/reference/html/)
* the [JavaDocs](http://docs.spring.io/spring-data/mongodb/docs/current/api/) have extensive comments in them as well.
* the home page of [Spring Data MongoDB](http://projects.spring.io/spring-data-mongodb) contains links to articles and other resources.
* for more detailed questions, use the [forum](http://forum.spring.io/forum/spring-projects/data/nosql).
* for more detailed questions, use [Spring Data Mongodb on Stackoverflow](http://stackoverflow.com/questions/tagged/spring-data-mongodb).
If you are new to Spring as well as to Spring Data, look for information about [Spring projects](http://projects.spring.io/).
@@ -139,7 +139,7 @@ public class MyService {
Here are some ways for you to get involved in the community:
* Get involved with the Spring community on the Spring Community Forums. Please help out on the [forum](http://forum.spring.io/forum/spring-projects/data/nosql) by responding to questions and joining the debate.
* Get involved with the Spring community on Stackoverflow and help out on the [spring-data-mongodb](http://stackoverflow.com/questions/tagged/spring-data-mongodb) tag by responding to questions and joining the debate.
* Create [JIRA](https://jira.springframework.org/browse/DATADOC) tickets for bugs and new features and comment and vote on the ones that you are interested in.
* Github is for social coding: if you want to write code, we encourage contributions through pull requests from [forks of this repository](http://help.github.com/forking/). If you want to contribute code this way, please reference a JIRA ticket as well covering the specific issue you are addressing.
* Watch for upcoming articles on Spring by [subscribing](http://spring.io/blog) to spring.io.
44 pom.xml
@@ -1,11 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.5.0.RELEASE</version>
<version>1.5.7.BUILD-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Spring Data MongoDB</name>
@@ -15,8 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>1.4.0.RELEASE</version>
<relativePath>../spring-data-build/parent/pom.xml</relativePath>
<version>1.4.7.BUILD-SNAPSHOT</version>
</parent>
<modules>
@@ -29,10 +28,11 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>1.8.0.RELEASE</springdata.commons>
<mongo>2.12.1</mongo>
<springdata.commons>1.8.7.BUILD-SNAPSHOT</springdata.commons>
<mongo>2.12.5</mongo>
<mongo.osgi>2.12.5</mongo.osgi>
</properties>
<developers>
<developer>
<id>ogierke</id>
@@ -104,10 +104,28 @@
<profiles>
<profile>
<id>mongo-next</id>
<properties>
<mongo>2.12.0</mongo>
<mongo>2.13.0</mongo>
</properties>
</profile>
<profile>
<id>mongo-3-next</id>
<properties>
<mongo>3.0.0-SNAPSHOT</mongo>
</properties>
<repositories>
<repository>
<id>mongo-snapshots</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
</repository>
</repositories>
</profile>
</profiles>
@@ -119,14 +137,14 @@
<version>${mongo}</version>
</dependency>
</dependencies>
<repositories>
<repository>
<id>spring-libs-release</id>
<url>http://repo.spring.io/libs-release</url>
<id>spring-libs-snapshot</id>
<url>https://repo.spring.io/libs-snapshot</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>spring-plugins-release</id>
7 spring-data-mongodb-cross-store/aop.xml Normal file
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<aspectj>
<aspects>
<aspect name="org.springframework.beans.factory.aspectj.AnnotationBeanConfigurerAspect" />
<aspect name="org.springframework.data.mongodb.crossstore.MongoDocumentBacking" />
</aspects>
</aspectj>
@@ -2,22 +2,22 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.5.0.RELEASE</version>
<version>1.5.7.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>spring-data-mongodb-cross-store</artifactId>
<name>Spring Data MongoDB - Cross-Store Support</name>
<properties>
<jpa>1.0.0.Final</jpa>
<hibernate>3.6.10.Final</hibernate>
</properties>
<dependencies>
<!-- Spring -->
@@ -48,7 +48,7 @@
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.5.0.RELEASE</version>
<version>1.5.7.BUILD-SNAPSHOT</version>
</dependency>
<dependency>
@@ -126,10 +126,11 @@
<groupId>org.springframework</groupId>
<artifactId>spring-aspects</artifactId>
</aspectLibrary>
</aspectLibraries>
</aspectLibraries>
<complianceLevel>${source.level}</complianceLevel>
<source>${source.level}</source>
<target>${source.level}</target>
<xmlConfigured>aop.xml</xmlConfigured>
</configuration>
</plugin>
</plugins>
@@ -7,7 +7,7 @@ Import-Package:
Export-Template:
org.springframework.data.mongodb.crossstore.*;version="${project.version}"
Import-Template:
com.mongodb.*;version="${mongo:[=.=.=,+1.0.0)}",
com.mongodb.*;version="${mongo.osgi:[=.=.=,+1.0.0)}",
javax.persistence.*;version="${jpa:[=.=.=,+1.0.0)}",
org.aspectj.*;version="${aspectj:[1.0.0, 2.0.0)}",
org.bson.*;version="0",
@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.5.0.RELEASE</version>
<version>1.5.7.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -5,7 +5,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.5.0.RELEASE</version>
<version>1.5.7.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -5,5 +5,5 @@ Bundle-ManifestVersion: 2
Import-Package:
sun.reflect;version="0";resolution:=optional
Import-Template:
com.mongodb.*;version="${mongo:[=.=.=,+1.0.0)}",
com.mongodb.*;version="${mongo.osgi:[=.=.=,+1.0.0)}",
org.apache.log4j.*;version="${log4j:[=.=.=,+1.0.0)}"
@@ -2,7 +2,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>spring-data-mongodb</artifactId>
<name>Spring Data MongoDB - Core</name>
@@ -11,7 +11,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.5.0.RELEASE</version>
<version>1.5.7.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -21,7 +21,7 @@
</properties>
<dependencies>
<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
@@ -77,7 +77,7 @@
<version>1.0</version>
<optional>true</optional>
</dependency>
<!-- CDI -->
<dependency>
<groupId>javax.enterprise</groupId>
@@ -86,21 +86,21 @@
<scope>provided</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>javax.el</groupId>
<artifactId>el-api</artifactId>
<version>${cdi}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.openwebbeans.test</groupId>
<artifactId>cditest-owb</artifactId>
<version>${webbeans}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
@@ -115,7 +115,7 @@
<version>${validation}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.objenesis</groupId>
<artifactId>objenesis</artifactId>
@@ -129,23 +129,23 @@
<version>4.2.0.Final</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>${jodatime}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jul-to-slf4j</artifactId>
<version>${slf4j}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
@@ -189,9 +189,14 @@
<systemPropertyVariables>
<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
</systemPropertyVariables>
<properties>
<property>
<name>listener</name>
<value>org.springframework.data.mongodb.test.util.CleanMongoDBJunitRunListener</value>
</property>
</properties>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -25,7 +25,7 @@ import com.mongodb.DBCursor;
interface CursorPreparer {
/**
* Prepare the given cursor (apply limits, skips and so on). Returns th eprepared cursor.
* Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
*
* @param cursor
*/
@@ -18,6 +18,8 @@ package org.springframework.data.mongodb.core;
import static org.springframework.data.domain.Sort.Direction.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.springframework.dao.DataAccessException;
@@ -41,6 +43,7 @@ public class DefaultIndexOperations implements IndexOperations {
private static final Double ONE = Double.valueOf(1);
private static final Double MINUS_ONE = Double.valueOf(-1);
private static final Collection<String> TWO_D_IDENTIFIERS = Arrays.asList("2d", "2dsphere");
private final MongoOperations mongoOperations;
private final String collectionName;
@@ -140,7 +143,7 @@ public class DefaultIndexOperations implements IndexOperations {
Object value = keyDbObject.get(key);
if ("2d".equals(value)) {
if (TWO_D_IDENTIFIERS.contains(value)) {
indexFields.add(IndexField.geo(key));
} else {
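The hunk above makes index introspection recognize both "2d" and "2dsphere" keys as geo index fields. A small usage sketch, assuming a configured MongoTemplate and a "venues" collection (both illustrative, not part of this change):

```java
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.IndexInfo;

public class GeoIndexInfoSketch {

	// After this change, an index created as { "location" : "2dsphere" } is reported with
	// IndexField.geo("location") among its index fields, just like a classic "2d" index.
	public static void printIndexFields(MongoTemplate template) {
		for (IndexInfo info : template.indexOps("venues").getIndexInfo()) {
			System.out.println(info.getName() + " -> " + info.getIndexFields());
		}
	}
}
```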
@@ -49,7 +49,7 @@ public class MongoAction {
* @param collectionName the collection name, must not be {@literal null} or empty.
* @param entityType the POJO that is being operated against
* @param document the converted DBObject from the POJO or Spring Update object
* @param query the converted DBOjbect from the Spring Query object
* @param query the converted DBObject from the Spring Query object
*/
public MongoAction(WriteConcern defaultWriteConcern, MongoActionOperation mongoActionOperation,
String collectionName, Class<?> entityType, DBObject document, DBObject query) {
@@ -23,11 +23,15 @@ import org.springframework.dao.InvalidDataAccessResourceUsageException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
import com.mongodb.MongoCursorNotFoundException;
import com.mongodb.MongoException;
import com.mongodb.MongoException.CursorNotFound;
import com.mongodb.MongoException.DuplicateKey;
import com.mongodb.MongoException.Network;
import com.mongodb.MongoInternalException;
import com.mongodb.MongoServerSelectionException;
import com.mongodb.MongoSocketException;
import com.mongodb.MongoTimeoutException;
/**
* Simple {@link PersistenceExceptionTranslator} for Mongo. Convert the given runtime exception to an appropriate
@@ -47,21 +51,23 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
// Check for well-known MongoException subclasses.
// All other MongoExceptions
if (ex instanceof DuplicateKey) {
if (ex instanceof DuplicateKey || ex instanceof DuplicateKeyException) {
return new DuplicateKeyException(ex.getMessage(), ex);
}
if (ex instanceof Network) {
if (ex instanceof Network || ex instanceof MongoSocketException) {
return new DataAccessResourceFailureException(ex.getMessage(), ex);
}
if (ex instanceof CursorNotFound) {
if (ex instanceof CursorNotFound || ex instanceof MongoCursorNotFoundException) {
return new DataAccessResourceFailureException(ex.getMessage(), ex);
}
// Driver 2.12 throws this to indicate connection problems. String comparison to avoid hard dependency
if (ex.getClass().getName().equals("com.mongodb.MongoServerSelectionException")) {
if (ex instanceof MongoServerSelectionException) {
return new DataAccessResourceFailureException(ex.getMessage(), ex);
}
if (ex instanceof MongoTimeoutException) {
return new DataAccessResourceFailureException(ex.getMessage(), ex);
}
@@ -69,6 +75,7 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
return new InvalidDataAccessResourceUsageException(ex.getMessage(), ex);
}
// All other MongoExceptions
if (ex instanceof MongoException) {
int code = ((MongoException) ex).getCode();
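For context, a PersistenceExceptionTranslator such as the one modified above is typically consulted around raw driver calls; a minimal sketch, assuming an already configured DBCollection:

```java
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;

import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;

public class TranslationSketch {

	private final MongoExceptionTranslator translator = new MongoExceptionTranslator();

	public void insertTranslatingExceptions(DBCollection collection) {
		try {
			collection.insert(new BasicDBObject("_id", 1));
		} catch (RuntimeException ex) {
			// e.g. the driver's duplicate-key error becomes Spring's DuplicateKeyException; null means "not translatable"
			DataAccessException translated = translator.translateExceptionIfPossible(ex);
			throw translated != null ? translated : ex;
		}
	}
}
```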
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2014 the original author or authors.
* Copyright 2011-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -416,7 +416,9 @@ public interface MongoOperations {
/**
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Will consider entity mapping
* information to determine the collection the query is ran against.
* information to determine the collection the query is ran against. Note, that MongoDB limits the number of results
* by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of
* results.
*
* @param near must not be {@literal null}.
* @param entityClass must not be {@literal null}.
@@ -425,7 +427,9 @@
<T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass);
/**
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}.
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the
* number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
* particular number of results.
*
* @param near must not be {@literal null}.
* @param entityClass must not be {@literal null}.
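A sketch of the explicit limit the new Javadoc recommends; the coordinates and the result count are illustrative, and num(...) is assumed here to be the limiting method on NearQuery:

```java
import org.springframework.data.mongodb.core.query.NearQuery;

public class GeoNearSketch {

	// Cap the result size explicitly instead of relying on the MongoDB server default,
	// then pass the query to MongoOperations#geoNear(near, EntityClass.class).
	public static NearQuery nearbyWithExplicitLimit() {
		return NearQuery.near(-73.99171, 40.738868).num(250);
	}
}
```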
@@ -335,7 +335,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
CommandResult result = execute(new DbCallback<CommandResult>() {
public CommandResult doInDB(DB db) throws MongoException, DataAccessException {
return db.command(command, options);
return readPreference != null ? db.command(command, readPreference) : db.command(command);
}
});
@@ -566,7 +566,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
BasicDBObject command = new BasicDBObject("geoNear", collection);
command.putAll(near.toDBObject());
CommandResult commandResult = executeCommand(command);
CommandResult commandResult = executeCommand(command, getDb().getOptions());
List<Object> results = (List<Object>) commandResult.get("results");
results = results == null ? Collections.emptyList() : results;
@@ -763,27 +763,33 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
}
protected <T> void doInsertAll(Collection<? extends T> listToSave, MongoWriter<T> writer) {
Map<String, List<T>> objs = new HashMap<String, List<T>>();
for (T o : listToSave) {
Map<String, List<T>> elementsByCollection = new HashMap<String, List<T>>();
for (T element : listToSave) {
if (element == null) {
continue;
}
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(element.getClass());
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(o.getClass());
if (entity == null) {
throw new InvalidDataAccessApiUsageException("No Persitent Entity information found for the class "
+ o.getClass().getName());
throw new InvalidDataAccessApiUsageException("No PersistentEntity information found for " + element.getClass());
}
String collection = entity.getCollection();
List<T> collectionElements = elementsByCollection.get(collection);
List<T> objList = objs.get(collection);
if (null == objList) {
objList = new ArrayList<T>();
objs.put(collection, objList);
if (null == collectionElements) {
collectionElements = new ArrayList<T>();
elementsByCollection.put(collection, collectionElements);
}
objList.add(o);
collectionElements.add(element);
}
for (Map.Entry<String, List<T>> entry : objs.entrySet()) {
for (Map.Entry<String, List<T>> entry : elementsByCollection.entrySet()) {
doInsertBatch(entry.getKey(), entry.getValue(), this.mongoConverter);
}
}
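The reworked doInsertAll(...) is what backs batch inserts of mixed entity types; a usage sketch (the two objects stand in for arbitrary mapped entities):

```java
import java.util.Arrays;

import org.springframework.data.mongodb.core.MongoOperations;

public class InsertAllSketch {

	// Entities are grouped by their target collection and each group is written as one batch.
	public static void saveMixedBatch(MongoOperations operations, Object person, Object account) {
		operations.insertAll(Arrays.asList(person, account));
	}
}
```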
@@ -1007,8 +1013,8 @@
update.getUpdateObject(), entity);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Calling update using query: " + queryObj + " and update: " + updateObj + " in collection: "
+ collectionName);
LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s",
serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName));
}
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName,
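serializeToJsonSafely(...), used throughout the rewritten log statements, comes from SerializationUtils in the same module; a tiny sketch of what it renders:

```java
import static org.springframework.data.mongodb.core.query.SerializationUtils.serializeToJsonSafely;

import com.mongodb.BasicDBObject;

public class LogRenderingSketch {

	public static void main(String[] args) {
		// Renders a DBObject as JSON for logging without failing on values the plain JSON serializer cannot handle.
		System.out.println(serializeToJsonSafely(new BasicDBObject("name", "Dave")));
	}
}
```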
@@ -1070,17 +1076,22 @@
}
/**
* Returns {@link Entry} containing the {@link MongoPersistentProperty} defining the {@literal id} as
* {@link Entry#getKey()} and the {@link Id}s property value as its {@link Entry#getValue()}.
* Returns {@link Entry} containing the field name of the id property as {@link Entry#getKey()} and the {@link Id}s
* property value as its {@link Entry#getValue()}.
*
* @param object
* @return
*/
private Map.Entry<MongoPersistentProperty, Object> extractIdPropertyAndValue(Object object) {
private Entry<String, Object> extractIdPropertyAndValue(Object object) {
Assert.notNull(object, "Id cannot be extracted from 'null'.");
Class<?> objectType = object.getClass();
if (object instanceof DBObject) {
return Collections.singletonMap(ID_FIELD, ((DBObject) object).get(ID_FIELD)).entrySet().iterator().next();
}
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(objectType);
MongoPersistentProperty idProp = entity == null ? null : entity.getIdProperty();
@@ -1090,7 +1101,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
Object idValue = BeanWrapper.create(object, mongoConverter.getConversionService())
.getProperty(idProp, Object.class);
return Collections.singletonMap(idProp, idValue).entrySet().iterator().next();
return Collections.singletonMap(idProp.getFieldName(), idValue).entrySet().iterator().next();
}
/**
@@ -1101,8 +1112,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
*/
private Query getIdQueryFor(Object object) {
Map.Entry<MongoPersistentProperty, Object> id = extractIdPropertyAndValue(object);
return new Query(where(id.getKey().getFieldName()).is(id.getValue()));
Entry<String, Object> id = extractIdPropertyAndValue(object);
return new Query(where(id.getKey()).is(id.getValue()));
}
/**
@@ -1116,7 +1127,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
Assert.notEmpty(objects, "Cannot create Query for empty collection.");
Iterator<?> it = objects.iterator();
Map.Entry<MongoPersistentProperty, Object> firstEntry = extractIdPropertyAndValue(it.next());
Entry<String, Object> firstEntry = extractIdPropertyAndValue(it.next());
ArrayList<Object> ids = new ArrayList<Object>(objects.size());
ids.add(firstEntry.getValue());
@@ -1125,7 +1136,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
ids.add(extractIdPropertyAndValue(it.next()).getValue());
}
return new Query(where(firstEntry.getKey().getFieldName()).in(ids));
return new Query(where(firstEntry.getKey()).in(ids));
}
private void assertUpdateableIdIfNotSet(Object entity) {
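The id-based queries assembled above reduce to a plain Criteria on the mapped id field; a minimal equivalent, assuming the id maps to "_id":

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import java.util.Arrays;

import org.springframework.data.mongodb.core.query.Query;

public class IdQuerySketch {

	// what getIdQueryFor(...) effectively produces for a single object
	public static Query byId(Object idValue) {
		return new Query(where("_id").is(idValue));
	}

	// the multi-object variant shown above collects the id values into an $in query
	public static Query byIds(Object... idValues) {
		return new Query(where("_id").in(Arrays.asList(idValues)));
	}
}
```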
@@ -1182,7 +1193,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Remove using query: {} in collection: {}.", new Object[] { dboq, collection.getName() });
LOGGER.debug("Remove using query: {} in collection: {}.", new Object[] { serializeToJsonSafely(dboq),
collection.getName() });
}
WriteResult wr = writeConcernToUse == null ? collection.remove(dboq) : collection.remove(dboq,
@@ -1410,7 +1422,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command));
}
CommandResult commandResult = executeCommand(command);
CommandResult commandResult = executeCommand(command, getDb().getOptions());
handleCommandError(commandResult, command);
// map results
@@ -1455,13 +1467,13 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
"Can not use skip or field specification with map reduce operations");
}
if (query.getQueryObject() != null) {
copyMapReduceOptions.put("query", query.getQueryObject());
copyMapReduceOptions.put("query", queryMapper.getMappedObject(query.getQueryObject(), null));
}
if (query.getLimit() > 0) {
copyMapReduceOptions.put("limit", query.getLimit());
}
if (query.getSortObject() != null) {
copyMapReduceOptions.put("sort", query.getSortObject());
copyMapReduceOptions.put("sort", queryMapper.getMappedObject(query.getSortObject(), null));
}
}
return copyMapReduceOptions;
@@ -1601,7 +1613,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s",
serializeToJsonSafely(query), mappedFields, entityClass, collectionName));
serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName));
}
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback,
@@ -1639,8 +1651,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
Class<T> entityClass) {
EntityReader<? super T, DBObject> readerToUse = this.mongoConverter;
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("findAndRemove using query: " + query + " fields: " + fields + " sort: " + sort + " for class: "
+ entityClass + " in collection: " + collectionName);
LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s",
serializeToJsonSafely(query), fields, sort, entityClass, collectionName));
}
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
return executeFindOneInternal(new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort),
@@ -1664,8 +1676,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
DBObject mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("findAndModify using query: " + mappedQuery + " fields: " + fields + " sort: " + sort
+ " for class: " + entityClass + " and update: " + mappedUpdate + " in collection: " + collectionName);
LOGGER.debug(String.format("findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s "
+ "in collection: %s", serializeToJsonSafely(mappedQuery), fields, sort, entityClass,
serializeToJsonSafely(mappedUpdate), collectionName));
}
return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options),
@@ -1975,13 +1988,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
public DBObject doInCollection(DBCollection collection) throws MongoException, DataAccessException {
if (fields == null) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("findOne using query: " + query + " in db.collection: " + collection.getFullName());
LOGGER.debug(String.format("findOne using query: %s in db.collection: %s", serializeToJsonSafely(query),
collection.getFullName()));
}
return collection.findOne(query);
} else {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("findOne using query: " + query + " fields: " + fields + " in db.collection: "
+ collection.getFullName());
LOGGER.debug(String.format("findOne using query: %s fields: %s in db.collection: %s",
serializeToJsonSafely(query), fields, collection.getFullName()));
}
return collection.findOne(query, fields);
}
@@ -2179,7 +2193,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
cursorToUse = cursorToUse.limit(query.getLimit());
}
if (query.getSortObject() != null) {
cursorToUse = cursorToUse.sort(getMappedSortObject(query, type));
DBObject sortDbo = type != null ? getMappedSortObject(query, type) : query.getSortObject();
cursorToUse = cursorToUse.sort(sortDbo);
}
if (StringUtils.hasText(query.getHint())) {
cursorToUse = cursorToUse.hint(query.getHint());
@@ -88,7 +88,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
}
/**
* Creates a new {@link ExposedFields} instance for the given fields in either sythetic or non-synthetic way.
* Creates a new {@link ExposedFields} instance for the given fields in either synthetic or non-synthetic way.
*
* @param fields must not be {@literal null}.
* @param synthetic
@@ -107,7 +107,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
}
/**
* Creates a new {@link ExposedFields} with the given orignals and synthetics.
* Creates a new {@link ExposedFields} with the given originals and synthetics.
*
* @param originals must not be {@literal null}.
* @param synthetic must not be {@literal null}.
@@ -363,7 +363,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
}
/**
* Returns the referenve value for the given field reference. Will return 1 for a synthetic, unaliased field or the
* Returns the reference value for the given field reference. Will return 1 for a synthetic, unaliased field or the
* raw rendering of the reference otherwise.
*
* @return
@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -28,7 +28,7 @@ import com.mongodb.DBObject;
* @author Oliver Gierke
* @since 1.3
*/
class LimitOperation implements AggregationOperation {
public class LimitOperation implements AggregationOperation {
private final long maxElements;
@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,6 +16,7 @@
package org.springframework.data.mongodb.core.aggregation;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.util.Assert;
import com.mongodb.BasicDBObject;
@@ -32,17 +33,29 @@ import com.mongodb.DBObject;
*/
public class MatchOperation implements AggregationOperation {
private final Criteria criteria;
private final CriteriaDefinition criteriaDefinition;
/**
* Creates a new {@link MatchOperation} for the given {@link Criteria}.
*
* @param criteria must not be {@literal null}.
* @deprecated Use {@link MatchOperation#MatchOperation(CriteriaDefinition)} instead. This constructor is scheduled
* for removal in the next versions.
*/
@Deprecated
public MatchOperation(Criteria criteria) {
this((CriteriaDefinition) criteria);
}
Assert.notNull(criteria, "Criteria must not be null!");
this.criteria = criteria;
/**
* Creates a new {@link MatchOperation} for the given {@link CriteriaDefinition}.
*
* @param criteriaDefinition must not be {@literal null}.
*/
public MatchOperation(CriteriaDefinition criteriaDefinition) {
Assert.notNull(criteriaDefinition, "Criteria must not be null!");
this.criteriaDefinition = criteriaDefinition;
}
/*
@@ -51,6 +64,6 @@ public class MatchOperation implements AggregationOperation {
*/
@Override
public DBObject toDBObject(AggregationOperationContext context) {
return new BasicDBObject("$match", context.getMappedObject(criteria.getCriteriaObject()));
return new BasicDBObject("$match", context.getMappedObject(criteriaDefinition.getCriteriaObject()));
}
}
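MatchOperation is usually created through the Aggregation DSL rather than instantiated directly; a usage sketch (the field names and values are illustrative):

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.query.Criteria;

public class MatchSketch {

	// match(Criteria) keeps working here because Criteria implements CriteriaDefinition
	public static Aggregation activeUsersFirstTen() {
		return newAggregation(
				match(Criteria.where("status").is("ACTIVE")),
				limit(10));
	}
}
```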
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,7 +20,7 @@ import java.math.BigInteger;
import org.bson.types.ObjectId;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.support.ConversionServiceFactory;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.data.convert.EntityInstantiators;
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToObjectIdConverter;
@@ -46,10 +46,8 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali
*
* @param conversionService
*/
@SuppressWarnings("deprecation")
public AbstractMongoConverter(GenericConversionService conversionService) {
this.conversionService = conversionService == null ? ConversionServiceFactory.createDefaultConversionService()
: conversionService;
this.conversionService = conversionService == null ? new DefaultConversionService() : conversionService;
}
/**
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2014 the original author or authors.
* Copyright 2011-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,14 +17,15 @@ package org.springframework.data.mongodb.core.convert;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -69,10 +70,13 @@ public class CustomConversions {
private final Set<ConvertiblePair> writingPairs;
private final Set<Class<?>> customSimpleTypes;
private final SimpleTypeHolder simpleTypeHolder;
private final ConcurrentMap<ConvertiblePair, CacheValue> customReadTargetTypes;
private final List<Object> converters;
private final Map<ConvertiblePair, CacheValue> customReadTargetTypes;
private final Map<ConvertiblePair, CacheValue> customWriteTargetTypes;
private final Map<Class<?>, CacheValue> rawWriteTargetTypes;
/**
* Creates an empty {@link CustomConversions} object.
*/
@@ -92,7 +96,9 @@ public class CustomConversions {
this.readingPairs = new LinkedHashSet<ConvertiblePair>();
this.writingPairs = new LinkedHashSet<ConvertiblePair>();
this.customSimpleTypes = new HashSet<Class<?>>();
this.customReadTargetTypes = new ConcurrentHashMap<GenericConverter.ConvertiblePair, CacheValue>();
this.customReadTargetTypes = new ConcurrentHashMap<ConvertiblePair, CacheValue>();
this.customWriteTargetTypes = new ConcurrentHashMap<ConvertiblePair, CacheValue>();
this.rawWriteTargetTypes = new ConcurrentHashMap<Class<?>, CacheValue>();
List<Object> toRegister = new ArrayList<Object>();
@@ -235,70 +241,103 @@ public class CustomConversions {
* @param sourceType must not be {@literal null}
* @return
*/
public Class<?> getCustomWriteTarget(Class<?> sourceType) {
return getCustomWriteTarget(sourceType, null);
public Class<?> getCustomWriteTarget(final Class<?> sourceType) {
return getOrCreateAndCache(sourceType, rawWriteTargetTypes, new Producer() {
@Override
public Class<?> get() {
return getCustomTarget(sourceType, null, writingPairs);
}
});
}
/**
* Returns the target type we can write an onject of the given source type to. The returned type might be a subclass
* oth the given expected type though. If {@code expectedTargetType} is {@literal null} we will simply return the
* first target type matching or {@literal null} if no conversion can be found.
* Returns the target type we can readTargetWriteLocl an inject of the given source type to. The returned type might
* be a subclass of the given expected type though. If {@code expectedTargetType} is {@literal null} we will simply
* return the first target type matching or {@literal null} if no conversion can be found.
*
* @param sourceType must not be {@literal null}
* @param requestedTargetType
* @return
*/
public Class<?> getCustomWriteTarget(Class<?> sourceType, Class<?> requestedTargetType) {
public Class<?> getCustomWriteTarget(final Class<?> sourceType, final Class<?> requestedTargetType) {
Assert.notNull(sourceType);
if (requestedTargetType == null) {
return getCustomWriteTarget(sourceType);
}
return getCustomTarget(sourceType, requestedTargetType, writingPairs);
return getOrCreateAndCache(new ConvertiblePair(sourceType, requestedTargetType), customWriteTargetTypes,
new Producer() {
@Override
public Class<?> get() {
return getCustomTarget(sourceType, requestedTargetType, writingPairs);
}
});
}
/**
* Returns whether we have a custom conversion registered to write into a Mongo native type. The returned type might
* be a subclass of the given expected type though.
* Returns whether we have a custom conversion registered to readTargetWriteLocl into a Mongo native type. The
* returned type might be a subclass of the given expected type though.
*
* @param sourceType must not be {@literal null}
* @return
*/
public boolean hasCustomWriteTarget(Class<?> sourceType) {
Assert.notNull(sourceType);
return hasCustomWriteTarget(sourceType, null);
}
/**
* Returns whether we have a custom conversion registered to write an object of the given source type into an object
* of the given Mongo native target type.
* Returns whether we have a custom conversion registered to readTargetWriteLocl an object of the given source type
* into an object of the given Mongo native target type.
*
* @param sourceType must not be {@literal null}.
* @param requestedTargetType
* @return
*/
public boolean hasCustomWriteTarget(Class<?> sourceType, Class<?> requestedTargetType) {
Assert.notNull(sourceType);
return getCustomWriteTarget(sourceType, requestedTargetType) != null;
}
/**
* Returns whether we have a custom conversion registered to read the given source into the given target type.
* Returns whether we have a custom conversion registered to readTargetReadLock the given source into the given target
* type.
*
* @param sourceType must not be {@literal null}
* @param requestedTargetType must not be {@literal null}
* @return
*/
public boolean hasCustomReadTarget(Class<?> sourceType, Class<?> requestedTargetType) {
Assert.notNull(sourceType);
Assert.notNull(requestedTargetType);
return getCustomReadTarget(sourceType, requestedTargetType) != null;
}
/**
* Inspects the given {@link ConvertiblePair} for ones that have a source compatible type as source. Additionally
* Returns the actual target type for the given {@code sourceType} and {@code requestedTargetType}. Note that the
* returned {@link Class} could be an assignable type to the given {@code requestedTargetType}.
*
* @param sourceType must not be {@literal null}.
* @param requestedTargetType can be {@literal null}.
* @return
*/
private Class<?> getCustomReadTarget(final Class<?> sourceType, final Class<?> requestedTargetType) {
if (requestedTargetType == null) {
return null;
}
return getOrCreateAndCache(new ConvertiblePair(sourceType, requestedTargetType), customReadTargetTypes,
new Producer() {
@Override
public Class<?> get() {
return getCustomTarget(sourceType, requestedTargetType, readingPairs);
}
});
}
/**
* Inspects the given {@link ConvertiblePair}s for ones that have a source compatible type as source. Additionally
* checks assignability of the target type if one is given.
*
* @param sourceType must not be {@literal null}.
@@ -307,11 +346,15 @@ public class CustomConversions {
* @return
*/
private static Class<?> getCustomTarget(Class<?> sourceType, Class<?> requestedTargetType,
Iterable<ConvertiblePair> pairs) {
Collection<ConvertiblePair> pairs) {
Assert.notNull(sourceType);
Assert.notNull(pairs);
if (requestedTargetType != null && pairs.contains(new ConvertiblePair(sourceType, requestedTargetType))) {
return requestedTargetType;
}
for (ConvertiblePair typePair : pairs) {
if (typePair.getSourceType().isAssignableFrom(sourceType)) {
Class<?> targetType = typePair.getTargetType();
@@ -325,32 +368,31 @@ public class CustomConversions {
}
/**
* Returns the actual target type for the given {@code sourceType} and {@code requestedTargetType}. Note that the
* returned {@link Class} could be an assignable type to the given {@code requestedTargetType}.
* Will try to find a value for the given key in the given cache or produce one using the given {@link Producer} and
* store it in the cache.
*
* @param sourceType must not be {@literal null}.
* @param requestedTargetType can be {@literal null}.
* @param key the key to lookup a potentially existing value, must not be {@literal null}.
* @param cache the cache to find the value in, must not be {@literal null}.
* @param producer the {@link Producer} to create values to cache, must not be {@literal null}.
* @return
*/
private Class<?> getCustomReadTarget(Class<?> sourceType, Class<?> requestedTargetType) {
private static <T> Class<?> getOrCreateAndCache(T key, Map<T, CacheValue> cache, Producer producer) {
Assert.notNull(sourceType);
CacheValue cacheValue = cache.get(key);
if (requestedTargetType == null) {
return null;
if (cacheValue != null) {
return cacheValue.getType();
}
ConvertiblePair lookupKey = new ConvertiblePair(sourceType, requestedTargetType);
CacheValue readTargetTypeValue = customReadTargetTypes.get(lookupKey);
Class<?> type = producer.get();
cache.put(key, CacheValue.of(type));
if (readTargetTypeValue != null) {
return readTargetTypeValue.getType();
}
return type;
}
readTargetTypeValue = CacheValue.of(getCustomTarget(sourceType, requestedTargetType, readingPairs));
CacheValue cacheValue = customReadTargetTypes.putIfAbsent(lookupKey, readTargetTypeValue);
private interface Producer {
return cacheValue != null ? cacheValue.getType() : readTargetTypeValue.getType();
Class<?> get();
}
@WritingConverter
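The new getOrCreateAndCache(...) is a plain look-up-then-compute cache keyed by source/target type; a generic, self-contained sketch of the pattern (all names here are illustrative, not the project's API):

```java
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class ComputeCacheSketch {

	interface Producer {
		Class<?> get();
	}

	private final ConcurrentMap<String, Class<?>> cache = new ConcurrentHashMap<String, Class<?>>();

	Class<?> getOrCreateAndCache(String key, Producer producer) {

		Class<?> cached = cache.get(key);

		if (cached != null) {
			return cached;
		}

		// Compute once and remember the result; concurrent duplicate computations are acceptable and idempotent.
		Class<?> type = producer.get();

		if (type != null) {
			// The CustomConversions code additionally wraps results in a CacheValue so that null
			// ("no custom conversion") can be cached as well; this sketch simply skips caching nulls.
			cache.put(key, type);
		}

		return type;
	}
}
```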
@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -34,7 +34,7 @@ import com.mongodb.DBObject;
*/
class DBObjectAccessor {
private final DBObject dbObject;
private final BasicDBObject dbObject;
/**
* Creates a new {@link DBObjectAccessor} for the given {@link DBObject}.
@@ -46,7 +46,7 @@ class DBObjectAccessor {
Assert.notNull(dbObject, "DBObject must not be null!");
Assert.isInstanceOf(BasicDBObject.class, dbObject, "Given DBObject must be a BasicDBObject!");
this.dbObject = dbObject;
this.dbObject = (BasicDBObject) dbObject;
}
/**
@@ -62,6 +62,11 @@ class DBObjectAccessor {
Assert.notNull(prop, "MongoPersistentProperty must not be null!");
String fieldName = prop.getFieldName();
if (!fieldName.contains(".")) {
dbObject.put(fieldName, value);
return;
}
Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
DBObject dbObject = this.dbObject;
@@ -87,12 +92,16 @@ class DBObjectAccessor {
* @param property must not be {@literal null}.
* @return
*/
@SuppressWarnings("unchecked")
public Object get(MongoPersistentProperty property) {
String fieldName = property.getFieldName();
if (!fieldName.contains(".")) {
return this.dbObject.get(fieldName);
}
Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
Map<Object, Object> source = this.dbObject.toMap();
Map<String, Object> source = this.dbObject;
Object result = null;
while (source != null && parts.hasNext()) {
@@ -108,14 +117,14 @@ class DBObjectAccessor {
}
@SuppressWarnings("unchecked")
private Map<Object, Object> getAsMap(Object source) {
private Map<String, Object> getAsMap(Object source) {
if (source instanceof BasicDBObject) {
return ((DBObject) source).toMap();
return (BasicDBObject) source;
}
if (source instanceof Map) {
return (Map<Object, Object>) source;
return (Map<String, Object>) source;
}
return null;
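What DBObjectAccessor's dotted-path handling amounts to, sketched directly against the driver API (the field names are illustrative):

```java
import com.mongodb.BasicDBObject;

public class NestedFieldSketch {

	public static void main(String[] args) {

		// a property mapped to the field name "address.city" is stored as a nested document ...
		BasicDBObject document = new BasicDBObject("address", new BasicDBObject("city", "Vienna"));

		// ... and read back by walking the path one segment at a time, as the accessor's get(...) does
		BasicDBObject address = (BasicDBObject) document.get("address");
		System.out.println(address.get("city")); // Vienna
	}
}
```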
@@ -56,6 +56,7 @@ import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.expression.spel.standard.SpelExpressionParser;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
@@ -75,6 +76,8 @@ import com.mongodb.DBRef;
|
||||
*/
|
||||
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
|
||||
|
||||
private static final String INCOMPATIBLE_TYPES = "Cannot convert %1$s of type %2$s into an instance of %3$s! Implement a custom Converter<%2$s, %3$s> and register it with the CustomConversions. Parent object was: %4$s";
|
||||
|
||||
protected static final Logger LOGGER = LoggerFactory.getLogger(MappingMongoConverter.class);
|
||||
|
||||
protected final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
@@ -213,6 +216,10 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return (S) readMap(typeToUse, dbo, parent);
|
||||
}
|
||||
|
||||
if (dbo instanceof BasicDBList) {
|
||||
throw new MappingException(String.format(INCOMPATIBLE_TYPES, dbo, BasicDBList.class, typeToUse.getType(), parent));
|
||||
}
|
||||
|
||||
// Retrieve persistent entity info
|
||||
MongoPersistentEntity<S> persistentEntity = (MongoPersistentEntity<S>) mappingContext
|
||||
.getPersistentEntity(typeToUse);
|
||||
@@ -253,8 +260,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return;
|
||||
}
|
||||
|
||||
Object obj = getValueInternal(prop, dbo, evaluator, result);
|
||||
wrapper.setProperty(prop, obj);
|
||||
wrapper.setProperty(prop, getValueInternal(prop, dbo, evaluator, result));
|
||||
}
|
||||
});
|
||||
|
||||
@@ -262,19 +268,21 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
entity.doWithAssociations(new AssociationHandler<MongoPersistentProperty>() {
|
||||
public void doWithAssociation(Association<MongoPersistentProperty> association) {
|
||||
|
||||
MongoPersistentProperty property = association.getInverse();
|
||||
final MongoPersistentProperty property = association.getInverse();
|
||||
Object value = dbo.get(property.getFieldName());
|
||||
|
||||
if (value == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
Object value = dbo.get(property.getName());
|
||||
DBRef dbref = value instanceof DBRef ? (DBRef) value : null;
Object obj = dbRefResolver.resolveDbRef(property, dbref, new DbRefResolverCallback() {
wrapper.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, new DbRefResolverCallback() {

@Override
public Object resolve(MongoPersistentProperty property) {
return getValueInternal(property, dbo, evaluator, parent);
}
});

wrapper.setProperty(property, obj);
}));
}
});

@@ -314,14 +322,17 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return;
}

boolean handledByCustomConverter = conversions.getCustomWriteTarget(obj.getClass(), DBObject.class) != null;
TypeInformation<? extends Object> type = ClassTypeInformation.from(obj.getClass());
Class<?> entityType = obj.getClass();
boolean handledByCustomConverter = conversions.getCustomWriteTarget(entityType, DBObject.class) != null;
TypeInformation<? extends Object> type = ClassTypeInformation.from(entityType);

if (!handledByCustomConverter && !(dbo instanceof BasicDBList)) {
typeMapper.writeType(type, dbo);
}

writeInternal(obj, dbo, type);
Object target = obj instanceof LazyLoadingProxy ? ((LazyLoadingProxy) obj).initialize() : obj;

writeInternal(target, dbo, type);
}

/**
@@ -337,7 +348,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return;
}

Class<?> customTarget = conversions.getCustomWriteTarget(obj.getClass(), DBObject.class);
Class<?> entityType = obj.getClass();
Class<?> customTarget = conversions.getCustomWriteTarget(entityType, DBObject.class);

if (customTarget != null) {
DBObject result = conversionService.convert(obj, DBObject.class);
@@ -345,17 +357,17 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return;
}

if (Map.class.isAssignableFrom(obj.getClass())) {
if (Map.class.isAssignableFrom(entityType)) {
writeMapInternal((Map<Object, Object>) obj, dbo, ClassTypeInformation.MAP);
return;
}

if (Collection.class.isAssignableFrom(obj.getClass())) {
if (Collection.class.isAssignableFrom(entityType)) {
writeCollectionInternal((Collection<?>) obj, ClassTypeInformation.LIST, (BasicDBList) dbo);
return;
}

MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(obj.getClass());
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityType);
writeInternal(obj, dbo, entity);
addCustomTypeKeyIfNecessary(typeHint, obj, dbo);
}
@@ -559,7 +571,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App

if (conversions.isSimpleType(key.getClass())) {

String simpleKey = potentiallyEscapeMapKey(key.toString());
String simpleKey = prepareMapKey(key.toString());
dbObject.put(simpleKey, value != null ? createDBRef(value, property) : null);

} else {
@@ -611,12 +623,13 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
protected DBObject writeMapInternal(Map<Object, Object> obj, DBObject dbo, TypeInformation<?> propertyType) {

for (Map.Entry<Object, Object> entry : obj.entrySet()) {

Object key = entry.getKey();
Object val = entry.getValue();

if (conversions.isSimpleType(key.getClass())) {
// Don't use conversion service here as removal of ObjectToString converter results in some primitive types not
// being convertable
String simpleKey = potentiallyEscapeMapKey(key.toString());

String simpleKey = prepareMapKey(key);
if (val == null || conversions.isSimpleType(val.getClass())) {
writeSimpleInternal(val, dbo, simpleKey);
} else if (val instanceof Collection || val.getClass().isArray()) {
@@ -637,6 +650,21 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return dbo;
}

/**
* Prepares the given {@link Map} key to be converted into a {@link String}. Will invoke potentially registered custom
* conversions and escape dots from the result as they're not supported as {@link Map} key in MongoDB.
*
* @param key must not be {@literal null}.
* @return
*/
private String prepareMapKey(Object key) {

Assert.notNull(key, "Map key must not be null!");

String convertedKey = potentiallyConvertMapKey(key);
return potentiallyEscapeMapKey(convertedKey);
}

/**
* Potentially replaces dots in the given map key with the configured map key replacement if configured or aborts
* conversion if none is configured.
@@ -659,6 +687,22 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return source.replaceAll("\\.", mapKeyDotReplacement);
}

/**
* Returns a {@link String} representation of the given {@link Map} key
*
* @param key
* @return
*/
private String potentiallyConvertMapKey(Object key) {

if (key instanceof String) {
return (String) key;
}

return conversions.hasCustomWriteTarget(key.getClass(), String.class) ? (String) getPotentiallyConvertedSimpleWrite(key)
: key.toString();
}

/**
* Translates the map key replacements in the given key just read with a dot in case a map key replacement has been
* configured.
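The two helpers just added split map-key handling into a conversion step and a dot-escaping step, since MongoDB does not allow dots in document field names. A minimal standalone sketch of the escaping contract (the replacement string is an assumption here; in the converter it comes from the configured mapKeyDotReplacement):

```java
// Hypothetical sketch of the dot-escaping contract described above; the real logic lives
// in MappingMongoConverter and is driven by the configured mapKeyDotReplacement.
public class MapKeyEscapeSketch {

	static String escapeMapKey(String key, String dotReplacement) {
		if (!key.contains(".")) {
			return key; // nothing to escape
		}
		if (dotReplacement == null) {
			throw new IllegalArgumentException("Map key '" + key + "' contains dots but no replacement is configured");
		}
		return key.replaceAll("\\.", dotReplacement);
	}

	public static void main(String[] args) {
		System.out.println(escapeMapKey("spring.data", "~")); // prints "spring~data"
	}
}
```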
@@ -682,10 +726,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App

TypeInformation<?> actualType = type != null ? type.getActualType() : null;
Class<?> reference = actualType == null ? Object.class : actualType.getType();
Class<?> valueType = ClassUtils.getUserClass(value.getClass());

boolean notTheSameClass = !value.getClass().equals(reference);
boolean notTheSameClass = !valueType.equals(reference);
if (notTheSameClass) {
typeMapper.writeType(value.getClass(), dbObject);
typeMapper.writeType(valueType, dbObject);
}
}

@@ -738,7 +783,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
@SuppressWarnings({ "rawtypes", "unchecked" })
private Object getPotentiallyConvertedSimpleRead(Object value, Class<?> target) {

if (value == null || target == null) {
if (value == null || target == null || target.isAssignableFrom(value.getClass())) {
return value;
}

@@ -750,7 +795,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return Enum.valueOf((Class<Enum>) target, value.toString());
}

return target.isAssignableFrom(value.getClass()) ? value : conversionService.convert(value, target);
return conversionService.convert(value, target);
}

protected DBRef createDBRef(Object target, MongoPersistentProperty property) {
@@ -791,11 +836,10 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
idMapper.convertId(id));
}

protected Object getValueInternal(MongoPersistentProperty prop, DBObject dbo, SpELExpressionEvaluator eval,
protected Object getValueInternal(MongoPersistentProperty prop, DBObject dbo, SpELExpressionEvaluator evaluator,
Object parent) {

MongoDbPropertyValueProvider provider = new MongoDbPropertyValueProvider(dbo, spELContext, parent);
return provider.getPropertyValue(prop);
return new MongoDbPropertyValueProvider(dbo, evaluator, parent).getPropertyValue(prop);
}

/**
@@ -924,7 +968,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return getPotentiallyConvertedSimpleWrite(obj);
}

TypeInformation<?> typeHint = typeInformation == null ? ClassTypeInformation.OBJECT : typeInformation;
TypeInformation<?> typeHint = typeInformation;

if (obj instanceof BasicDBList) {
return maybeConvertList((BasicDBList) obj, typeHint);
@@ -1012,17 +1056,27 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return dbObject;
}

/**
* {@link PropertyValueProvider} to evaluate a SpEL expression if present on the property or simply accesses the field
* of the configured source {@link DBObject}.
*
* @author Oliver Gierke
*/
private class MongoDbPropertyValueProvider implements PropertyValueProvider<MongoPersistentProperty> {

private final DBObjectAccessor source;
private final SpELExpressionEvaluator evaluator;
private final Object parent;

public MongoDbPropertyValueProvider(DBObject source, SpELContext factory, Object parent) {
this(source, new DefaultSpELExpressionEvaluator(source, factory), parent);
}

public MongoDbPropertyValueProvider(DBObject source, DefaultSpELExpressionEvaluator evaluator, Object parent) {
/**
* Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and
* parent object.
*
* @param source must not be {@literal null}.
* @param evaluator must not be {@literal null}.
* @param parent can be {@literal null}.
*/
public MongoDbPropertyValueProvider(DBObject source, SpELExpressionEvaluator evaluator, Object parent) {

Assert.notNull(source);
Assert.notNull(evaluator);

@@ -397,13 +397,20 @@ public class QueryMapper {
*/
public Object convertId(Object id) {

try {
return conversionService.convert(id, ObjectId.class);
} catch (ConversionException e) {
// Ignore
if (id == null) {
return null;
}

return delegateConvertToMongoType(id, null);
if (id instanceof String) {
return ObjectId.isValid(id.toString()) ? conversionService.convert(id, ObjectId.class) : id;
}

try {
return conversionService.canConvert(id.getClass(), ObjectId.class) ? conversionService
.convert(id, ObjectId.class) : delegateConvertToMongoType(id, null);
} catch (ConversionException o_O) {
return delegateConvertToMongoType(id, null);
}
}

/**

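The reworked convertId(Object) above now only converts String ids that are valid ObjectIds and leaves other String values untouched. A minimal sketch of that decision, assuming the MongoDB Java driver's org.bson.types.ObjectId is available (the ConversionService fallback used by QueryMapper is omitted):

```java
import org.bson.types.ObjectId;

// Illustrative only: mirrors the id-conversion decision shown in the hunk above.
public class IdConversionSketch {

	static Object convertId(Object id) {
		if (id == null) {
			return null;
		}
		if (id instanceof String) {
			// valid 24-character hex strings become ObjectIds, everything else stays a plain String
			return ObjectId.isValid(id.toString()) ? new ObjectId(id.toString()) : id;
		}
		return id;
	}

	public static void main(String[] args) {
		System.out.println(convertId("4711"));                     // stays "4711"
		System.out.println(convertId("54e5c48ce4b0d2b2a1c3f4a5")); // becomes an ObjectId
	}
}
```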
@@ -73,7 +73,43 @@ public @interface CompoundIndex {
boolean dropDups() default false;

/**
* The name of the index to be created.
* The name of the index to be created. <br />
* <br />
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
* provided name will be prefixed with the path leading to the entity. <br />
* <br />
* The structure below
*
* <pre>
* <code>
* @Document
* class Root {
* Hybrid hybrid;
* Nested nested;
* }
*
* @Document
* @CompoundIndex(name = "compound_index", def = "{'h1': 1, 'h2': 1}")
* class Hybrid {
* String h1, h2;
* }
*
* @CompoundIndex(name = "compound_index", def = "{'n1': 1, 'n2': 1}")
* class Nested {
* String n1, n2;
* }
* </code>
* </pre>
*
* resolves in the following index structures
*
* <pre>
* <code>
* db.root.ensureIndex( { hybrid.h1: 1, hybrid.h2: 1 } , { name: "hybrid.compound_index" } )
* db.root.ensureIndex( { nested.n1: 1, nested.n2: 1 } , { name: "nested.compound_index" } )
* db.hybrid.ensureIndex( { h1: 1, h2: 1 } , { name: "compound_index" } )
* </code>
* </pre>
*
* @return
*/
@@ -107,8 +143,11 @@ public @interface CompoundIndex {
/**
* Configures the number of seconds after which the collection should expire. Defaults to -1 for no expiry.
*
* @deprecated TTL cannot be defined for {@link CompoundIndex} having more than one field as key. Will be removed in
* 1.6.
* @see http://docs.mongodb.org/manual/tutorial/expire-data/
* @return
*/
@Deprecated
int expireAfterSeconds() default -1;
}

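Because expireAfterSeconds() is deprecated here (MongoDB only expires documents through single-field TTL indexes), a TTL is better declared on a single @Indexed property. A hedged example on a hypothetical domain class:

```java
import java.util.Date;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;

// Hypothetical entity: a single-field TTL index on 'createdAt' replaces the
// deprecated expireAfterSeconds on @CompoundIndex.
@Document
public class AuditEvent {

	@Id String id;

	@Indexed(expireAfterSeconds = 3600) // documents are removed roughly one hour after creation
	Date createdAt;
}
```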
@@ -32,7 +32,41 @@ import java.lang.annotation.Target;
public @interface GeoSpatialIndexed {

/**
* Name of the property in the document that contains the [x, y] or radial coordinates to index.
* Index name. <br />
* <br />
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
* provided name will be prefixed with the path leading to the entity. <br />
* <br />
* The structure below
*
* <pre>
* <code>
* @Document
* class Root {
* Hybrid hybrid;
* Nested nested;
* }
*
* @Document
* class Hybrid {
* @GeoSpatialIndexed(name="index") Point h1;
* }
*
* class Nested {
* @GeoSpatialIndexed(name="index") Point n1;
* }
* </code>
* </pre>
*
* resolves in the following index structures
*
* <pre>
* <code>
* db.root.ensureIndex( { hybrid.h1: "2d" } , { name: "hybrid.index" } )
* db.root.ensureIndex( { nested.n1: "2d" } , { name: "nested.index" } )
* db.hybrid.ensureIndex( { h1: "2d" } , { name: "index" } )
* </code>
* </pre>
*
* @return
*/

@@ -58,7 +58,41 @@ public @interface Indexed {
boolean dropDups() default false;

/**
* Index name.
* Index name. <br />
* <br />
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
* provided name will be prefixed with the path leading to the entity. <br />
* <br />
* The structure below
*
* <pre>
* <code>
* @Document
* class Root {
* Hybrid hybrid;
* Nested nested;
* }
*
* @Document
* class Hybrid {
* @Indexed(name="index") String h1;
* }
*
* class Nested {
* @Indexed(name="index") String n1;
* }
* </code>
* </pre>
*
* resolves in the following index structures
*
* <pre>
* <code>
* db.root.ensureIndex( { hybrid.h1: 1 } , { name: "hybrid.index" } )
* db.root.ensureIndex( { nested.n1: 1 } , { name: "nested.index" } )
* db.hybrid.ensureIndex( { h1: 1} , { name: "index" } )
* </code>
* </pre>
*
* @return
*/

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -60,4 +60,10 @@ public class MongoMappingEventPublisher implements ApplicationEventPublisher {
indexCreator.onApplicationEvent((MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>) event);
}
}

/*
* (non-Javadoc)
* @see org.springframework.context.ApplicationEventPublisher#publishEvent(java.lang.Object)
*/
public void publishEvent(Object event) {}
}

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2014 the original author or authors.
* Copyright 2011-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -29,7 +29,6 @@ import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexRes
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.util.Assert;

/**
@@ -43,8 +42,7 @@ import org.springframework.util.Assert;
* @author Laurent Canet
* @author Christoph Strobl
*/
public class MongoPersistentEntityIndexCreator implements
ApplicationListener<MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>> {
public class MongoPersistentEntityIndexCreator implements ApplicationListener<MappingContextEvent<?, ?>> {

private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class);

@@ -54,7 +52,7 @@ public class MongoPersistentEntityIndexCreator implements
private final IndexResolver indexResolver;

/**
* Creats a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
* Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
* {@link MongoDbFactory}.
*
* @param mappingContext must not be {@literal null}.
@@ -65,7 +63,7 @@ public class MongoPersistentEntityIndexCreator implements
}

/**
* Creats a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
* Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
* {@link MongoDbFactory}.
*
* @param mappingContext must not be {@literal null}.
@@ -92,7 +90,7 @@ public class MongoPersistentEntityIndexCreator implements
* (non-Javadoc)
* @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent)
*/
public void onApplicationEvent(MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty> event) {
public void onApplicationEvent(MappingContextEvent<?, ?> event) {

if (!event.wasEmittedBy(mappingContext)) {
return;
@@ -102,7 +100,7 @@ public class MongoPersistentEntityIndexCreator implements

// Double check type as Spring infrastructure does not consider nested generics
if (entity instanceof MongoPersistentEntity) {
checkForIndexes(event.getPersistentEntity());
checkForIndexes((MongoPersistentEntity<?>) entity);
}
}

@@ -132,8 +130,8 @@ public class MongoPersistentEntityIndexCreator implements
}

private void createIndex(IndexDefinitionHolder indexDefinition) {
mongoDbFactory.getDb().getCollection(indexDefinition.getCollection())
.createIndex(indexDefinition.getIndexKeys(), indexDefinition.getIndexOptions());
mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()).createIndex(indexDefinition.getIndexKeys(),
indexDefinition.getIndexOptions());
}

/**

@@ -1,5 +1,5 @@
/*
* Copyright 2014 the original author or authors.
* Copyright 2014-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -21,12 +21,9 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.data.domain.Sort;
import org.springframework.data.mapping.PropertyHandler;
@@ -94,7 +91,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
Assert.notNull(document, "Given entity is not collection root.");

final List<IndexDefinitionHolder> indexInformation = new ArrayList<MongoPersistentEntityIndexResolver.IndexDefinitionHolder>();
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", root.getCollection(), root.getType()));
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", root.getCollection(), root));

final CycleGuard guard = new CycleGuard();

@@ -103,15 +100,19 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
@Override
public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) {

if (persistentProperty.isEntity()) {
indexInformation.addAll(resolveIndexForClass(persistentProperty.getActualType(),
persistentProperty.getFieldName(), root.getCollection(), guard));
}
try {
if (persistentProperty.isEntity()) {
indexInformation.addAll(resolveIndexForClass(persistentProperty.getActualType(),
persistentProperty.getFieldName(), root.getCollection(), guard));
}

IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(
persistentProperty.getFieldName(), root.getCollection(), persistentProperty);
if (indexDefinitionHolder != null) {
indexInformation.add(indexDefinitionHolder);
IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(
persistentProperty.getFieldName(), root.getCollection(), persistentProperty);
if (indexDefinitionHolder != null) {
indexInformation.add(indexDefinitionHolder);
}
} catch (CyclicPropertyReferenceException e) {
LOGGER.warn(e.getMessage());
}
}
});
@@ -131,10 +132,11 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
private List<IndexDefinitionHolder> resolveIndexForClass(final Class<?> type, final String path,
final String collection, final CycleGuard guard) {

final List<IndexDefinitionHolder> indexInformation = new ArrayList<MongoPersistentEntityIndexResolver.IndexDefinitionHolder>();
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions(path, collection, type));

MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(type);

final List<IndexDefinitionHolder> indexInformation = new ArrayList<MongoPersistentEntityIndexResolver.IndexDefinitionHolder>();
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions(path, collection, entity));

entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {

@Override
@@ -176,14 +178,13 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
}

private List<IndexDefinitionHolder> potentiallyCreateCompoundIndexDefinitions(String dotPath, String collection,
Class<?> type) {
MongoPersistentEntity<?> entity) {

if (AnnotationUtils.findAnnotation(type, CompoundIndexes.class) == null
&& AnnotationUtils.findAnnotation(type, CompoundIndex.class) == null) {
if (entity.findAnnotation(CompoundIndexes.class) == null && entity.findAnnotation(CompoundIndex.class) == null) {
return Collections.emptyList();
}

return createCompoundIndexDefinitions(dotPath, collection, type);
return createCompoundIndexDefinitions(dotPath, collection, entity);
}

/**
@@ -195,34 +196,35 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
* @return
*/
protected List<IndexDefinitionHolder> createCompoundIndexDefinitions(String dotPath, String fallbackCollection,
Class<?> type) {
MongoPersistentEntity<?> entity) {

List<IndexDefinitionHolder> indexDefinitions = new ArrayList<MongoPersistentEntityIndexResolver.IndexDefinitionHolder>();
CompoundIndexes indexes = AnnotationUtils.findAnnotation(type, CompoundIndexes.class);
CompoundIndexes indexes = entity.findAnnotation(CompoundIndexes.class);

if (indexes != null) {
for (CompoundIndex index : indexes.value()) {
indexDefinitions.add(createCompoundIndexDefinition(dotPath, fallbackCollection, index));
indexDefinitions.add(createCompoundIndexDefinition(dotPath, fallbackCollection, index, entity));
}
}

CompoundIndex index = AnnotationUtils.findAnnotation(type, CompoundIndex.class);
CompoundIndex index = entity.findAnnotation(CompoundIndex.class);

if (index != null) {
indexDefinitions.add(createCompoundIndexDefinition(dotPath, fallbackCollection, index));
indexDefinitions.add(createCompoundIndexDefinition(dotPath, fallbackCollection, index, entity));
}

return indexDefinitions;
}

@SuppressWarnings("deprecation")
protected IndexDefinitionHolder createCompoundIndexDefinition(String dotPath, String fallbackCollection,
CompoundIndex index) {
CompoundIndex index, MongoPersistentEntity<?> entity) {

CompoundIndexDefinition indexDefinition = new CompoundIndexDefinition(resolveCompoundIndexKeyFromStringDefinition(
dotPath, index.def()));

if (!index.useGeneratedName()) {
indexDefinition.named(index.name());
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, null));
}

if (index.unique()) {
@@ -237,8 +239,14 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
indexDefinition.background();
}

if (index.expireAfterSeconds() >= 0) {
indexDefinition.expire(index.expireAfterSeconds(), TimeUnit.SECONDS);
int ttl = index.expireAfterSeconds();

if (ttl >= 0) {
if (indexDefinition.getIndexKeys().keySet().size() > 1) {
LOGGER.warn("TTL is not supported for compound index with more than one key. TTL={} will be ignored.", ttl);
} else {
indexDefinition.expire(ttl, TimeUnit.SECONDS);
}
}

String collection = StringUtils.hasText(index.collection()) ? index.collection() : fallbackCollection;
@@ -287,7 +295,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
IndexDirection.ASCENDING.equals(index.direction()) ? Sort.Direction.ASC : Sort.Direction.DESC);

if (!index.useGeneratedName()) {
indexDefinition.named(StringUtils.hasText(index.name()) ? index.name() : dotPath);
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persitentProperty));
}

if (index.unique()) {
@@ -329,7 +337,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
indexDefinition.withMin(index.min()).withMax(index.max());

if (!index.useGeneratedName()) {
indexDefinition.named(StringUtils.hasText(index.name()) ? index.name() : persistentProperty.getName());
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persistentProperty));
}

indexDefinition.typed(index.type()).withBucketSize(index.bucketSize()).withAdditionalField(index.additionalField());
@@ -337,13 +345,30 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
}

private String pathAwareIndexName(String indexName, String dotPath, MongoPersistentProperty property) {

String nameToUse = StringUtils.hasText(indexName) ? indexName : "";

if (!StringUtils.hasText(dotPath) || (property != null && dotPath.equals(property.getFieldName()))) {
return StringUtils.hasText(nameToUse) ? nameToUse : dotPath;
}

if (StringUtils.hasText(dotPath)) {

nameToUse = StringUtils.hasText(nameToUse) ? (property != null ? dotPath.replace("." + property.getFieldName(),
"") : dotPath) + "." + nameToUse : dotPath;
}
return nameToUse;

}

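pathAwareIndexName(...) implements the naming rule documented in the @CompoundIndex and @Indexed Javadoc above. A simplified standalone sketch of that rule (assumed behaviour; the real method additionally strips the property's own field name from the path):

```java
// Simplified sketch: root-level indexes keep their configured name,
// nested ones are prefixed with the dot path leading to the entity.
public class IndexNameSketch {

	static String pathAwareIndexName(String indexName, String dotPath) {
		boolean hasName = indexName != null && !indexName.isEmpty();
		boolean hasPath = dotPath != null && !dotPath.isEmpty();

		if (!hasPath) {
			return hasName ? indexName : "";
		}
		return hasName ? dotPath + "." + indexName : dotPath;
	}

	public static void main(String[] args) {
		System.out.println(pathAwareIndexName("compound_index", ""));       // "compound_index"
		System.out.println(pathAwareIndexName("compound_index", "nested")); // "nested.compound_index"
	}
}
```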
/**
* {@link CycleGuard} holds information about properties and the paths for accessing those. This information is used
* to detect potential cycles within the references.
*
* @author Christoph Strobl
*/
private static class CycleGuard {
static class CycleGuard {

private final Map<String, List<Path>> propertyTypeMap;

@@ -355,6 +380,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
* @param property The property to inspect
* @param path The path under which the property can be reached.
* @throws CyclicPropertyReferenceException in case a potential cycle is detected.
* @see Path#cycles(MongoPersistentProperty, String)
*/
void protect(MongoPersistentProperty property, String path) throws CyclicPropertyReferenceException {

@@ -365,15 +391,15 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {

for (Path existingPath : paths) {

if (existingPath.cycles(property)) {
if (existingPath.cycles(property, path) && property.isEntity()) {
paths.add(new Path(property, path));

throw new CyclicPropertyReferenceException(property.getFieldName(), property.getOwner().getType(),
existingPath.getPath());
}
}

paths.add(new Path(property, path));

} else {

ArrayList<Path> paths = new ArrayList<Path>();
@@ -386,7 +412,30 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
return property.getOwner().getType().getSimpleName() + ":" + property.getFieldName();
}

private static class Path {
/**
* Path defines the property and its full path from the document root. <br />
* A {@link Path} with {@literal spring.data.mongodb} would be created for the property {@code Three.mongodb}.
*
* <pre>
* <code>
* @Document
* class One {
* Two spring;
* }
*
* class Two {
* Three data;
* }
*
* class Three {
* String mongodb;
* }
* </code>
* </pre>
*
* @author Christoph Strobl
*/
static class Path {

private final MongoPersistentProperty property;
private final String path;
@@ -401,17 +450,23 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
return path;
}

boolean cycles(MongoPersistentProperty property) {
/**
* Checks whether the given property is owned by the same entity and if it has been already visited by a subset of
* the current path. Given {@literal foo.bar.bar} cycles if {@literal foo.bar} has already been visited and
* {@code class Bar} contains a property of type {@code Bar}. The previously mentioned path would not cycle if
* {@code class Bar} contained a property of type {@code SomeEntity} named {@literal bar}.
*
* @param property
* @param path
* @return
*/
boolean cycles(MongoPersistentProperty property, String path) {

Pattern pattern = Pattern.compile("\\p{Punct}?" + Pattern.quote(property.getFieldName()) + "(\\p{Punct}|\\w)?");
Matcher matcher = pattern.matcher(path);

int count = 0;
while (matcher.find()) {
count++;
if (!property.getOwner().equals(this.property.getOwner())) {
return false;
}

return count >= 1 && property.getOwner().getType().equals(this.property.getOwner().getType());
return path.equals(this.path) || path.contains(this.path + ".") || path.contains("." + this.path);
}
}
}
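The rewritten Path#cycles(...) replaces the regex-count heuristic with a plain path-containment check. A standalone sketch of that containment test (the ownership comparison of the two properties is left out):

```java
// Illustrative sketch of the path-containment check used by Path#cycles above.
public class CycleCheckSketch {

	static boolean cycles(String visitedPath, String newPath) {
		return newPath.equals(visitedPath)
				|| newPath.contains(visitedPath + ".")
				|| newPath.contains("." + visitedPath);
	}

	public static void main(String[] args) {
		System.out.println(cycles("foo.bar", "foo.bar.bar"));  // true  -> potential cycle
		System.out.println(cycles("foo.bar", "foo.barOther")); // false -> no cycle
	}
}
```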
@@ -441,8 +496,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
*/
@Override
public String getMessage() {
return String.format("Found cycle for field '%s' in type '%s' for path '%s'", propertyName, type.getSimpleName(),
dotPath);
return String.format("Found cycle for field '%s' in type '%s' for path '%s'", propertyName,
type != null ? type.getSimpleName() : "unknown", dotPath);
}
}


@@ -51,7 +51,7 @@ import org.springframework.util.StringUtils;
public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, MongoPersistentProperty> implements
MongoPersistentEntity<T>, ApplicationContextAware {

private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected! Both %s and %s map to the same field name %s! Disambiguate using @DocumentField annotation!";
private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected! Both %s and %s map to the same field name %s! Disambiguate using @Field annotation!";
private final String collection;
private final SpelExpressionParser parser;
private final StandardEvaluationContext context;

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -30,6 +30,8 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
private Boolean isIdProperty;
private Boolean isAssociation;
private String fieldName;
private Boolean usePropertyAccess;
private Boolean isTransient;

/**
* Creates a new {@link CachingMongoPersistentProperty}.
@@ -84,4 +86,32 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty

return this.fieldName;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mapping.model.AnnotationBasedPersistentProperty#usePropertyAccess()
*/
@Override
public boolean usePropertyAccess() {

if (this.usePropertyAccess == null) {
this.usePropertyAccess = super.usePropertyAccess();
}

return this.usePropertyAccess;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mapping.model.AnnotationBasedPersistentProperty#isTransient()
*/
@Override
public boolean isTransient() {

if (this.isTransient == null) {
this.isTransient = super.isTransient();
}

return this.isTransient;
}
}

@@ -44,7 +44,7 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
int getFieldOrder();

/**
* Returns whether the propert is a {@link com.mongodb.DBRef}. If this returns {@literal true} you can expect
* Returns whether the property is a {@link com.mongodb.DBRef}. If this returns {@literal true} you can expect
* {@link #getDBRef()} to return an non-{@literal null} value.
*
* @return

@@ -15,6 +15,7 @@
*/
package org.springframework.data.mongodb.core.query;

import java.util.Arrays;
import java.util.Collections;

import com.mongodb.BasicDBObject;
@@ -87,12 +88,8 @@ public class BasicUpdate extends Update {

@Override
public Update pullAll(String key, Object[] values) {
Object[] convertedValues = new Object[values.length];
for (int i = 0; i < values.length; i++) {
convertedValues[i] = values[i];
}
DBObject keyValue = new BasicDBObject();
keyValue.put(key, convertedValues);
keyValue.put(key, Arrays.copyOf(values, values.length));
updateObject.put("$pullAll", keyValue);
return this;
}

@@ -31,7 +31,9 @@ import org.springframework.data.geo.Shape;
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
import org.springframework.data.mongodb.core.geo.Sphere;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
@@ -529,8 +531,11 @@ public class Criteria implements CriteriaDefinition {
* @see org.springframework.data.mongodb.core.query.CriteriaDefinition#getCriteriaObject()
*/
public DBObject getCriteriaObject() {

if (this.criteriaChain.size() == 1) {
return criteriaChain.get(0).getSingleCriteriaObject();
} else if (CollectionUtils.isEmpty(this.criteriaChain) && !CollectionUtils.isEmpty(this.criteria)) {
return getSingleCriteriaObject();
} else {
DBObject criteriaObject = new BasicDBObject();
for (Criteria c : this.criteriaChain) {
@@ -564,6 +569,13 @@ public class Criteria implements CriteriaDefinition {
}
}

if (!StringUtils.hasText(this.key)) {
if (not) {
return new BasicDBObject("$not", dbo);
}
return dbo;
}

DBObject queryCriteria = new BasicDBObject();

if (!NOT_SET.equals(isValue)) {

@@ -166,7 +166,7 @@ public class Query {

for (Order order : sort) {
if (order.isIgnoreCase()) {
throw new IllegalArgumentException(String.format("Gven sort contained an Order for %s with ignore case! "
throw new IllegalArgumentException(String.format("Given sort contained an Order for %s with ignore case! "
+ "MongoDB does not support sorting ignoreing case currently!", order.getProperty()));
}
}

@@ -15,10 +15,11 @@
*/
package org.springframework.data.mongodb.core.query;

import static org.springframework.util.ObjectUtils.*;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
@@ -40,6 +41,7 @@ import com.mongodb.DBObject;
* @author Oliver Gierke
* @author Becca Gaspard
* @author Christoph Strobl
* @author Thomas Darimont
*/
public class Update {

@@ -62,7 +64,7 @@ public class Update {
}

/**
* Creates an {@link Update} instance from the given {@link DBObject}. Allows to explicitly exlude fields from making
* Creates an {@link Update} instance from the given {@link DBObject}. Allows to explicitly exclude fields from making
* it into the created {@link Update} object. Note, that this will set attributes directly and <em>not</em> use
* {@literal $set}. This means fields not given in the {@link DBObject} will be nulled when executing the update. To
* create an only-updating {@link Update} instance of a {@link DBObject}, call {@link #set(String, Object)} for each
@@ -187,12 +189,7 @@ public class Update {
* @return
*/
public Update pushAll(String key, Object[] values) {

Object[] convertedValues = new Object[values.length];
for (int i = 0; i < values.length; i++) {
convertedValues[i] = values[i];
}
addMultiFieldOperation("$pushAll", key, convertedValues);
addMultiFieldOperation("$pushAll", key, Arrays.copyOf(values, values.length));
return this;
}

@@ -256,12 +253,7 @@ public class Update {
* @return
*/
public Update pullAll(String key, Object[] values) {

Object[] convertedValues = new Object[values.length];
for (int i = 0; i < values.length; i++) {
convertedValues[i] = values[i];
}
addFieldOperation("$pullAll", key, convertedValues);
addFieldOperation("$pullAll", key, Arrays.copyOf(values, values.length));
return this;
}

@@ -279,6 +271,7 @@ public class Update {
}

public DBObject getUpdateObject() {

DBObject dbo = new BasicDBObject();
for (String k : modifierOps.keySet()) {
dbo.put(k, modifierOps.get(k));
@@ -335,14 +328,52 @@ public class Update {
return StringUtils.startsWithIgnoreCase(key, "$");
}

/*
* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
return getUpdateObject().hashCode();
}

/*
* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {

if (this == obj) {
return true;
}

if (obj == null || getClass() != obj.getClass()) {
return false;
}

Update that = (Update) obj;
return this.getUpdateObject().equals(that.getUpdateObject());
}

/*
* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return SerializationUtils.serializeToJsonSafely(getUpdateObject());
}

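With the value-based equals(...)/hashCode() and the toString() added above, two Update instances describing the same modification compare equal and render their update document. A quick usage sketch against the public Update API:

```java
import org.springframework.data.mongodb.core.query.Update;

// Two structurally identical updates are now equal and share a hash code;
// toString() prints the resulting update document, e.g. a $set on 'name'.
public class UpdateEqualitySketch {

	public static void main(String[] args) {

		Update first = new Update().set("name", "spring");
		Update second = new Update().set("name", "spring");

		System.out.println(first.equals(second)); // true
		System.out.println(first);                // serialized update document
	}
}
```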
/**
* Modifiers holds a distinct collection of {@link Modifier}
*
* @author Christoph Strobl
* @author Thomas Darimont
*/
public static class Modifiers {

private HashMap<String, Modifier> modifiers;
private Map<String, Modifier> modifiers;

public Modifiers() {
this.modifiers = new LinkedHashMap<String, Modifier>(1);
@@ -355,6 +386,33 @@ public class Update {
public void addModifier(Modifier modifier) {
this.modifiers.put(modifier.getKey(), modifier);
}

/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
return nullSafeHashCode(modifiers);
}

/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {

if (this == obj) {
return true;
}

if (obj == null || getClass() != obj.getClass()) {
return false;
}

Modifiers that = (Modifiers) obj;

return this.modifiers.equals(that.modifiers);
}
}

/**
@@ -379,6 +437,7 @@ public class Update {
* Implementation of {@link Modifier} representing {@code $each}.
*
* @author Christoph Strobl
* @author Thomas Darimont
*/
private static class Each implements Modifier {

@@ -398,29 +457,60 @@ public class Update {
return ((Collection<?>) values[0]).toArray();
}

Object[] convertedValues = new Object[values.length];
for (int i = 0; i < values.length; i++) {
convertedValues[i] = values[i];
}

return convertedValues;
return Arrays.copyOf(values, values.length);
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.query.Update.Modifier#getKey()
*/
@Override
public String getKey() {
return "$each";
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.query.Update.Modifier#getValue()
*/
@Override
public Object getValue() {
return this.values;
}

/*
* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
return nullSafeHashCode(values);
}

/*
* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object that) {

if (this == that) {
return true;
}

if (that == null || getClass() != that.getClass()) {
return false;
}

return nullSafeEquals(values, ((Each) that).values);
}
}

/**
* Builder for creating {@code $push} modifiers
*
* @author Christoph Strobl
* @author Thomas Darimont
*/
public class PushOperatorBuilder {

@@ -453,6 +543,50 @@ public class Update {
public Update value(Object value) {
return Update.this.push(key, value);
}

/*
* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {

int result = 17;

result += 31 * result + getOuterType().hashCode();
result += 31 * result + nullSafeHashCode(key);
result += 31 * result + nullSafeHashCode(modifiers);

return result;
}

/*
* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {

if (this == obj) {
return true;
}

if (obj == null || getClass() != obj.getClass()) {
return false;
}

PushOperatorBuilder that = (PushOperatorBuilder) obj;

if (!getOuterType().equals(that.getOuterType())) {
return false;
}

return nullSafeEquals(this.key, that.key) && nullSafeEquals(this.modifiers, that.modifiers);
}

private Update getOuterType() {
return Update.this;
}
}

/**
@@ -488,7 +622,5 @@ public class Update {
public Update value(Object value) {
return Update.this.addToSet(this.key, value);
}

}

}

@@ -38,7 +38,7 @@ import org.springframework.data.annotation.QueryAnnotation;
public @interface Query {

/**
* Takes a MongoDB JSON string to define the actual query to be executed. This one will take precendece over the
* Takes a MongoDB JSON string to define the actual query to be executed. This one will take precedence over the
* method name then.
*
* @return

@@ -284,7 +284,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {

MongoEntityMetadata<?> metadata = method.getEntityInformation();
return countProjection ? operations.count(query, metadata.getJavaType()) : operations.findOne(query,
metadata.getJavaType());
metadata.getJavaType(), metadata.getCollectionName());
}
}

@@ -388,10 +388,10 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
private Object deleteAndConvertResult(Query query, MongoEntityMetadata<?> metadata) {

if (method.isCollectionQuery()) {
return operations.findAllAndRemove(query, metadata.getJavaType());
return operations.findAllAndRemove(query, metadata.getJavaType(), metadata.getCollectionName());
}

WriteResult writeResult = operations.remove(query, metadata.getCollectionName());
WriteResult writeResult = operations.remove(query, metadata.getJavaType(), metadata.getCollectionName());
return writeResult != null ? writeResult.getN() : 0L;
}
}

@@ -1,5 +1,5 @@
/*
* Copyright 2010-2013 the original author or authors.
* Copyright 2010-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,13 +20,16 @@ import static org.springframework.data.mongodb.core.query.Criteria.*;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.regex.Pattern;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Sort;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.Metrics;
import org.springframework.data.geo.Point;
import org.springframework.data.geo.Shape;
import org.springframework.data.mapping.PropertyPath;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.context.PersistentPropertyPath;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
@@ -40,16 +43,19 @@ import org.springframework.data.repository.query.parser.Part.IgnoreCaseType;
import org.springframework.data.repository.query.parser.Part.Type;
import org.springframework.data.repository.query.parser.PartTree;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;

/**
* Custom query creator to create Mongo criterias.
*
* @author Oliver Gierke
* @author Thomas Darimont
* @author Christoph Strobl
*/
class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {

private static final Logger LOG = LoggerFactory.getLogger(MongoQueryCreator.class);
private static final Pattern PUNCTATION_PATTERN = Pattern.compile("\\p{Punct}");
private final MongoParameterAccessor accessor;
private final boolean isGeoNearQuery;

@@ -198,7 +204,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
case STARTING_WITH:
case ENDING_WITH:
case CONTAINING:
return addAppropriateLikeRegexTo(criteria, part, parameters.next().toString());
return createContainingCriteria(part, property, criteria, parameters);
case REGEX:
return criteria.regex(parameters.next().toString());
case EXISTS:
@@ -216,7 +222,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
if (distance == null) {
return criteria.near(point);
} else {
if (distance.getMetric() != null) {
if (!Metrics.NEUTRAL.equals(distance.getMetric())) {
criteria.nearSphere(point);
} else {
criteria.near(point);
@@ -269,19 +275,23 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
private Criteria createLikeRegexCriteriaOrThrow(Part part, MongoPersistentProperty property, Criteria criteria,
PotentiallyConvertingIterator parameters, boolean shouldNegateExpression) {

PropertyPath path = part.getProperty().getLeafProperty();

switch (part.shouldIgnoreCase()) {

case ALWAYS:
if (part.getProperty().getType() != String.class) {
throw new IllegalArgumentException(String.format("part %s must be of type String but was %s",
part.getProperty(), part.getType()));
if (path.getType() != String.class) {
throw new IllegalArgumentException(
String.format("Part %s must be of type String but was %s", path, path.getType()));
}
// fall-through

case WHEN_POSSIBLE:

if (shouldNegateExpression) {
criteria = criteria.not();
}

return addAppropriateLikeRegexTo(criteria, part, parameters.nextConverted(property).toString());

case NEVER:
@@ -292,6 +302,27 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
Arrays.asList(IgnoreCaseType.ALWAYS, IgnoreCaseType.WHEN_POSSIBLE), part.shouldIgnoreCase()));
}

/**
* If the target property of the comparison is of type String, then the operator checks for match using regular
* expression. If the target property of the comparison is a {@link Collection} then the operator evaluates to true if
* it finds an exact match within any member of the {@link Collection}.
*
* @param part
* @param property
* @param criteria
* @param parameters
* @return
*/
private Criteria createContainingCriteria(Part part, MongoPersistentProperty property, Criteria criteria,
PotentiallyConvertingIterator parameters) {

if (property.isCollectionLike()) {
return criteria.in(nextAsArray(parameters, property));
}

return addAppropriateLikeRegexTo(criteria, part, parameters.next().toString());
}

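createContainingCriteria(...) makes a derived Containing query translate to $in for collection-like properties and to a like-regex for String properties. A hedged repository sketch with a hypothetical Person document:

```java
import java.util.List;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.repository.MongoRepository;

// Hypothetical domain type and repository, only to illustrate the two translations.
@Document
class Person {
	@Id String id;
	String lastname;
	List<String> nicknames;
}

// 'Containing' on the collection property maps to $in, on the String property to a like-regex.
interface PersonRepository extends MongoRepository<Person, String> {

	List<Person> findByNicknamesContaining(String nickname);

	List<Person> findByLastnameContaining(String fragment);
}
```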
/**
* Creates an appropriate like-regex and appends it to the given criteria.
*
@@ -337,8 +368,8 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
return (T) parameter;
}

throw new IllegalArgumentException(String.format("Expected parameter type of %s but got %s!", type,
parameter.getClass()));
throw new IllegalArgumentException(
String.format("Expected parameter type of %s but got %s!", type, parameter.getClass()));
}

private Object[] nextAsArray(PotentiallyConvertingIterator iterator, MongoPersistentProperty property) {
@@ -356,23 +387,57 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
private String toLikeRegex(String source, Part part) {

Type type = part.getType();
String regex = prepareAndEscapeStringBeforeApplyingLikeRegex(source, part);

switch (type) {
case STARTING_WITH:
source = "^" + source;
regex = "^" + regex;
break;
case ENDING_WITH:
source = source + "$";
regex = regex + "$";
break;
case CONTAINING:
source = "*" + source + "*";
regex = ".*" + regex + ".*";
break;
case SIMPLE_PROPERTY:
case NEGATING_SIMPLE_PROPERTY:
source = "^" + source + "$";
regex = "^" + regex + "$";
default:
}

return source.replaceAll("\\*", ".*");
return regex;
}

private String prepareAndEscapeStringBeforeApplyingLikeRegex(String source, Part qpart) {

if (!ObjectUtils.nullSafeEquals(Type.LIKE, qpart.getType())) {
return PUNCTATION_PATTERN.matcher(source).find() ? Pattern.quote(source) : source;
}

if (source.equals("*")) {
return ".*";
}

StringBuilder sb = new StringBuilder();

boolean leadingWildcard = source.startsWith("*");
boolean trailingWildcard = source.endsWith("*");

String valueToUse = source.substring(leadingWildcard ? 1 : 0,
trailingWildcard ? source.length() - 1 : source.length());

if (PUNCTATION_PATTERN.matcher(valueToUse).find()) {
valueToUse = Pattern.quote(valueToUse);
}

if (leadingWildcard) {
sb.append(".*");
}
sb.append(valueToUse);
if (trailingWildcard) {
sb.append(".*");
}

return sb.toString();
}
}

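prepareAndEscapeStringBeforeApplyingLikeRegex(...) quotes bound values that contain regex punctuation before the wildcard handling kicks in. A minimal sketch of that quoting step:

```java
import java.util.regex.Pattern;

// Sketch of the escaping idea: punctuation in the bound value is quoted via Pattern.quote
// so that it is matched literally inside the generated like-regex.
public class LikeRegexEscapeSketch {

	private static final Pattern PUNCTUATION = Pattern.compile("\\p{Punct}");

	static String escape(String source) {
		return PUNCTUATION.matcher(source).find() ? Pattern.quote(source) : source;
	}

	public static void main(String[] args) {
		System.out.println(escape("firstname"));  // firstname
		System.out.println(escape("first.name")); // \Qfirst.name\E
	}
}
```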
@@ -126,8 +126,7 @@ public class MongoQueryMethod extends QueryMethod {
MongoPersistentEntity<?> collectionEntity = domainClass.isAssignableFrom(returnedObjectType) ? returnedEntity
: managedEntity;

this.metadata = new SimpleMongoEntityMetadata<Object>((Class<Object>) returnedEntity.getType(),
collectionEntity.getCollection());
this.metadata = new SimpleMongoEntityMetadata<Object>((Class<Object>) returnedEntity.getType(), collectionEntity);
}

return this.metadata;

@@ -1,5 +1,5 @@
/*
* Copyright 2002-2014 the original author or authors.
* Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -19,10 +19,14 @@ import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.repository.query.QueryMethod;
import org.springframework.data.repository.query.RepositoryQuery;
import org.springframework.data.repository.query.parser.PartTree;
import org.springframework.util.StringUtils;

import com.mongodb.util.JSONParseException;

/**
* {@link RepositoryQuery} implementation for Mongo.
@@ -67,7 +71,24 @@ public class PartTreeMongoQuery extends AbstractMongoQuery {
protected Query createQuery(ConvertingParameterAccessor accessor) {

MongoQueryCreator creator = new MongoQueryCreator(tree, accessor, context, isGeoNearQuery);
return creator.createQuery();
Query query = creator.createQuery();

String fieldSpec = this.getQueryMethod().getFieldSpecification();

if (!StringUtils.hasText(fieldSpec)) {
return query;
}

try {

BasicQuery result = new BasicQuery(query.getQueryObject().toString(), fieldSpec);
result.setSortObject(query.getSortObject());
return result;

} catch (JSONParseException o_O) {
throw new IllegalStateException(String.format("Invalid query or field specification in %s!", getQueryMethod()),
o_O);
}
}

/*

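The change above lets a derived (part-tree) query honour the fields attribute of @Query, so projections can be combined with method-name derived queries. A hedged example, reusing the hypothetical Person document sketched earlier:

```java
import java.util.List;

import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.Query;

// Hypothetical repository: the query itself is derived from the method name,
// while the fields attribute restricts the returned documents to 'firstname'.
interface PersonProjectionRepository extends MongoRepository<Person, String> {

	@Query(fields = "{ 'firstname' : 1 }")
	List<Person> findByLastname(String lastname);
}
```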
@@ -15,6 +15,7 @@
*/
package org.springframework.data.mongodb.repository.query;

import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.util.Assert;

/**
@@ -25,21 +26,22 @@ import org.springframework.util.Assert;
class SimpleMongoEntityMetadata<T> implements MongoEntityMetadata<T> {

private final Class<T> type;
private final String collectionName;
private final MongoPersistentEntity<?> collectionEntity;

/**
* Creates a new {@link SimpleMongoEntityMetadata} using the given type and collection name.
* Creates a new {@link SimpleMongoEntityMetadata} using the given type and {@link MongoPersistentEntity} to use for
* collection lookups.
*
* @param type must not be {@literal null}.
* @param collectionName must not be {@literal null} or empty.
* @param collectionEntity must not be {@literal null} or empty.
*/
public SimpleMongoEntityMetadata(Class<T> type, String collectionName) {
public SimpleMongoEntityMetadata(Class<T> type, MongoPersistentEntity<?> collectionEntity) {

Assert.notNull(type, "Type must not be null!");
Assert.hasText(collectionName, "Collection name must not be null or empty!");
Assert.notNull(collectionEntity, "Collection entity must not be null or empty!");

this.type = type;
this.collectionName = collectionName;
this.collectionEntity = collectionEntity;
}

/*
@@ -55,6 +57,6 @@ class SimpleMongoEntityMetadata<T> implements MongoEntityMetadata<T> {
* @see org.springframework.data.mongodb.repository.query.MongoEntityMetadata#getCollectionName()
*/
public String getCollectionName() {
return collectionName;
return collectionEntity.getCollection();
}
}
@@ -15,6 +15,9 @@
*/
package org.springframework.data.mongodb.repository.query;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

@@ -23,7 +26,9 @@ import org.slf4j.LoggerFactory;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.util.StringUtils;

import com.mongodb.DBObject;
import com.mongodb.util.JSON;

/**
@@ -31,17 +36,20 @@ import com.mongodb.util.JSON;
*
* @author Oliver Gierke
* @author Christoph Strobl
* @author Thomas Darimont
*/
public class StringBasedMongoQuery extends AbstractMongoQuery {

private static final String COUND_AND_DELETE = "Manually defined query for %s cannot be both a count and delete query at the same time!";
private static final Pattern PLACEHOLDER = Pattern.compile("\\?(\\d+)");
private static final Logger LOG = LoggerFactory.getLogger(StringBasedMongoQuery.class);
private static final ParameterBindingParser PARSER = ParameterBindingParser.INSTANCE;

private final String query;
private final String fieldSpec;
private final boolean isCountQuery;
private final boolean isDeleteQuery;
private final List<ParameterBinding> queryParameterBindings;
private final List<ParameterBinding> fieldSpecParameterBindings;

/**
* Creates a new {@link StringBasedMongoQuery} for the given {@link MongoQueryMethod} and {@link MongoOperations}.
@@ -65,7 +73,11 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
super(method, mongoOperations);

this.query = query;
this.queryParameterBindings = PARSER.parseParameterBindingsFrom(query);

this.fieldSpec = method.getFieldSpecification();
this.fieldSpecParameterBindings = PARSER.parseParameterBindingsFrom(method.getFieldSpecification());

this.isCountQuery = method.hasAnnotatedQuery() ? method.getQueryAnnotation().count() : false;
this.isDeleteQuery = method.hasAnnotatedQuery() ? method.getQueryAnnotation().delete() : false;

@@ -81,12 +93,12 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
@Override
|
||||
protected Query createQuery(ConvertingParameterAccessor accessor) {
|
||||
|
||||
String queryString = replacePlaceholders(query, accessor);
|
||||
String queryString = replacePlaceholders(query, accessor, queryParameterBindings);
|
||||
|
||||
Query query = null;
|
||||
|
||||
if (fieldSpec != null) {
|
||||
String fieldString = replacePlaceholders(fieldSpec, accessor);
|
||||
String fieldString = replacePlaceholders(fieldSpec, accessor, fieldSpecParameterBindings);
|
||||
query = new BasicQuery(queryString, fieldString);
|
||||
} else {
|
||||
query = new BasicQuery(queryString);
|
||||
@@ -119,21 +131,174 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
return this.isDeleteQuery;
|
||||
}
|
||||
|
||||
private String replacePlaceholders(String input, ConvertingParameterAccessor accessor) {
|
||||
/**
|
||||
* Replaced the parameter place-holders with the actual parameter values from the given {@link ParameterBinding}s.
|
||||
*
|
||||
* @param input
|
||||
* @param accessor
|
||||
* @param bindings
|
||||
* @return
|
||||
*/
|
||||
private String replacePlaceholders(String input, ConvertingParameterAccessor accessor, List<ParameterBinding> bindings) {
|
||||
|
||||
Matcher matcher = PLACEHOLDER.matcher(input);
|
||||
String result = input;
|
||||
|
||||
while (matcher.find()) {
|
||||
String group = matcher.group();
|
||||
int index = Integer.parseInt(matcher.group(1));
|
||||
result = result.replace(group, getParameterWithIndex(accessor, index));
|
||||
if (bindings.isEmpty()) {
|
||||
return input;
|
||||
}
|
||||
|
||||
return result;
|
||||
StringBuilder result = new StringBuilder(input);
|
||||
|
||||
for (ParameterBinding binding : bindings) {
|
||||
|
||||
String parameter = binding.getParameter();
|
||||
int idx = result.indexOf(parameter);
|
||||
|
||||
if (idx != -1) {
|
||||
result.replace(idx, idx + parameter.length(), getParameterValueForBinding(accessor, binding));
|
||||
}
|
||||
}
|
||||
|
||||
return result.toString();
|
||||
}
|
||||
|
||||
private String getParameterWithIndex(ConvertingParameterAccessor accessor, int index) {
|
||||
return JSON.serialize(accessor.getBindableValue(index));
|
||||
/**
|
||||
* Returns the serialized value to be used for the given {@link ParameterBinding}.
|
||||
*
|
||||
* @param accessor
|
||||
* @param binding
|
||||
* @return
|
||||
*/
|
||||
private String getParameterValueForBinding(ConvertingParameterAccessor accessor, ParameterBinding binding) {
|
||||
|
||||
Object value = accessor.getBindableValue(binding.getParameterIndex());
|
||||
|
||||
if (value instanceof String && binding.isQuoted()) {
|
||||
return (String) value;
|
||||
}
|
||||
|
||||
return JSON.serialize(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* A parser that extracts the parameter bindings from a given query string.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static enum ParameterBindingParser {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
private static final String PARAMETER_PREFIX = "_param_";
|
||||
private static final String PARSEABLE_PARAMETER = "\"" + PARAMETER_PREFIX + "$1\"";
|
||||
private static final Pattern PARAMETER_BINDING_PATTERN = Pattern.compile("\\?(\\d+)");
|
||||
private static final Pattern PARSEABLE_BINDING_PATTERN = Pattern.compile("\"?" + PARAMETER_PREFIX + "(\\d+)\"?");
|
||||
|
||||
private final static int PARAMETER_INDEX_GROUP = 1;
|
||||
|
||||
/**
|
||||
* Returns a list of {@link ParameterBinding}s found in the given {@code input} or an
|
||||
* {@link Collections#emptyList()}.
|
||||
*
|
||||
* @param input
|
||||
* @return
|
||||
*/
|
||||
public List<ParameterBinding> parseParameterBindingsFrom(String input) {
|
||||
|
||||
if (!StringUtils.hasText(input)) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
List<ParameterBinding> bindings = new ArrayList<ParameterBinding>();
|
||||
|
||||
String parseableInput = makeParameterReferencesParseable(input);
|
||||
|
||||
collectParameterReferencesIntoBindings(bindings, JSON.parse(parseableInput));
|
||||
|
||||
return bindings;
|
||||
}
|
||||
|
||||
private String makeParameterReferencesParseable(String input) {
|
||||
|
||||
Matcher matcher = PARAMETER_BINDING_PATTERN.matcher(input);
|
||||
String parseableInput = matcher.replaceAll(PARSEABLE_PARAMETER);
|
||||
|
||||
return parseableInput;
|
||||
}
|
||||
|
||||
private void collectParameterReferencesIntoBindings(List<ParameterBinding> bindings, Object value) {
|
||||
|
||||
if (value instanceof String) {
|
||||
|
||||
String string = ((String) value).trim();
|
||||
|
||||
Matcher valueMatcher = PARSEABLE_BINDING_PATTERN.matcher(string);
|
||||
while (valueMatcher.find()) {
|
||||
int paramIndex = Integer.parseInt(valueMatcher.group(PARAMETER_INDEX_GROUP));
|
||||
boolean quoted = (string.startsWith("'") && string.endsWith("'"))
|
||||
|| (string.startsWith("\"") && string.endsWith("\""));
|
||||
bindings.add(new ParameterBinding(paramIndex, quoted));
|
||||
}
|
||||
|
||||
} else if (value instanceof Pattern) {
|
||||
|
||||
String string = ((Pattern) value).toString().trim();
|
||||
|
||||
Matcher valueMatcher = PARSEABLE_BINDING_PATTERN.matcher(string);
|
||||
while (valueMatcher.find()) {
|
||||
int paramIndex = Integer.parseInt(valueMatcher.group(PARAMETER_INDEX_GROUP));
|
||||
|
||||
/*
|
||||
* The pattern is used as a direct parameter replacement, e.g. 'field': ?1,
|
||||
* therefore we treat it as not quoted to remain backwards compatible.
|
||||
*/
|
||||
boolean quoted = !string.equals(PARAMETER_PREFIX + paramIndex);
|
||||
|
||||
bindings.add(new ParameterBinding(paramIndex, quoted));
|
||||
}
|
||||
|
||||
} else if (value instanceof DBObject) {
|
||||
|
||||
DBObject dbo = (DBObject) value;
|
||||
|
||||
for (String field : dbo.keySet()) {
|
||||
collectParameterReferencesIntoBindings(bindings, field);
|
||||
collectParameterReferencesIntoBindings(bindings, dbo.get(field));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A generic parameter binding with name or position information.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class ParameterBinding {
|
||||
|
||||
private final int parameterIndex;
|
||||
private final boolean quoted;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ParameterBinding} with the given {@code parameterIndex} and {@code quoted} information.
|
||||
*
|
||||
* @param parameterIndex
|
||||
* @param quoted whether or not the parameter is already quoted.
|
||||
*/
|
||||
public ParameterBinding(int parameterIndex, boolean quoted) {
|
||||
|
||||
this.parameterIndex = parameterIndex;
|
||||
this.quoted = quoted;
|
||||
}
|
||||
|
||||
public boolean isQuoted() {
|
||||
return quoted;
|
||||
}
|
||||
|
||||
public int getParameterIndex() {
|
||||
return parameterIndex;
|
||||
}
|
||||
|
||||
public String getParameter() {
|
||||
return "?" + parameterIndex;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,7 +35,7 @@ import com.mysema.query.apt.Configuration;
import com.mysema.query.apt.DefaultConfiguration;

/**
* Annotation processor to create Querydsl query types for QueryDsl annoated classes.
* Annotation processor to create Querydsl query types for QueryDsl annotated classes.
*
* @author Oliver Gierke
*/
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.support;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
@@ -25,7 +28,10 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.DBRef;
|
||||
import com.mysema.query.mongodb.MongodbSerializer;
|
||||
import com.mysema.query.types.Constant;
|
||||
import com.mysema.query.types.Operation;
|
||||
import com.mysema.query.types.Path;
|
||||
import com.mysema.query.types.PathMetadata;
|
||||
import com.mysema.query.types.PathType;
|
||||
@@ -34,9 +40,22 @@ import com.mysema.query.types.PathType;
|
||||
* Custom {@link MongodbSerializer} to take mapping information into account when building keys for constraints.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
|
||||
private static final String ID_KEY = "_id";
|
||||
private static final Set<PathType> PATH_TYPES;
|
||||
|
||||
static {
|
||||
|
||||
Set<PathType> pathTypes = new HashSet<PathType>();
|
||||
pathTypes.add(PathType.VARIABLE);
|
||||
pathTypes.add(PathType.PROPERTY);
|
||||
|
||||
PATH_TYPES = Collections.unmodifiableSet(pathTypes);
|
||||
}
|
||||
|
||||
private final MongoConverter converter;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final QueryMapper mapper;
|
||||
@@ -44,7 +63,7 @@ class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
/**
|
||||
* Creates a new {@link SpringDataMongodbSerializer} for the given {@link MappingContext}.
|
||||
*
|
||||
* @param mappingContext
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
*/
|
||||
public SpringDataMongodbSerializer(MongoConverter converter) {
|
||||
|
||||
@@ -80,10 +99,63 @@ class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
@Override
|
||||
protected DBObject asDBObject(String key, Object value) {
|
||||
|
||||
if ("_id".equals(key)) {
|
||||
return super.asDBObject(key, mapper.convertId(value));
|
||||
if (ID_KEY.equals(key)) {
|
||||
return mapper.getMappedObject(super.asDBObject(key, value), null);
|
||||
}
|
||||
|
||||
return super.asDBObject(key, value instanceof Pattern ? value : converter.convertToMongoType(value));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.mysema.query.mongodb.MongodbSerializer#isReference(com.mysema.query.types.Path)
|
||||
*/
|
||||
@Override
|
||||
protected boolean isReference(Path<?> path) {
|
||||
|
||||
MongoPersistentProperty property = getPropertyFor(path);
|
||||
return property == null ? false : property.isAssociation();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.mysema.query.mongodb.MongodbSerializer#asReference(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected DBRef asReference(Object constant) {
|
||||
return converter.toDBRef(constant, null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.mysema.query.mongodb.MongodbSerializer#asReference(com.mysema.query.types.Operation, int)
|
||||
*/
|
||||
@Override
|
||||
protected DBRef asReference(Operation<?> expr, int constIndex) {
|
||||
|
||||
for (Object arg : expr.getArgs()) {
|
||||
|
||||
if (arg instanceof Path) {
|
||||
|
||||
MongoPersistentProperty property = getPropertyFor((Path<?>) arg);
|
||||
Object constant = ((Constant<?>) expr.getArg(constIndex)).getConstant();
|
||||
|
||||
return converter.toDBRef(constant, property);
|
||||
}
|
||||
}
|
||||
|
||||
return super.asReference(expr, constIndex);
|
||||
}
|
||||
|
||||
private MongoPersistentProperty getPropertyFor(Path<?> path) {
|
||||
|
||||
Path<?> parent = path.getMetadata().getParent();
|
||||
|
||||
if (parent == null || !PATH_TYPES.contains(path.getMetadata().getPathType())) {
|
||||
return null;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(parent.getType());
|
||||
return entity != null ? entity.getPersistentProperty(path.getMetadata().getName()) : null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -122,7 +122,7 @@ public class AuditingViaJavaConfigRepositoriesTests {
static interface AuditablePersonRepository extends MongoRepository<AuditablePerson, String> {}

@Configuration
@EnableMongoRepositories
@EnableMongoRepositories(basePackageClasses = AuditablePersonRepository.class, considerNestedRepositories = true)
@EnableMongoAuditing
static class SimpleConfigWithRepositories {
@@ -1,5 +1,5 @@
/*
* Copyright 2012-2013 the original author or authors.
* Copyright 2012-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -23,6 +23,7 @@ import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.Collection;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -49,11 +50,17 @@ public class ServerAddressPropertyEditorUnitTests {

/**
* @see DATAMONGO-454
* @see DATAMONGO-1062
*/
@Test(expected = IllegalArgumentException.class)
public void rejectsAddressConfigWithoutASingleParsableServerAddress() {
public void rejectsAddressConfigWithoutASingleParsableAndResolvableServerAddress() {

editor.setAsText("foo, bar");
String unknownHost1 = "gugu.nonexistant.example.org";
String unknownHost2 = "gaga.nonexistant.example.org";

assertUnresolveableHostnames(unknownHost1, unknownHost2);

editor.setAsText(unknownHost1 + "," + unknownHost2);
}

/**
@@ -193,4 +200,16 @@ public class ServerAddressPropertyEditorUnitTests {
assertThat(addresses, hasItem(new ServerAddress(InetAddress.getByName(hostAddress), port)));
}
}

private void assertUnresolveableHostnames(String... hostnames) {

for (String hostname : hostnames) {
try {
InetAddress.getByName(hostname);
Assert.fail("Supposedly unresolveable hostname '" + hostname + "' can be resolved.");
} catch (UnknownHostException expected) {
// ok
}
}
}
}
@@ -0,0 +1,93 @@
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.index.IndexInfo;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.util.ObjectUtils;

import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;

/**
* Integration tests for {@link DefaultIndexOperations}.
*
* @author Christoph Strobl
* @author Oliver Gierke
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:infrastructure.xml")
public class DefaultIndexOperationsIntegrationTests {

static final DBObject GEO_SPHERE_2D = new BasicDBObject("loaction", "2dsphere");

@Autowired MongoTemplate template;
DefaultIndexOperations indexOps;
DBCollection collection;

@Before
public void setUp() {

String collectionName = this.template.getCollectionName(DefaultIndexOperationsIntegrationTestsSample.class);

this.collection = this.template.getDb().getCollection(collectionName);
this.collection.dropIndexes();

this.indexOps = new DefaultIndexOperations(template, collectionName);
}

/**
* @see DATAMONGO-1008
*/
@Test
public void getIndexInfoShouldBeAbleToRead2dsphereIndex() {

collection.createIndex(GEO_SPHERE_2D);

IndexInfo info = findAndReturnIndexInfo(GEO_SPHERE_2D);
assertThat(info.getIndexFields().get(0).isGeo(), is(true));
}

private IndexInfo findAndReturnIndexInfo(DBObject keys) {
return findAndReturnIndexInfo(indexOps.getIndexInfo(), keys);
}

@SuppressWarnings("deprecation")
private static IndexInfo findAndReturnIndexInfo(Iterable<IndexInfo> candidates, DBObject keys) {
return findAndReturnIndexInfo(candidates, DBCollection.genIndexName(keys));
}

private static IndexInfo findAndReturnIndexInfo(Iterable<IndexInfo> candidates, String name) {

for (IndexInfo info : candidates) {
if (ObjectUtils.nullSafeEquals(name, info.getName())) {
return info;
}
}
throw new AssertionError(String.format("Index with %s was not found", name));
}

static class DefaultIndexOperationsIntegrationTestsSample {}
}
@@ -74,6 +74,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
@@ -188,6 +189,7 @@ public class MongoTemplateTests {
|
||||
template.dropCollection(DocumentWithDBRefCollection.class);
|
||||
template.dropCollection(SomeContent.class);
|
||||
template.dropCollection(SomeTemplate.class);
|
||||
template.dropCollection(Address.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -2511,6 +2513,37 @@ public class MongoTemplateTests {
|
||||
assertThat(template.getDb().getCollection("sample").find(new BasicDBObject("field", "data")).count(), is(1));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1001
|
||||
*/
|
||||
@Test
|
||||
public void shouldAllowSavingOfLazyLoadedDbRefs() {
|
||||
|
||||
template.dropCollection(SomeTemplate.class);
|
||||
template.dropCollection(SomeMessage.class);
|
||||
template.dropCollection(SomeContent.class);
|
||||
|
||||
SomeContent content = new SomeContent();
|
||||
content.id = "content-1";
|
||||
content.text = "spring";
|
||||
template.save(content);
|
||||
|
||||
SomeTemplate tmpl = new SomeTemplate();
|
||||
tmpl.id = "template-1";
|
||||
tmpl.content = content; // @DBRef(lazy=true) tmpl.content
|
||||
|
||||
template.save(tmpl);
|
||||
|
||||
SomeTemplate savedTmpl = template.findById(tmpl.id, SomeTemplate.class);
|
||||
|
||||
SomeContent loadedContent = savedTmpl.getContent();
|
||||
loadedContent.setText("data");
|
||||
template.save(loadedContent);
|
||||
|
||||
assertThat(template.findById(content.id, SomeContent.class).getText(), is("data"));
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-880
|
||||
*/
|
||||
@@ -2693,6 +2726,39 @@ public class MongoTemplateTests {
|
||||
assertThat(result.getContent().getName(), is(content.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-970
|
||||
*/
|
||||
@Test
|
||||
public void insertsAndRemovesBasicDbObjectCorrectly() {
|
||||
|
||||
BasicDBObject object = new BasicDBObject("key", "value");
|
||||
template.insert(object, "collection");
|
||||
|
||||
assertThat(object.get("_id"), is(notNullValue()));
|
||||
assertThat(template.findAll(DBObject.class, "collection"), hasSize(1));
|
||||
|
||||
template.remove(object, "collection");
|
||||
assertThat(template.findAll(DBObject.class, "collection"), hasSize(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1207
|
||||
*/
|
||||
@Test
|
||||
public void ignoresNullElementsForInsertAll() {
|
||||
|
||||
Address newYork = new Address("NY", "New York");
|
||||
Address washington = new Address("DC", "Washington");
|
||||
|
||||
template.insertAll(Arrays.asList(newYork, null, washington));
|
||||
|
||||
List<Address> result = template.findAll(Address.class);
|
||||
|
||||
assertThat(result, hasSize(2));
|
||||
assertThat(result, hasItems(newYork, washington));
|
||||
}
|
||||
|
||||
static class DoucmentWithNamedIdField {
|
||||
|
||||
@Id String someIdKey;
|
||||
@@ -2744,6 +2810,7 @@ public class MongoTemplateTests {
|
||||
|
||||
@Id public String id;
|
||||
|
||||
@Field("db_ref_list")/** @see DATAMONGO-1058 */
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef//
|
||||
public List<Sample> dbRefAnnotatedList;
|
||||
|
||||
@@ -2878,6 +2945,41 @@ public class MongoTemplateTests {
|
||||
|
||||
String state;
|
||||
String city;
|
||||
|
||||
Address() {}
|
||||
|
||||
Address(String state, String city) {
|
||||
this.state = state;
|
||||
this.city = city;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (obj == this) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(obj instanceof Address)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Address that = (Address) obj;
|
||||
|
||||
return ObjectUtils.nullSafeEquals(this.city, that.city) && //
|
||||
ObjectUtils.nullSafeEquals(this.state, that.state);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
int result = 17;
|
||||
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(this.city);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(this.state);
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
static class VersionedPerson {
|
||||
@@ -2940,6 +3042,11 @@ public class MongoTemplateTests {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setText(String text) {
|
||||
this.text = text;
|
||||
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
@@ -42,7 +42,10 @@ import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.annotation.Version;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.convert.CustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
@@ -50,17 +53,22 @@ import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.CommandResult;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBCursor;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.ReadPreference;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link MongoTemplate}.
|
||||
@@ -329,6 +337,94 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
|
||||
verify(collection, never()).remove(Mockito.any(DBObject.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-948
|
||||
*/
|
||||
@Test
|
||||
public void sortShouldBeTakenAsIsWhenExecutingQueryWithoutSpecificTypeInformation() {
|
||||
|
||||
Query query = Query.query(Criteria.where("foo").is("bar")).with(new Sort("foo"));
|
||||
template.executeQuery(query, "collection1", new DocumentCallbackHandler() {
|
||||
|
||||
@Override
|
||||
public void processDocument(DBObject dbObject) throws MongoException, DataAccessException {
|
||||
// nothing to do - just a test
|
||||
}
|
||||
});
|
||||
|
||||
ArgumentCaptor<DBObject> captor = ArgumentCaptor.forClass(DBObject.class);
|
||||
verify(cursor, times(1)).sort(captor.capture());
|
||||
assertThat(captor.getValue(), equalTo(new BasicDBObjectBuilder().add("foo", 1).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void aggregateShouldHonorReadPreferenceWhenSet() {
|
||||
|
||||
CommandResult result = mock(CommandResult.class);
|
||||
|
||||
when(result.get("result")).thenReturn(Collections.emptySet());
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(result);
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(result);
|
||||
template.setReadPreference(ReadPreference.secondary());
|
||||
|
||||
template.aggregate(Aggregation.newAggregation(Aggregation.unwind("foo")), "collection-1", Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class), eq(ReadPreference.secondary()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void aggregateShouldIgnoreReadPreferenceWhenNotSet() {
|
||||
|
||||
CommandResult result = mock(CommandResult.class);
|
||||
|
||||
when(result.get("result")).thenReturn(Collections.emptySet());
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(result);
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(result);
|
||||
|
||||
template.aggregate(Aggregation.newAggregation(Aggregation.unwind("foo")), "collection-1", Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void geoNearShouldHonorReadPreferenceWhenSet() {
|
||||
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class)))
|
||||
.thenReturn(mock(CommandResult.class));
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
|
||||
template.setReadPreference(ReadPreference.secondary());
|
||||
|
||||
NearQuery query = NearQuery.near(new Point(1, 1));
|
||||
template.geoNear(query, Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class), eq(ReadPreference.secondary()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void geoNearShouldIgnoreReadPreferenceWhenNotSet() {
|
||||
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class)))
|
||||
.thenReturn(mock(CommandResult.class));
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
|
||||
|
||||
NearQuery query = NearQuery.near(new Point(1, 1));
|
||||
template.geoNear(query, Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class));
|
||||
}
|
||||
|
||||
class AutogenerateableId {
|
||||
|
||||
@Id BigInteger id;
|
||||
|
||||
@@ -1,3 +1,18 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
@@ -15,6 +30,7 @@ import org.bson.types.Binary;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.Test;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.core.convert.support.GenericConversionService;
|
||||
@@ -26,6 +42,7 @@ import com.mongodb.DBRef;
|
||||
* Unit tests for {@link CustomConversions}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @auhtor Christoph Strobl
|
||||
*/
|
||||
public class CustomConversionsUnitTests {
|
||||
|
||||
@@ -197,6 +214,35 @@ public class CustomConversionsUnitTests {
|
||||
assertThat(conversionService.convert(new DateTime(), Date.class), is(new Date(0)));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1001
|
||||
*/
|
||||
@Test
|
||||
public void shouldSelectPropertCustomWriteTargetForCglibProxiedType() {
|
||||
|
||||
CustomConversions conversions = new CustomConversions(Arrays.asList(FormatToStringConverter.INSTANCE));
|
||||
assertThat(conversions.getCustomWriteTarget(createProxyTypeFor(Format.class)), is(typeCompatibleWith(String.class)));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1001
|
||||
*/
|
||||
@Test
|
||||
public void shouldSelectPropertCustomReadTargetForCglibProxiedType() {
|
||||
|
||||
CustomConversions conversions = new CustomConversions(Arrays.asList(CustomObjectToStringConverter.INSTANCE));
|
||||
assertThat(conversions.hasCustomReadTarget(createProxyTypeFor(Object.class), String.class), is(true));
|
||||
}
|
||||
|
||||
private static Class<?> createProxyTypeFor(Class<?> type) {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
factory.setProxyTargetClass(true);
|
||||
factory.setTargetClass(type);
|
||||
|
||||
return factory.getProxy().getClass();
|
||||
}
|
||||
|
||||
enum FormatToStringConverter implements Converter<Format, String> {
|
||||
INSTANCE;
|
||||
|
||||
@@ -251,4 +297,15 @@ public class CustomConversionsUnitTests {
|
||||
return new Date(0);
|
||||
}
|
||||
}
|
||||
|
||||
enum CustomObjectToStringConverter implements Converter<Object, String> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public String convert(Object source) {
|
||||
return source != null ? source.toString() : null;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -429,6 +429,27 @@ public class DbRefMappingMongoConverterUnitTests {
assertProxyIsResolved(result.dbRefEqualsAndHashcodeObjectMethodOverride2, true);
}

/**
* @see DATAMONGO-987
*/
@Test
public void shouldNotGenerateLazyLoadingProxyForNullValues() {

DBObject dbo = new BasicDBObject();
ClassWithLazyDbRefs lazyDbRefs = new ClassWithLazyDbRefs();
lazyDbRefs.id = "42";
converter.write(lazyDbRefs, dbo);

ClassWithLazyDbRefs result = converter.read(ClassWithLazyDbRefs.class, dbo);

assertThat(result.id, is(lazyDbRefs.id));
assertThat(result.dbRefToInterface, is(nullValue()));
assertThat(result.dbRefToConcreteCollection, is(nullValue()));
assertThat(result.dbRefToConcreteType, is(nullValue()));
assertThat(result.dbRefToConcreteTypeWithPersistenceConstructor, is(nullValue()));
assertThat(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor, is(nullValue()));
}

private Object transport(Object result) {
return SerializationUtils.deserialize(SerializationUtils.serialize(result));
}
@@ -15,10 +15,26 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.springframework.data.mongodb.core.DBObjectTestUtils.*;
|
||||
import static org.hamcrest.Matchers.arrayWithSize;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasItem;
|
||||
import static org.hamcrest.Matchers.hasItems;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertThat;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.times;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
import static org.springframework.data.mongodb.core.DBObjectTestUtils.getAsDBObject;
|
||||
import static org.springframework.data.mongodb.core.DBObjectTestUtils.getTypedValue;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
@@ -45,16 +61,23 @@ import org.hamcrest.Matchers;
|
||||
import org.joda.time.LocalDate;
|
||||
import org.junit.Before;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.beans.ConversionNotSupportedException;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.annotation.PersistenceConstructor;
|
||||
import org.springframework.data.annotation.TypeAlias;
|
||||
import org.springframework.data.convert.ReadingConverter;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.geo.Box;
|
||||
import org.springframework.data.geo.Circle;
|
||||
import org.springframework.data.geo.Distance;
|
||||
@@ -67,6 +90,7 @@ import org.springframework.data.mapping.model.MappingInstantiationException;
|
||||
import org.springframework.data.mongodb.core.DBObjectTestUtils;
|
||||
import org.springframework.data.mongodb.core.convert.DBObjectAccessorUnitTests.NestedType;
|
||||
import org.springframework.data.mongodb.core.convert.DBObjectAccessorUnitTests.ProjectingType;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverterUnitTests.ClassWithMapUsingEnumAsKey.FooBarEnum;
|
||||
import org.springframework.data.mongodb.core.geo.Sphere;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
@@ -98,6 +122,8 @@ public class MappingMongoConverterUnitTests {
|
||||
@Mock ApplicationContext context;
|
||||
@Mock DbRefResolver resolver;
|
||||
|
||||
public @Rule ExpectedException exception = ExpectedException.none();
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
@@ -1807,6 +1833,138 @@ public class MappingMongoConverterUnitTests {
|
||||
assertThat(result.shape, is((Shape) sphere));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1001
|
||||
*/
|
||||
@Test
|
||||
public void shouldWriteCglibProxiedClassTypeInformationCorrectly() {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
factory.setTargetClass(GenericType.class);
|
||||
factory.setProxyTargetClass(true);
|
||||
|
||||
GenericType<?> proxied = (GenericType<?>) factory.getProxy();
|
||||
BasicDBObject dbo = new BasicDBObject();
|
||||
converter.write(proxied, dbo);
|
||||
|
||||
assertThat(dbo.get("_class"), is((Object) GenericType.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1001
|
||||
*/
|
||||
@Test
|
||||
public void shouldUseTargetObjectOfLazyLoadingProxyWhenWriting() {
|
||||
|
||||
LazyLoadingProxy mock = mock(LazyLoadingProxy.class);
|
||||
|
||||
BasicDBObject dbo = new BasicDBObject();
|
||||
converter.write(mock, dbo);
|
||||
|
||||
verify(mock, times(1)).initialize();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1034
|
||||
*/
|
||||
@Test
|
||||
public void rejectsBasicDbListToBeConvertedIntoComplexType() {
|
||||
|
||||
BasicDBList inner = new BasicDBList();
|
||||
inner.add("key");
|
||||
inner.add("value");
|
||||
|
||||
BasicDBList outer = new BasicDBList();
|
||||
outer.add(inner);
|
||||
outer.add(inner);
|
||||
|
||||
BasicDBObject source = new BasicDBObject("attributes", outer);
|
||||
|
||||
exception.expect(MappingException.class);
|
||||
exception.expectMessage(Item.class.getName());
|
||||
exception.expectMessage(BasicDBList.class.getName());
|
||||
|
||||
converter.read(Item.class, source);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1058
|
||||
*/
|
||||
@Test
|
||||
public void readShouldRespectExplicitFieldNameForDbRef() {
|
||||
|
||||
BasicDBObject source = new BasicDBObject();
|
||||
source.append("explict-name-for-db-ref", new DBRef(mock(DB.class), "foo", "1"));
|
||||
|
||||
converter.read(ClassWithExplicitlyNamedDBRefProperty.class, source);
|
||||
|
||||
verify(resolver, times(1)).resolveDbRef(Mockito.any(MongoPersistentProperty.class), Mockito.any(DBRef.class),
|
||||
Mockito.any(DbRefResolverCallback.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1118
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void convertsMapKeyUsingCustomConverterForAndBackwards() {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext);
|
||||
converter.setCustomConversions(new CustomConversions(Arrays.asList(new FooBarEnumToStringConverter(),
|
||||
new StringToFooNumConverter())));
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
ClassWithMapUsingEnumAsKey source = new ClassWithMapUsingEnumAsKey();
|
||||
source.map = new HashMap<FooBarEnum, String>();
|
||||
source.map.put(FooBarEnum.FOO, "wohoo");
|
||||
|
||||
DBObject target = new BasicDBObject();
|
||||
converter.write(source, target);
|
||||
|
||||
assertThat(converter.read(ClassWithMapUsingEnumAsKey.class, target).map, is(source.map));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1118
|
||||
*/
|
||||
@Test
|
||||
public void writesMapKeyUsingCustomConverter() {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext);
|
||||
converter.setCustomConversions(new CustomConversions(Arrays.asList(new FooBarEnumToStringConverter())));
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
ClassWithMapUsingEnumAsKey source = new ClassWithMapUsingEnumAsKey();
|
||||
source.map = new HashMap<FooBarEnum, String>();
|
||||
source.map.put(FooBarEnum.FOO, "spring");
|
||||
source.map.put(FooBarEnum.BAR, "data");
|
||||
|
||||
DBObject target = new BasicDBObject();
|
||||
converter.write(source, target);
|
||||
|
||||
DBObject map = DBObjectTestUtils.getAsDBObject(target, "map");
|
||||
|
||||
assertThat(map.containsField("foo-enum-value"), is(true));
|
||||
assertThat(map.containsField("bar-enum-value"), is(true));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1118
|
||||
*/
|
||||
@Test
|
||||
public void readsMapKeyUsingCustomConverter() {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext);
|
||||
converter.setCustomConversions(new CustomConversions(Arrays.asList(new StringToFooNumConverter())));
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
DBObject source = new BasicDBObject("map", new BasicDBObject("foo-enum-value", "spring"));
|
||||
|
||||
ClassWithMapUsingEnumAsKey target = converter.read(ClassWithMapUsingEnumAsKey.class, source);
|
||||
|
||||
assertThat(target.map.get(FooBarEnum.FOO), is("spring"));
|
||||
}
|
||||
|
||||
static class GenericType<T> {
|
||||
T content;
|
||||
}
|
||||
@@ -2057,4 +2215,60 @@ public class MappingMongoConverterUnitTests {
|
||||
|
||||
Shape shape;
|
||||
}
|
||||
|
||||
class ClassWithExplicitlyNamedDBRefProperty {
|
||||
|
||||
@Field("explict-name-for-db-ref")//
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef//
|
||||
ClassWithIntId dbRefProperty;
|
||||
|
||||
public ClassWithIntId getDbRefProperty() {
|
||||
return dbRefProperty;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class ClassWithMapUsingEnumAsKey {
|
||||
|
||||
static enum FooBarEnum {
|
||||
FOO, BAR;
|
||||
}
|
||||
|
||||
Map<FooBarEnum, String> map;
|
||||
}
|
||||
|
||||
@WritingConverter
|
||||
static class FooBarEnumToStringConverter implements Converter<FooBarEnum, String> {
|
||||
|
||||
@Override
|
||||
public String convert(FooBarEnum source) {
|
||||
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return FooBarEnum.FOO.equals(source) ? "foo-enum-value" : "bar-enum-value";
|
||||
}
|
||||
}
|
||||
|
||||
@ReadingConverter
|
||||
static class StringToFooNumConverter implements Converter<String, FooBarEnum> {
|
||||
|
||||
@Override
|
||||
public FooBarEnum convert(String source) {
|
||||
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (source.equals("foo-enum-value")) {
|
||||
return FooBarEnum.FOO;
|
||||
}
|
||||
if (source.equals("bar-enum-value")) {
|
||||
return FooBarEnum.BAR;
|
||||
}
|
||||
|
||||
throw new ConversionNotSupportedException(source, String.class, null);
|
||||
}
|
||||
}
|
||||
}
@@ -163,8 +163,8 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
new MongoPersistentEntityIndexCreator(mappingContext, factory);

assertThat(keysCaptor.getValue(), equalTo(new BasicDBObjectBuilder().add("company.address.location", "2d").get()));
assertThat(optionsCaptor.getValue(), equalTo(new BasicDBObjectBuilder().add("name", "location").add("min", -180)
.add("max", 180).add("bits", 26).get()));
assertThat(optionsCaptor.getValue(), equalTo(new BasicDBObjectBuilder().add("name", "company.address.location")
.add("min", -180).add("max", 180).add("bits", 26).get()));
}

/**
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
* Copyright 2014-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,10 +20,13 @@ import static org.hamcrest.collection.IsEmptyCollection.*;
|
||||
import static org.hamcrest.core.IsEqual.*;
|
||||
import static org.hamcrest.core.IsInstanceOf.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.hamcrest.collection.IsEmptyCollection;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Suite;
|
||||
@@ -38,6 +41,9 @@ import org.springframework.data.mongodb.core.mapping.DBRef;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntityTestDummy.MongoPersistentEntityDummyBuilder;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
|
||||
@@ -289,9 +295,8 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(CompoundIndexOnLevelZero.class);
|
||||
|
||||
IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition();
|
||||
assertThat(indexDefinition.getIndexOptions(),
|
||||
equalTo(new BasicDBObjectBuilder().add("name", "compound_index").add("unique", true).add("dropDups", true)
|
||||
.add("sparse", true).add("background", true).add("expireAfterSeconds", 10L).get()));
|
||||
assertThat(indexDefinition.getIndexOptions(), equalTo(new BasicDBObjectBuilder().add("name", "compound_index")
|
||||
.add("unique", true).add("dropDups", true).add("sparse", true).add("background", true).get()));
|
||||
assertThat(indexDefinition.getIndexKeys(), equalTo(new BasicDBObjectBuilder().add("foo", 1).add("bar", -1).get()));
|
||||
}
|
||||
|
||||
@@ -304,9 +309,8 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(IndexDefinedOnSuperClass.class);
|
||||
|
||||
IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition();
|
||||
assertThat(indexDefinition.getIndexOptions(),
|
||||
equalTo(new BasicDBObjectBuilder().add("name", "compound_index").add("unique", true).add("dropDups", true)
|
||||
.add("sparse", true).add("background", true).add("expireAfterSeconds", 10L).get()));
|
||||
assertThat(indexDefinition.getIndexOptions(), equalTo(new BasicDBObjectBuilder().add("name", "compound_index")
|
||||
.add("unique", true).add("dropDups", true).add("sparse", true).add("background", true).get()));
|
||||
assertThat(indexDefinition.getIndexKeys(), equalTo(new BasicDBObjectBuilder().add("foo", 1).add("bar", -1).get()));
|
||||
}
|
||||
|
||||
@@ -322,7 +326,7 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
assertThat(
|
||||
indexDefinition.getIndexOptions(),
|
||||
equalTo(new BasicDBObjectBuilder().add("unique", true).add("dropDups", true).add("sparse", true)
|
||||
.add("background", true).add("expireAfterSeconds", 10L).get()));
|
||||
.add("background", true).get()));
|
||||
assertThat(indexDefinition.getIndexKeys(), equalTo(new BasicDBObjectBuilder().add("foo", 1).add("bar", -1).get()));
|
||||
}
|
||||
|
||||
@@ -364,6 +368,22 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
assertIndexPathAndCollection(new String[] { "foo", "bar" }, "CompoundIndexOnLevelZero", indexDefinitions.get(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-963
|
||||
*/
|
||||
@Test
|
||||
public void compoundIndexShouldIncludeTTLWhenConsistingOfOnlyOneKey() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(CompoundIndexWithOnlyOneKeyAndTTL.class);
|
||||
|
||||
IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition();
|
||||
assertThat(
|
||||
indexDefinition.getIndexOptions(),
|
||||
equalTo(new BasicDBObjectBuilder().add("unique", true).add("dropDups", true).add("sparse", true)
|
||||
.add("background", true).add("expireAfterSeconds", 10L).get()));
|
||||
assertThat(indexDefinition.getIndexKeys(), equalTo(new BasicDBObjectBuilder().add("foo", 1).get()));
|
||||
}
|
||||
|
||||
@Document(collection = "CompoundIndexOnLevelOne")
|
||||
static class CompoundIndexOnLevelOne {
|
||||
|
||||
@@ -400,6 +420,13 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
static class ComountIndexWithAutogeneratedName {
|
||||
|
||||
}
|
||||
|
||||
@Document(collection = "CompoundIndexWithOnlyOneKeyAndTTL")
|
||||
@CompoundIndex(def = "{'foo': 1}", background = true, dropDups = true, expireAfterSeconds = 10, sparse = true,
|
||||
unique = true)
|
||||
static class CompoundIndexWithOnlyOneKeyAndTTL {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
public static class MixedIndexResolutionTests {
|
||||
@@ -469,9 +496,7 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(CycleOnLevelOne.class);
|
||||
assertIndexPathAndCollection("reference.indexedProperty", "cycleOnLevelOne", indexDefinitions.get(0));
|
||||
assertIndexPathAndCollection("reference.cyclicReference.reference.indexedProperty", "cycleOnLevelOne",
|
||||
indexDefinitions.get(1));
|
||||
assertThat(indexDefinitions, hasSize(2));
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -488,6 +513,164 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
assertThat(indexDefinitions, hasSize(3));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-949
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotDetectCycleInSimilarlyNamedProperties() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(SimilarityHolingBean.class);
|
||||
assertIndexPathAndCollection("norm", "similarityHolingBean", indexDefinitions.get(0));
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-962
|
||||
*/
|
||||
@Test
|
||||
public void shouldDetectSelfCycleViaCollectionTypeCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(SelfCyclingViaCollectionType.class);
|
||||
assertThat(indexDefinitions, IsEmptyCollection.empty());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-962
|
||||
*/
|
||||
@Test
|
||||
	public void shouldNotDetectCycleWhenTypeIsUsedMoreThanOnce() {

		List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(MultipleObjectsOfSameType.class);
		assertThat(indexDefinitions, IsEmptyCollection.empty());
	}

	/**
	 * @see DATAMONGO-962
	 */
	@Test
	public void shouldCatchCyclicReferenceExceptionOnRoot() {

		Document documentDummy = new Document() {

			@Override
			public Class<? extends Annotation> annotationType() {
				return Document.class;
			}

			@Override
			public String collection() {
				return null;
			}
		};

		MongoPersistentProperty propertyMock = mock(MongoPersistentProperty.class);
		when(propertyMock.isEntity()).thenReturn(true);
		when(propertyMock.getActualType()).thenThrow(
				new MongoPersistentEntityIndexResolver.CyclicPropertyReferenceException("foo", Object.class, "bar"));

		MongoPersistentEntity<SelfCyclingViaCollectionType> dummy = MongoPersistentEntityDummyBuilder
				.forClass(SelfCyclingViaCollectionType.class).withCollection("foo").and(propertyMock)
				.and(documentDummy).build();

		new MongoPersistentEntityIndexResolver(prepareMappingContext(SelfCyclingViaCollectionType.class))
				.resolveIndexForEntity(dummy);
	}

	/**
	 * @see DATAMONGO-1025
	 */
	@Test
	public void shouldUsePathIndexAsIndexNameForDocumentsHavingNamedNestedCompoundIndexFixedOnCollection() {

		List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithNestedDocumentHavingNamedCompoundIndex.class);
		assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"),
				equalTo("propertyOfTypeHavingNamedCompoundIndex.c_index"));
	}

	/**
	 * @see DATAMONGO-1025
	 */
	@Test
	public void shouldUseIndexNameForNestedTypesWithNamedCompoundIndexDefinition() {

		List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithNestedTypeHavingNamedCompoundIndex.class);
		assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"),
				equalTo("propertyOfTypeHavingNamedCompoundIndex.c_index"));
	}

	/**
	 * @see DATAMONGO-1025
	 */
	@Test
	public void shouldUsePathIndexAsIndexNameForDocumentsHavingNamedNestedIndexFixedOnCollection() {

		List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithNestedDocumentHavingNamedIndex.class);
		assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"),
				equalTo("propertyOfTypeHavingNamedIndex.property_index"));
	}

	/**
	 * @see DATAMONGO-1025
	 */
	@Test
	public void shouldUseIndexNameForNestedTypesWithNamedIndexDefinition() {

		List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithNestedTypeHavingNamedIndex.class);
		assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"),
				equalTo("propertyOfTypeHavingNamedIndex.property_index"));
	}

	/**
	 * @see DATAMONGO-1025
	 */
	@Test
	public void shouldUseIndexNameOnRootLevel() {

		List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithNamedIndex.class);
		assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("property_index"));
	}

	/**
	 * @see DATAMONGO-1087
	 */
	@Test
	public void shouldAllowMultiplePropertiesOfSameTypeWithMatchingStartLettersOnRoot() {

		List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(MultiplePropertiesOfSameTypeWithMatchingStartLetters.class);

		assertThat(indexDefinitions, hasSize(2));
		assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("name.component"));
		assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"), equalTo("nameLast.component"));
	}

	/**
	 * @see DATAMONGO-1087
	 */
	@Test
	public void shouldAllowMultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty() {

		List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(MultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty.class);

		assertThat(indexDefinitions, hasSize(2));
		assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("component.nameLast"));
		assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"), equalTo("component.name"));
	}

	/**
	 * @see DATAMONGO-1121
	 */
	@Test
	public void shouldOnlyConsiderEntitiesAsPotentialCycleCandidates() {

		List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(OuterDocumentReferingToIndexedPropertyViaDifferentNonCyclingPaths.class);

		assertThat(indexDefinitions, hasSize(2));
		assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("path1.foo"));
		assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"),
				equalTo("path2.propertyWithIndexedStructure.foo"));

	}

	@Document
	static class MixedIndexRoot {

@@ -554,6 +737,115 @@ public class MongoPersistentEntityIndexResolverUnitTests {

		@Indexed String foo;
	}

	@Document
	static class SimilarityHolingBean {

		@Indexed @Field("norm") String normalProperty;
		@Field("similarityL") private List<SimilaritySibling> listOfSimilarilyNamedEntities = null;
	}

	static class SimilaritySibling {
		@Field("similarity") private String similarThoughNotEqualNamedProperty;
	}

	@Document
	static class MultipleObjectsOfSameType {

		SelfCyclingViaCollectionType cycleOne;

		SelfCyclingViaCollectionType cycleTwo;
	}

	@Document
	static class SelfCyclingViaCollectionType {

		List<SelfCyclingViaCollectionType> cyclic;

	}

	@Document
	@CompoundIndex(name = "c_index", def = "{ foo:1, bar:1 }")
	static class DocumentWithNamedCompoundIndex {

		String property;
	}

	@Document
	static class DocumentWithNamedIndex {

		@Indexed(name = "property_index") String property;
	}

	static class TypeWithNamedIndex {

		@Indexed(name = "property_index") String property;
	}

	@Document
	static class DocumentWithNestedDocumentHavingNamedCompoundIndex {

		DocumentWithNamedCompoundIndex propertyOfTypeHavingNamedCompoundIndex;
	}

	@CompoundIndex(name = "c_index", def = "{ foo:1, bar:1 }")
	static class TypeWithNamedCompoundIndex {
		String property;
	}

	@Document
	static class DocumentWithNestedTypeHavingNamedCompoundIndex {

		TypeWithNamedCompoundIndex propertyOfTypeHavingNamedCompoundIndex;
	}

	@Document
	static class DocumentWithNestedDocumentHavingNamedIndex {

		DocumentWithNamedIndex propertyOfTypeHavingNamedIndex;
	}

	@Document
	static class DocumentWithNestedTypeHavingNamedIndex {

		TypeWithNamedIndex propertyOfTypeHavingNamedIndex;
	}

	@Document
	public class MultiplePropertiesOfSameTypeWithMatchingStartLetters {

		public class NameComponent {

			@Indexed String component;
		}

		NameComponent name;
		NameComponent nameLast;
	}

	@Document
	public class MultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty {

		public class NameComponent {

			@Indexed String nameLast;
			@Indexed String name;
		}

		NameComponent component;
	}

	@Document
	public static class OuterDocumentReferingToIndexedPropertyViaDifferentNonCyclingPaths {

		NoCycleButIndenticallNamedPropertiesDeeplyNested path1;
		AlternatePathToNoCycleButIndenticallNamedPropertiesDeeplyNestedDocument path2;
	}

	public static class AlternatePathToNoCycleButIndenticallNamedPropertiesDeeplyNestedDocument {
		NoCycleButIndenticallNamedPropertiesDeeplyNested propertyWithIndexedStructure;
	}

}

	private static List<IndexDefinitionHolder> prepareMappingContextAndResolveIndexForType(Class<?> type) {
@@ -586,4 +878,5 @@ public class MongoPersistentEntityIndexResolverUnitTests {
		}
		assertThat(holder.getCollection(), equalTo(expectedCollection));
	}

}

@@ -0,0 +1,92 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.index;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.CycleGuard.Path;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;

/**
 * Unit tests for {@link Path}.
 *
 * @author Christoph Strobl
 */
@RunWith(MockitoJUnitRunner.class)
public class PathUnitTests {

	@Mock MongoPersistentEntity<?> entityMock;

	@Before
	@SuppressWarnings({ "rawtypes", "unchecked" })
	public void setUp() {
		when(entityMock.getType()).thenReturn((Class) Object.class);
	}

	/**
	 * @see DATAMONGO-962
	 */
	@Test
	public void shouldIdentifyCycleForOwnerOfSameTypeAndMatchingPath() {

		MongoPersistentProperty property = createPersistentPropertyMock(entityMock, "foo");
		assertThat(new Path(property, "foo.bar").cycles(property, "foo.bar.bar"), is(true));
	}

	/**
	 * @see DATAMONGO-962
	 */
	@Test
	@SuppressWarnings("rawtypes")
	public void shouldAllowMatchingPathForDifferentOwners() {

		MongoPersistentProperty existing = createPersistentPropertyMock(entityMock, "foo");

		MongoPersistentEntity entityOfDifferentType = Mockito.mock(MongoPersistentEntity.class);
		when(entityOfDifferentType.getType()).thenReturn(String.class);
		MongoPersistentProperty toBeVerified = createPersistentPropertyMock(entityOfDifferentType, "foo");

		assertThat(new Path(existing, "foo.bar").cycles(toBeVerified, "foo.bar.bar"), is(false));
	}

	/**
	 * @see DATAMONGO-962
	 */
	@Test
	public void shouldAllowEqaulPropertiesOnDifferentPaths() {

		MongoPersistentProperty property = createPersistentPropertyMock(entityMock, "foo");
		assertThat(new Path(property, "foo.bar").cycles(property, "foo2.bar.bar"), is(false));
	}

	@SuppressWarnings({ "rawtypes", "unchecked" })
	private MongoPersistentProperty createPersistentPropertyMock(MongoPersistentEntity owner, String fieldname) {

		MongoPersistentProperty property = Mockito.mock(MongoPersistentProperty.class);
		when(property.getOwner()).thenReturn(owner);
		when(property.getFieldName()).thenReturn(fieldname);
		return property;
	}
}

@@ -103,6 +103,7 @@ public class MongoMappingContextUnitTests {
		exception.expectMessage("firstname");
		exception.expectMessage("lastname");
		exception.expectMessage("foo");
		exception.expectMessage("@Field");

		MongoMappingContext context = new MongoMappingContext();
		context.setApplicationContext(applicationContext);

@@ -0,0 +1,212 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.mapping;

import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;
import org.springframework.data.mapping.AssociationHandler;
import org.springframework.data.mapping.PersistentProperty;
import org.springframework.data.mapping.PreferredConstructor;
import org.springframework.data.mapping.PropertyHandler;
import org.springframework.data.mapping.SimpleAssociationHandler;
import org.springframework.data.mapping.SimplePropertyHandler;
import org.springframework.data.util.TypeInformation;

/**
 * Trivial dummy implementation of {@link MongoPersistentEntity} to be used in tests.
 *
 * @author Christoph Strobl
 * @param <T>
 */
public class MongoPersistentEntityTestDummy<T> implements MongoPersistentEntity<T> {

	private Map<Class<?>, Annotation> annotations = new HashMap<Class<?>, Annotation>();
	private Collection<MongoPersistentProperty> properties = new ArrayList<MongoPersistentProperty>();
	private String collection;
	private String name;
	private Class<T> type;

	@Override
	public String getName() {
		return name;
	}

	@Override
	public PreferredConstructor<T, MongoPersistentProperty> getPersistenceConstructor() {
		return null;
	}

	@Override
	public boolean isConstructorArgument(PersistentProperty<?> property) {
		return false;
	}

	@Override
	public boolean isIdProperty(PersistentProperty<?> property) {
		return property != null ? property.isIdProperty() : false;
	}

	@Override
	public boolean isVersionProperty(PersistentProperty<?> property) {
		return property != null ? property.isVersionProperty() : false;
	}

	@Override
	public MongoPersistentProperty getIdProperty() {
		return getPersistentProperty(Id.class);
	}

	@Override
	public MongoPersistentProperty getVersionProperty() {
		return getPersistentProperty(Version.class);
	}

	@Override
	public MongoPersistentProperty getPersistentProperty(String name) {

		for (MongoPersistentProperty p : this.properties) {
			if (p.getName().equals(name)) {
				return p;
			}
		}
		return null;
	}

	@Override
	public MongoPersistentProperty getPersistentProperty(Class<? extends Annotation> annotationType) {

		for (MongoPersistentProperty p : this.properties) {
			if (p.isAnnotationPresent(annotationType)) {
				return p;
			}
		}
		return null;
	}

	@Override
	public boolean hasIdProperty() {
		return false;
	}

	@Override
	public boolean hasVersionProperty() {
		return getVersionProperty() != null;
	}

	@Override
	public Class<T> getType() {
		return this.type;
	}

	@Override
	public Object getTypeAlias() {
		return null;
	}

	@Override
	public TypeInformation<T> getTypeInformation() {
		return null;
	}

	@Override
	public void doWithProperties(PropertyHandler<MongoPersistentProperty> handler) {

		for (MongoPersistentProperty p : this.properties) {
			handler.doWithPersistentProperty(p);
		}
	}

	@Override
	public void doWithProperties(SimplePropertyHandler handler) {

		for (MongoPersistentProperty p : this.properties) {
			handler.doWithPersistentProperty(p);
		}
	}

	@Override
	public void doWithAssociations(AssociationHandler<MongoPersistentProperty> handler) {

	}

	@Override
	public void doWithAssociations(SimpleAssociationHandler handler) {

	}

	@SuppressWarnings("unchecked")
	@Override
	public <A extends Annotation> A findAnnotation(Class<A> annotationType) {
		return (A) this.annotations.get(annotationType);
	}

	@Override
	public String getCollection() {
		return this.collection;
	}

	/**
	 * Simple builder to create {@link MongoPersistentEntityTestDummy} with defined properties.
	 *
	 * @author Christoph Strobl
	 * @param <T>
	 */
	public static class MongoPersistentEntityDummyBuilder<T> {

		private MongoPersistentEntityTestDummy<T> instance;

		private MongoPersistentEntityDummyBuilder(Class<T> type) {
			this.instance = new MongoPersistentEntityTestDummy<T>();
			this.instance.type = type;
		}

		@SuppressWarnings({ "rawtypes", "unchecked" })
		public static <T> MongoPersistentEntityDummyBuilder<T> forClass(Class<T> type) {
			return new MongoPersistentEntityDummyBuilder(type);
		}

		public MongoPersistentEntityDummyBuilder<T> withName(String name) {
			this.instance.name = name;
			return this;
		}

		public MongoPersistentEntityDummyBuilder<T> and(MongoPersistentProperty property) {
			this.instance.properties.add(property);
			return this;
		}

		public MongoPersistentEntityDummyBuilder<T> withCollection(String collection) {
			this.instance.collection = collection;
			return this;
		}

		public MongoPersistentEntityDummyBuilder<T> and(Annotation annotation) {
			this.instance.annotations.put(annotation.annotationType(), annotation);
			return this;
		}

		public MongoPersistentEntityTestDummy<T> build() {
			return this.instance;
		}

	}
}
@@ -1,5 +1,5 @@
/*
 * Copyright (c) 2011 by the original author(s).
 * Copyright 2011-2015 by the original author(s).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -16,17 +16,23 @@
package org.springframework.data.mongodb.core.mapping.event;

import java.util.ArrayList;
import java.util.List;

import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.data.mongodb.core.mapping.PersonPojoStringId;

public class PersonBeforeSaveListener implements ApplicationListener<BeforeSaveEvent<PersonPojoStringId>> {
import com.mongodb.DBObject;

	public final ArrayList<ApplicationEvent> seenEvents = new ArrayList<ApplicationEvent>();
public class PersonBeforeSaveListener extends AbstractMongoEventListener<PersonPojoStringId> {

	public void onApplicationEvent(BeforeSaveEvent<PersonPojoStringId> event) {
		this.seenEvents.add(event);
	public final List<ApplicationEvent> seenEvents = new ArrayList<ApplicationEvent>();

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeSave(java.lang.Object, com.mongodb.DBObject)
	 */
	@Override
	public void onBeforeSave(PersonPojoStringId source, DBObject dbo) {
		seenEvents.add(new BeforeSaveEvent<PersonPojoStringId>(source, dbo));
	}

}

@@ -1,5 +1,5 @@
/*
 * Copyright 2011 the original author or authors.
 * Copyright 2011-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -32,6 +32,7 @@ import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.geo.Box;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
@@ -50,6 +51,7 @@ import com.mongodb.Mongo;
 * Integration test for {@link MongoTemplate}'s Map-Reduce operations
 *
 * @author Mark Pollack
 * @author Thomas Darimont
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:infrastructure.xml")
@@ -276,6 +278,31 @@ public class MapReduceTests {

	}

	/**
	 * @see DATAMONGO-938
	 */
	@Test
	public void mapReduceShouldUseQueryMapper() {

		DBCollection c = mongoTemplate.getDb().getCollection("jmrWithGeo");

		c.save(new BasicDBObject("x", new String[] { "a", "b" }).append("loc", new double[] { 0, 0 }));
		c.save(new BasicDBObject("x", new String[] { "b", "c" }).append("loc", new double[] { 0, 0 }));
		c.save(new BasicDBObject("x", new String[] { "c", "d" }).append("loc", new double[] { 0, 0 }));

		Query query = new Query(where("x").ne(new String[] { "a", "b" }).and("loc")
				.within(new Box(new double[] { 0, 0 }, new double[] { 1, 1 })));

		MapReduceResults<ValueObject> results = template.mapReduce(query, "jmrWithGeo", mapFunction, reduceFunction,
				ValueObject.class);

		Map<String, Float> m = copyToMap(results);
		assertEquals(3, m.size());
		assertEquals(1, m.get("b").intValue());
		assertEquals(2, m.get("c").intValue());
		assertEquals(1, m.get("d").intValue());
	}

	private void performMapReduce(boolean inline, boolean withQuery) {
		createMapReduceData();
		MapReduceResults<ValueObject> results;

@@ -1,5 +1,5 @@
/*
 * Copyright 2010-2013 the original author or authors.
 * Copyright 2010-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -22,12 +22,14 @@ import org.junit.Test;
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;

import com.mongodb.BasicDBObject;
import com.mongodb.BasicDBObjectBuilder;
import com.mongodb.DBObject;

/**
 * @author Oliver Gierke
 * @author Thomas Darimont
 */
/**
 * @author Oliver Gierke
 * @author Thomas Darimont
 * @author Christoph Strobl
 */
public class CriteriaTests {

	@Test
@@ -72,50 +74,94 @@ public class CriteriaTests {
		assertThat(left, is(not(right)));
		assertThat(right, is(not(left)));
	}

	/**
	 * @see DATAMONGO-507
	 */
	@Test(expected = IllegalArgumentException.class)
	public void shouldThrowExceptionWhenTryingToNegateAndOperation() {

		new Criteria() //
				.not() //
				.andOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
	}

	/**
	 * @see DATAMONGO-507
	 */
	@Test(expected = IllegalArgumentException.class)
	public void shouldThrowExceptionWhenTryingToNegateOrOperation() {

		new Criteria() //
				.not() //
				.orOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
	}

	/**
	 * @see DATAMONGO-507
	 */
	@Test(expected = IllegalArgumentException.class)
	public void shouldThrowExceptionWhenTryingToNegateNorOperation() {

		new Criteria() //
				.not() //
				.norOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
	}

	/**
	 * @see DATAMONGO-507
	 */
	@Test
	public void shouldNegateFollowingSimpleExpression() {

		Criteria c = Criteria.where("age").not().gt(18).and("status").is("student");
		DBObject co = c.getCriteriaObject();

		assertThat(co, is(notNullValue()));
		assertThat(co.toString(), is("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}"));
	}

	/**
	 * @see DATAMONGO-507
	 */
	@Test(expected = IllegalArgumentException.class)
	public void shouldThrowExceptionWhenTryingToNegateAndOperation() {

		new Criteria() //
				.not() //
				.andOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
	}

	/**
	 * @see DATAMONGO-507
	 */
	@Test(expected = IllegalArgumentException.class)
	public void shouldThrowExceptionWhenTryingToNegateOrOperation() {

		new Criteria() //
				.not() //
				.orOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
	}

	/**
	 * @see DATAMONGO-507
	 */
	@Test(expected = IllegalArgumentException.class)
	public void shouldThrowExceptionWhenTryingToNegateNorOperation() {

		new Criteria() //
				.not() //
				.norOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
	}

	/**
	 * @see DATAMONGO-507
	 */
	@Test
	public void shouldNegateFollowingSimpleExpression() {

		Criteria c = Criteria.where("age").not().gt(18).and("status").is("student");
		DBObject co = c.getCriteriaObject();

		assertThat(co, is(notNullValue()));
		assertThat(co.toString(), is("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}"));
	}

	/**
	 * @see DATAMONGO-1068
	 */
	@Test
	public void getCriteriaObjectShouldReturnEmptyDBOWhenNoCriteriaSpecified() {

		DBObject dbo = new Criteria().getCriteriaObject();

		assertThat(dbo, equalTo(new BasicDBObjectBuilder().get()));
	}

	/**
	 * @see DATAMONGO-1068
	 */
	@Test
	public void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresent() {

		DBObject dbo = new Criteria().lt("foo").getCriteriaObject();

		assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("$lt", "foo").get()));
	}

	/**
	 * @see DATAMONGO-1068
	 */
	@Test
	public void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresentButMultipleCriteriasPresent() {

		DBObject dbo = new Criteria().lt("foo").gt("bar").getCriteriaObject();

		assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("$lt", "foo").add("$gt", "bar").get()));
	}

	/**
	 * @see DATAMONGO-1068
	 */
	@Test
	public void getCriteriaObjectShouldRespectNotWhenNoKeyPresent() {

		DBObject dbo = new Criteria().lt("foo").not().getCriteriaObject();

		assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("$not", new BasicDBObject("$lt", "foo")).get()));
	}
}

@@ -21,6 +21,7 @@ import static org.junit.Assert.*;
import java.util.Collections;
import java.util.Map;

import org.joda.time.DateTime;
import org.junit.Test;

/**
@@ -30,6 +31,7 @@ import org.junit.Test;
 * @author Thomas Risberg
 * @author Becca Gaspard
 * @author Christoph Strobl
 * @author Thomas Darimont
 */
public class UpdateTests {

@@ -284,4 +286,71 @@ public class UpdateTests {
	public void testCreatingUpdateWithNullKeyThrowsException() {
		Update.update(null, "value");
	}

	/**
	 * @see DATAMONGO-953
	 */
	@Test
	public void testEquality() {

		Update actualUpdate = new Update() //
				.inc("size", 1) //
				.set("nl", null) //
				.set("directory", "/Users/Test/Desktop") //
				.push("authors", Collections.singletonMap("name", "Sven")) //
				.pop("authors", Update.Position.FIRST) //
				.set("foo", "bar");

		Update expectedUpdate = new Update() //
				.inc("size", 1) //
				.set("nl", null) //
				.set("directory", "/Users/Test/Desktop") //
				.push("authors", Collections.singletonMap("name", "Sven")) //
				.pop("authors", Update.Position.FIRST) //
				.set("foo", "bar");

		assertThat(actualUpdate, is(equalTo(actualUpdate)));
		assertThat(actualUpdate.hashCode(), is(equalTo(actualUpdate.hashCode())));
		assertThat(actualUpdate, is(equalTo(expectedUpdate)));
		assertThat(actualUpdate.hashCode(), is(equalTo(expectedUpdate.hashCode())));
	}

	/**
	 * @see DATAMONGO-953
	 */
	@Test
	public void testToString() {

		Update actualUpdate = new Update() //
				.inc("size", 1) //
				.set("nl", null) //
				.set("directory", "/Users/Test/Desktop") //
				.push("authors", Collections.singletonMap("name", "Sven")) //
				.pop("authors", Update.Position.FIRST) //
				.set("foo", "bar");

		Update expectedUpdate = new Update() //
				.inc("size", 1) //
				.set("nl", null) //
				.set("directory", "/Users/Test/Desktop") //
				.push("authors", Collections.singletonMap("name", "Sven")) //
				.pop("authors", Update.Position.FIRST) //
				.set("foo", "bar");

		assertThat(actualUpdate.toString(), is(equalTo(expectedUpdate.toString())));
		assertThat(actualUpdate.toString(), is("{ \"$inc\" : { \"size\" : 1} ," //
				+ " \"$set\" : { \"nl\" : null , \"directory\" : \"/Users/Test/Desktop\" , \"foo\" : \"bar\"} , " //
				+ "\"$push\" : { \"authors\" : { \"name\" : \"Sven\"}} " //
				+ ", \"$pop\" : { \"authors\" : -1}}")); //
	}

	/**
	 * @see DATAMONGO-1002
	 */
	@Test
	public void toStringWorksForUpdateWithComplexObject() {

		Update update = new Update().addToSet("key", new DateTime());
		assertThat(update.toString(), is(notNullValue()));
	}
}

@@ -24,6 +24,7 @@ import java.util.HashSet;
import java.util.List;

import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
@@ -880,4 +881,98 @@ public abstract class AbstractPersonRepositoryIntegrationTests {

		assertThat(result.getContent(), hasSize(1));
	}

	/**
	 * @see DATAMONGO-745
	 */
	@Test
	public void findByCustomQueryFirstnamesInListAndLastname() {

		repository.save(new Person("foo", "bar"));
		repository.save(new Person("bar", "bar"));
		repository.save(new Person("fuu", "bar"));
		repository.save(new Person("notfound", "bar"));

		Page<Person> result = repository.findByCustomQueryFirstnamesAndLastname(Arrays.asList("bar", "foo", "fuu"), "bar",
				new PageRequest(0, 2));

		assertThat(result.getContent(), hasSize(2));
		assertThat(result.getTotalPages(), is(2));
		assertThat(result.getTotalElements(), is(3L));
	}

	/**
	 * @see DATAMONGO-745
	 */
	@Test
	public void findByCustomQueryLastnameAndStreetInList() {

		repository.save(new Person("foo", "bar").withAddress(new Address("street1", "1", "SB")));
		repository.save(new Person("bar", "bar").withAddress(new Address("street2", "1", "SB")));
		repository.save(new Person("fuu", "bar").withAddress(new Address("street1", "2", "RGB")));
		repository.save(new Person("notfound", "notfound"));

		Page<Person> result = repository.findByCustomQueryLastnameAndAddressStreetInList("bar",
				Arrays.asList("street1", "street2"), new PageRequest(0, 2));

		assertThat(result.getContent(), hasSize(2));
		assertThat(result.getTotalPages(), is(2));
		assertThat(result.getTotalElements(), is(3L));
	}

	/**
	 * Ignored for now as this requires Querydsl 3.4.1 to succeed.
	 *
	 * @see DATAMONGO-972
	 */
	@Test
	@Ignore
	public void shouldExecuteFindOnDbRefCorrectly() {

		operations.remove(new org.springframework.data.mongodb.core.query.Query(), User.class);

		User user = new User();
		user.setUsername("Valerie Matthews");

		operations.save(user);

		dave.setCreator(user);
		operations.save(dave);

		assertThat(repository.findOne(QPerson.person.creator.eq(user)), is(dave));
	}

	/**
	 * @see DATAMONGO-969
	 */
	@Test
	public void shouldFindPersonsWhenUsingQueryDslPerdicatedOnIdProperty() {
		assertThat(repository.findAll(person.id.in(Arrays.asList(dave.id, carter.id))), containsInAnyOrder(dave, carter));
	}

	/**
	 * @see DATAMONGO-1030
	 */
	@Test
	public void executesSingleEntityQueryWithProjectionCorrectly() {

		PersonSummary result = repository.findSummaryByLastname("Beauford");

		assertThat(result, is(notNullValue()));
		assertThat(result.firstname, is("Carter"));
		assertThat(result.lastname, is("Beauford"));

	}

	/**
	 * @see DATAMONGO-1072
	 */
	@Test
	public void shouldBindPlaceholdersUsedAsKeysCorrectly() {

		List<Person> persons = repository.findByKeyValue("firstname", alicia.getFirstname());

		assertThat(persons, hasSize(1));
		assertThat(persons, hasItem(alicia));
	}
}

@@ -0,0 +1,132 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.repository;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.util.Collections;
import java.util.List;

import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import com.mongodb.Mongo;
import com.mongodb.MongoClient;

/**
 * @author Christoph Strobl
 * @author Oliver Gierke
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration
public class ComplexIdRepositoryIntegrationTests {

	@Configuration
	@EnableMongoRepositories
	static class Config extends AbstractMongoConfiguration {

		@Override
		protected String getDatabaseName() {
			return "complexIdTest";
		}

		@Override
		public Mongo mongo() throws Exception {
			return new MongoClient();
		}

	}

	@Autowired UserWithComplexIdRepository repo;
	@Autowired MongoTemplate template;

	MyId id;
	UserWithComplexId userWithId;

	@Before
	public void setUp() {

		repo.deleteAll();

		id = new MyId();
		id.val1 = "v1";
		id.val2 = "v2";

		userWithId = new UserWithComplexId();
		userWithId.firstname = "foo";
		userWithId.id = id;
	}

	/**
	 * @see DATAMONGO-1078
	 */
	@Test
	public void annotatedFindQueryShouldWorkWhenUsingComplexId() {

		repo.save(userWithId);

		assertThat(repo.getUserByComplexId(id), is(userWithId));
	}

	/**
	 * @see DATAMONGO-1078
	 */
	@Test
	public void annotatedFindQueryShouldWorkWhenUsingComplexIdWithinCollection() {

		repo.save(userWithId);

		List<UserWithComplexId> loaded = repo.findByUserIds(Collections.singleton(id));

		assertThat(loaded, hasSize(1));
		assertThat(loaded, contains(userWithId));
	}

	/**
	 * @see DATAMONGO-1078
	 */
	@Test
	public void findOneShouldWorkWhenUsingComplexId() {

		repo.save(userWithId);

		assertThat(repo.findOne(id), is(userWithId));
	}

	/**
	 * @see DATAMONGO-1078
	 */
	@Test
	public void findAllShouldWorkWhenUsingComplexId() {

		repo.save(userWithId);

		Iterable<UserWithComplexId> loaded = repo.findAll(Collections.singleton(id));

		assertThat(loaded, is(Matchers.<UserWithComplexId> iterableWithSize(1)));
		assertThat(loaded, contains(userWithId));
	}
}
@@ -20,7 +20,7 @@ import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;

/**
 * Sample contactt domain class.
 * Sample contact domain class.
 *
 * @author Oliver Gierke
 */

@@ -0,0 +1,59 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.repository;

import java.io.Serializable;

import org.springframework.util.ObjectUtils;

/**
 * @author Christoph Strobl
 * @author Oliver Gierke
 */
public class MyId implements Serializable {

	private static final long serialVersionUID = -7129201311241750831L;

	String val1;
	String val2;

	@Override
	public int hashCode() {

		int result = 31;

		result += 17 * ObjectUtils.nullSafeHashCode(val1);
		result += 17 * ObjectUtils.nullSafeHashCode(val2);

		return result;
	}

	@Override
	public boolean equals(Object obj) {

		if (obj == this) {
			return true;
		}

		if (!(obj instanceof MyId)) {
			return false;
		}

		MyId that = (MyId) obj;

		return ObjectUtils.nullSafeEquals(this.val1, that.val1) && ObjectUtils.nullSafeEquals(this.val2, that.val2);
	}
}
@@ -46,6 +46,8 @@ public class Person extends Contact {
	@SuppressWarnings("unused") private Sex sex;
	Date createdAt;

	List<String> skills;

	@GeoSpatialIndexed private Point location;

	private Address address;
@@ -261,6 +263,24 @@ public class Person extends Contact {
		return this.getId().equals(that.getId());
	}

	public Person withAddress(Address address) {

		this.address = address;
		return this;
	}

	public void setCreator(User creator) {
		this.creator = creator;
	}

	public void setSkills(List<String> skills) {
		this.skills = skills;
	}

	public List<String> getSkills() {
		return skills;
	}

	/*
	 * (non-Javadoc)
	 *

@@ -290,4 +290,24 @@ public interface PersonRepository extends MongoRepository<Person, String>, Query
	 * @see DATAMONGO-893
	 */
	Page<Person> findByAddressIn(List<Address> address, Pageable page);

	/**
	 * @see DATAMONGO-745
	 */
	@Query("{firstname:{$in:?0}, lastname:?1}")
	Page<Person> findByCustomQueryFirstnamesAndLastname(List<String> firstnames, String lastname, Pageable page);

	/**
	 * @see DATAMONGO-745
	 */
	@Query("{lastname:?0, address.street:{$in:?1}}")
	Page<Person> findByCustomQueryLastnameAndAddressStreetInList(String lastname, List<String> streetNames, Pageable page);

	/**
	 * @see DATAMONGO-1030
	 */
	PersonSummary findSummaryByLastname(String lastname);

	@Query("{ ?0 : ?1 }")
	List<Person> findByKeyValue(String key, String value);
}

@@ -0,0 +1,25 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.repository;

/**
 * @author Oliver Gierke
 */
public class PersonSummary {

	String firstname;
	String lastname;
}
@@ -0,0 +1,57 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.repository;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.util.ObjectUtils;

/**
 * @author Christoph Strobl
 * @author Oliver Gierke
 */
@Document
public class UserWithComplexId {

	@Id MyId id;
	String firstname;

	@Override
	public int hashCode() {

		int result = 31;

		result += 17 * ObjectUtils.nullSafeHashCode(id);

		return result;
	}

	@Override
	public boolean equals(Object obj) {

		if (obj == this) {
			return true;
		}

		if (!(obj instanceof UserWithComplexId)) {
			return false;
		}

		UserWithComplexId that = (UserWithComplexId) obj;

		return ObjectUtils.nullSafeEquals(this.id, that.id);
	}
}
@@ -0,0 +1,33 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.repository;

import java.util.Collection;
import java.util.List;

import org.springframework.data.repository.CrudRepository;

/**
 * @author Christoph Strobl
 */
public interface UserWithComplexIdRepository extends CrudRepository<UserWithComplexId, MyId> {

	@Query("{'_id': {$in: ?0}}")
	List<UserWithComplexId> findByUserIds(Collection<MyId> ids);

	@Query("{'_id': ?0}")
	UserWithComplexId getUserByComplexId(MyId id);
}
@@ -54,7 +54,7 @@ import com.mongodb.WriteResult;
 * @author Oliver Gierke
 */
@RunWith(MockitoJUnitRunner.class)
public class AbstracMongoQueryUnitTests {
public class AbstractMongoQueryUnitTests {

	@Mock RepositoryMetadata metadataMock;
	@Mock MongoOperations mongoOperationsMock;
@@ -88,13 +88,15 @@ public class AbstracMongoQueryUnitTests {

		createQueryForMethod("deletePersonByLastname", String.class).setDeleteQuery(true).execute(new Object[] { "booh" });

		verify(this.mongoOperationsMock, times(1)).remove(Matchers.any(Query.class), Matchers.eq("persons"));
		verify(this.mongoOperationsMock, times(1)).remove(Matchers.any(Query.class), Matchers.eq(Person.class),
				Matchers.eq("persons"));
		verify(this.mongoOperationsMock, times(0)).find(Matchers.any(Query.class), Matchers.any(Class.class),
				Matchers.anyString());
	}

	/**
	 * @see DATAMONGO-566
	 * @see DATAMONGO-1040
	 */
	@SuppressWarnings("unchecked")
	@Test
@@ -105,7 +107,8 @@ public class AbstracMongoQueryUnitTests {

		createQueryForMethod("deleteByLastname", String.class).setDeleteQuery(true).execute(new Object[] { "booh" });

		verify(this.mongoOperationsMock, times(1)).findAllAndRemove(Matchers.any(Query.class), Matchers.eq(Person.class));
		verify(this.mongoOperationsMock, times(1)).findAllAndRemove(Matchers.any(Query.class), Matchers.eq(Person.class),
				Matchers.eq("persons"));
	}

	/**
@@ -122,19 +125,21 @@ public class AbstracMongoQueryUnitTests {

	/**
	 * @see DATAMONGO-566
	 * @see DATAMONGO-978
	 */
	@Test
	public void testDeleteExecutionReturnsNrDocumentsDeletedFromWriteResult() {

		when(writeResultMock.getN()).thenReturn(100);
		when(this.mongoOperationsMock.remove(Matchers.any(Query.class), Matchers.eq("persons")))
		when(this.mongoOperationsMock.remove(Matchers.any(Query.class), Matchers.eq(Person.class), Matchers.eq("persons")))
				.thenReturn(writeResultMock);

		MongoQueryFake query = createQueryForMethod("deletePersonByLastname", String.class);
		query.setDeleteQuery(true);

		assertThat(query.execute(new Object[] { "fake" }), is((Object) 100L));
		verify(this.mongoOperationsMock, times(1)).remove(Matchers.any(Query.class), Matchers.eq("persons"));
		verify(this.mongoOperationsMock, times(1)).remove(Matchers.any(Query.class), Matchers.eq(Person.class),
				Matchers.eq("persons"));
	}

	private MongoQueryFake createQueryForMethod(String methodName, Class<?>... paramTypes) {
@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.repository.query;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import static org.mockito.Matchers.*;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;
@@ -41,6 +42,7 @@ import org.springframework.data.geo.Metrics;
import org.springframework.data.geo.Point;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.Person;
import org.springframework.data.mongodb.core.Venue;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.Field;
@@ -438,6 +440,144 @@ public class MongoQueryCreatorUnitTests {
		assertThat(query, is(query(where("firstName").regex("^dave$", "i").and("age").is(42))));
	}

	/**
	 * @see DATAMONGO-1075
	 */
	@Test
	public void shouldCreateInClauseWhenUsingContainsOnCollectionLikeProperty() {

		PartTree tree = new PartTree("findByEmailAddressesContaining", User.class);
		MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context);

		Query query = creator.createQuery();

		assertThat(query, is(query(where("emailAddresses").in("dave"))));
	}

	/**
	 * @see DATAMONGO-1139
	 */
	@Test
	public void createsNonShericalNearForDistanceWithDefaultMetric() {

		Point point = new Point(1.0, 1.0);
		Distance distance = new Distance(1.0);

		PartTree tree = new PartTree("findByLocationNear", Venue.class);
		MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, point, distance), context);
		Query query = creator.createQuery();

		assertThat(query, is(query(where("location").near(point).maxDistance(1.0))));
	}

	/**
	 * @see DATAMONGO-1229
	 */
	@Test
	public void appliesIgnoreCaseToLeafProperty() {

		PartTree tree = new PartTree("findByAddressStreetIgnoreCase", User.class);
		ConvertingParameterAccessor accessor = getAccessor(converter, "Street");

		assertThat(new MongoQueryCreator(tree, accessor, context).createQuery(), is(notNullValue()));
	}

	/**
	 * @see DATAMONGO-1232
	 */
	@Test
	public void ignoreCaseShouldEscapeSource() {

		PartTree tree = new PartTree("findByUsernameIgnoreCase", User.class);
		ConvertingParameterAccessor accessor = getAccessor(converter, "con.flux+");

		Query query = new MongoQueryCreator(tree, accessor, context).createQuery();

		assertThat(query, is(query(where("foo").regex("^\\Qcon.flux+\\E$", "i"))));
	}

	/**
	 * @see DATAMONGO-1232
	 */
	@Test
	public void ignoreCaseShouldEscapeSourceWhenUsedForStartingWith() {

		PartTree tree = new PartTree("findByUsernameStartingWithIgnoreCase", User.class);
		ConvertingParameterAccessor accessor = getAccessor(converter, "dawns.light+");

		Query query = new MongoQueryCreator(tree, accessor, context).createQuery();

		assertThat(query, is(query(where("foo").regex("^\\Qdawns.light+\\E", "i"))));
	}

	/**
	 * @see DATAMONGO-1232
	 */
	@Test
	public void ignoreCaseShouldEscapeSourceWhenUsedForEndingWith() {

		PartTree tree = new PartTree("findByUsernameEndingWithIgnoreCase", User.class);
		ConvertingParameterAccessor accessor = getAccessor(converter, "new.ton+");

		Query query = new MongoQueryCreator(tree, accessor, context).createQuery();

		assertThat(query, is(query(where("foo").regex("\\Qnew.ton+\\E$", "i"))));
	}

	/**
	 * @see DATAMONGO-1232
	 */
	@Test
	public void likeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() {

		PartTree tree = new PartTree("findByUsernameLike", User.class);
		ConvertingParameterAccessor accessor = getAccessor(converter, "*fire.fight+*");

		Query query = new MongoQueryCreator(tree, accessor, context).createQuery();

		assertThat(query, is(query(where("foo").regex(".*\\Qfire.fight+\\E.*"))));
	}

	/**
	 * @see DATAMONGO-1232
	 */
	@Test
	public void likeShouldEscapeSourceWhenUsedWithLeadingWildcard() {

		PartTree tree = new PartTree("findByUsernameLike", User.class);
		ConvertingParameterAccessor accessor = getAccessor(converter, "*steel.heart+");

		Query query = new MongoQueryCreator(tree, accessor, context).createQuery();

		assertThat(query, is(query(where("foo").regex(".*\\Qsteel.heart+\\E"))));
	}

	/**
	 * @see DATAMONGO-1232
	 */
	@Test
	public void likeShouldEscapeSourceWhenUsedWithTrailingWildcard() {

		PartTree tree = new PartTree("findByUsernameLike", User.class);
		ConvertingParameterAccessor accessor = getAccessor(converter, "cala.mity+*");

		Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
		assertThat(query, is(query(where("foo").regex("\\Qcala.mity+\\E.*"))));
	}

	/**
	 * @see DATAMONGO-1232
	 */
	@Test
	public void likeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() {

		PartTree tree = new PartTree("findByUsernameLike", User.class);
		ConvertingParameterAccessor accessor = getAccessor(converter, "*");

		Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
		assertThat(query, is(query(where("foo").regex(".*"))));
	}

	interface PersonRepository extends Repository<Person, Long> {

		List<Person> findByLocationNearAndFirstname(Point location, Distance maxDistance, String firstname);
@@ -448,5 +588,14 @@ public class MongoQueryCreatorUnitTests {
		@Field("foo") String username;

		@DBRef User creator;

		List<String> emailAddresses;

		Address address;
	}

	class Address {

		String street;
	}
}

@@ -0,0 +1,182 @@
|
||||
/*
|
||||
* Copyright 2014-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Matchers;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.repository.MongoRepository;
|
||||
import org.springframework.data.mongodb.repository.Person;
|
||||
import org.springframework.data.mongodb.repository.Query;
|
||||
import org.springframework.data.repository.core.RepositoryMetadata;
|
||||
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.util.JSONParseException;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link PartTreeMongoQuery}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class PartTreeMongoQueryUnitTests {
|
||||
|
||||
@Mock RepositoryMetadata metadataMock;
|
||||
@Mock MongoOperations mongoOperationsMock;
|
||||
|
||||
MongoMappingContext mappingContext;
|
||||
|
||||
public @Rule ExpectedException exception = ExpectedException.none();
|
||||
|
||||
@Before
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public void setUp() {
|
||||
|
||||
when(metadataMock.getDomainType()).thenReturn((Class) Person.class);
|
||||
when(metadataMock.getReturnedDomainClass(Matchers.any(Method.class))).thenReturn((Class) Person.class);
|
||||
mappingContext = new MongoMappingContext();
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(mock(MongoDbFactory.class));
|
||||
MongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
|
||||
|
||||
when(mongoOperationsMock.getConverter()).thenReturn(converter);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMOGO-952
|
||||
*/
|
||||
@Test
|
||||
public void rejectsInvalidFieldSpecification() {
|
||||
|
||||
exception.expect(IllegalStateException.class);
|
||||
exception.expectMessage("findByLastname");
|
||||
|
||||
deriveQueryFromMethod("findByLastname", new Object[] { "foo" });
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-952
|
||||
*/
|
||||
@Test
|
||||
public void singleFieldJsonIncludeRestrictionShouldBeConsidered() {
|
||||
|
||||
org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findByFirstname",
|
||||
new Object[] { "foo" });
|
||||
|
||||
assertThat(query.getFieldsObject(), is(new BasicDBObjectBuilder().add("firstname", 1).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-952
|
||||
*/
|
||||
@Test
|
||||
public void multiFieldJsonIncludeRestrictionShouldBeConsidered() {
|
||||
|
||||
org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findByFirstnameAndLastname",
|
||||
new Object[] { "foo", "bar" });
|
||||
|
||||
assertThat(query.getFieldsObject(), is(new BasicDBObjectBuilder().add("firstname", 1).add("lastname", 1).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-952
|
||||
*/
|
||||
@Test
|
||||
public void multiFieldJsonExcludeRestrictionShouldBeConsidered() {
|
||||
|
||||
org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findPersonByFirstnameAndLastname",
|
||||
new Object[] { "foo", "bar" });
|
||||
|
||||
assertThat(query.getFieldsObject(), is(new BasicDBObjectBuilder().add("firstname", 0).add("lastname", 0).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1180
|
||||
*/
|
||||
@Test
|
||||
public void propagatesRootExceptionForInvalidQuery() {
|
||||
|
||||
exception.expect(IllegalStateException.class);
|
||||
exception.expectCause(is(org.hamcrest.Matchers.<Throwable> instanceOf(JSONParseException.class)));
|
||||
|
||||
deriveQueryFromMethod("findByAge", new Object[] { 1 });
|
||||
}
|
||||
|
||||
private org.springframework.data.mongodb.core.query.Query deriveQueryFromMethod(String method, Object[] args) {
|
||||
|
||||
Class<?>[] types = new Class<?>[args.length];
|
||||
|
||||
for (int i = 0; i < args.length; i++) {
|
||||
types[i] = args[i].getClass();
|
||||
}
|
||||
|
||||
PartTreeMongoQuery partTreeQuery = createQueryForMethod(method, types);
|
||||
|
||||
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(partTreeQuery.getQueryMethod(), args);
|
||||
return partTreeQuery.createQuery(new ConvertingParameterAccessor(mongoOperationsMock.getConverter(), accessor));
|
||||
}
|
||||
|
||||
private PartTreeMongoQuery createQueryForMethod(String methodName, Class<?>... paramTypes) {
|
||||
|
||||
try {
|
||||
|
||||
Method method = Repo.class.getMethod(methodName, paramTypes);
|
||||
MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadataMock, mappingContext);
|
||||
|
||||
return new PartTreeMongoQuery(queryMethod, mongoOperationsMock);
|
||||
} catch (NoSuchMethodException e) {
|
||||
throw new IllegalArgumentException(e.getMessage(), e);
|
||||
} catch (SecurityException e) {
|
||||
throw new IllegalArgumentException(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
interface Repo extends MongoRepository<Person, Long> {
|
||||
|
||||
@Query(fields = "firstname")
|
||||
Person findByLastname(String lastname);
|
||||
|
||||
@Query(fields = "{ 'firstname' : 1 }")
|
||||
Person findByFirstname(String lastname);
|
||||
|
||||
@Query(fields = "{ 'firstname' : 1, 'lastname' : 1 }")
|
||||
Person findByFirstnameAndLastname(String firstname, String lastname);
|
||||
|
||||
@Query(fields = "{ 'firstname' : 0, 'lastname' : 0 }")
|
||||
Person findPersonByFirstnameAndLastname(String firstname, String lastname);
|
||||
|
||||
@Query(fields = "{ 'firstname }")
|
||||
Person findByAge(Integer age);
|
||||
}
|
||||
}
|
||||
@@ -20,6 +20,9 @@ import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
@@ -39,6 +42,7 @@ import org.springframework.data.mongodb.repository.Query;
|
||||
import org.springframework.data.repository.core.RepositoryMetadata;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
@@ -46,6 +50,7 @@ import com.mongodb.DBObject;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class StringBasedMongoQueryUnitTests {
|
||||
@@ -158,6 +163,114 @@ public class StringBasedMongoQueryUnitTests {
|
||||
createQueryForMethod("invalidMethod", String.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-420
|
||||
*/
|
||||
@Test
|
||||
public void shouldSupportFindByParameterizedCriteriaAndFields() throws Exception {
|
||||
|
||||
ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, new Object[] {
|
||||
new BasicDBObject("firstname", "first").append("lastname", "last"), Collections.singletonMap("lastname", 1) });
|
||||
StringBasedMongoQuery mongoQuery = createQueryForMethod("findByParameterizedCriteriaAndFields", DBObject.class,
|
||||
Map.class);
|
||||
|
||||
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor);
|
||||
|
||||
assertThat(query.getQueryObject(),
|
||||
is(new BasicQuery("{ \"firstname\": \"first\", \"lastname\": \"last\"}").getQueryObject()));
|
||||
assertThat(query.getFieldsObject(), is(new BasicQuery(null, "{ \"lastname\": 1}").getFieldsObject()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-420
|
||||
*/
|
||||
@Test
|
||||
public void shouldSupportRespectExistingQuotingInFindByTitleBeginsWithExplicitQuoting() throws Exception {
|
||||
|
||||
ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, new Object[] { "fun" });
|
||||
StringBasedMongoQuery mongoQuery = createQueryForMethod("findByTitleBeginsWithExplicitQuoting", String.class);
|
||||
|
||||
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor);
|
||||
|
||||
assertThat(query.getQueryObject(), is(new BasicQuery("{title: {$regex: '^fun', $options: 'i'}}").getQueryObject()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-995, DATAMONGO-420
|
||||
*/
|
||||
@Test
|
||||
public void shouldParseQueryWithParametersInExpression() throws Exception {
|
||||
|
||||
ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, new Object[] { 1, 2, 3, 4 });
|
||||
StringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithParametersInExpression", int.class,
|
||||
int.class, int.class, int.class);
|
||||
|
||||
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor);
|
||||
|
||||
assertThat(query.getQueryObject(), is(new BasicQuery(
|
||||
"{$where: 'return this.date.getUTCMonth() == 3 && this.date.getUTCDay() == 4;'}").getQueryObject()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-995, DATAMONGO-420
|
||||
*/
|
||||
@Test
|
||||
public void bindsSimplePropertyAlreadyQuotedCorrectly() throws Exception {
|
||||
|
||||
ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "Matthews");
|
||||
StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameQuoted", String.class);
|
||||
|
||||
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor);
|
||||
org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}");
|
||||
|
||||
assertThat(query.getQueryObject(), is(reference.getQueryObject()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-995, DATAMONGO-420
|
||||
*/
|
||||
@Test
|
||||
public void bindsSimplePropertyAlreadyQuotedWithRegexCorrectly() throws Exception {
|
||||
|
||||
ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "^Mat.*");
|
||||
StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameQuoted", String.class);
|
||||
|
||||
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor);
|
||||
org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : '^Mat.*'}");
|
||||
|
||||
assertThat(query.getQueryObject(), is(reference.getQueryObject()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-995, DATAMONGO-420
|
||||
*/
|
||||
@Test
|
||||
public void bindsSimplePropertyWithRegexCorrectly() throws Exception {
|
||||
|
||||
StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastname", String.class);
|
||||
ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "^Mat.*");
|
||||
|
||||
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor);
|
||||
org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : '^Mat.*'}");
|
||||
|
||||
assertThat(query.getQueryObject(), is(reference.getQueryObject()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1072
|
||||
*/
|
||||
@Test
|
||||
public void shouldParseJsonKeyReplacementCorrectly() throws Exception {
|
||||
|
||||
StringBasedMongoQuery mongoQuery = createQueryForMethod("methodWithPlaceholderInKeyOfJsonStructure", String.class,
|
||||
String.class);
|
||||
ConvertingParameterAccessor parameterAccessor = StubParameterAccessor.getAccessor(converter, "key", "value");
|
||||
|
||||
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(parameterAccessor);
|
||||
|
||||
assertThat(query.getQueryObject(), is(new BasicDBObjectBuilder().add("key", "value").get()));
|
||||
}
|
||||
|
||||
private StringBasedMongoQuery createQueryForMethod(String name, Class<?>... parameters) throws Exception {
|
||||
|
||||
Method method = SampleRepository.class.getMethod(name, parameters);
|
||||
@@ -170,6 +283,9 @@ public class StringBasedMongoQueryUnitTests {
|
||||
@Query("{ 'lastname' : ?0 }")
|
||||
Person findByLastname(String lastname);
|
||||
|
||||
@Query("{ 'lastname' : '?0' }")
|
||||
Person findByLastnameQuoted(String lastname);
|
||||
|
||||
@Query("{ 'address' : ?0 }")
|
||||
Person findByAddress(Address address);
|
||||
|
||||
@@ -184,5 +300,17 @@ public class StringBasedMongoQueryUnitTests {
|
||||
|
||||
@Query(value = "{ 'lastname' : ?0 }", delete = true, count = true)
|
||||
void invalidMethod(String lastname);
|
||||
|
||||
@Query(value = "?0", fields = "?1")
|
||||
DBObject findByParameterizedCriteriaAndFields(DBObject criteria, Map<String, Integer> fields);
|
||||
|
||||
@Query("{'title': { $regex : '^?0', $options : 'i'}}")
|
||||
List<DBObject> findByTitleBeginsWithExplicitQuoting(String title);
|
||||
|
||||
@Query(value = "{$where: 'return this.date.getUTCMonth() == ?2 && this.date.getUTCDay() == ?3;'}")
|
||||
List<DBObject> findByQueryWithParametersInExpression(int param1, int param2, int param3, int param4);
|
||||
|
||||
@Query("{ ?0 : ?1}")
|
||||
Object methodWithPlaceholderInKeyOfJsonStructure(String keyReplacement, String valueReplacement);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -33,6 +33,7 @@ import org.springframework.data.repository.query.ParameterAccessor;
|
||||
class StubParameterAccessor implements MongoParameterAccessor {
|
||||
|
||||
private final Object[] values;
|
||||
private Distance distance;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ConvertingParameterAccessor} backed by a {@link StubParameterAccessor} simply returning the
|
||||
@@ -47,7 +48,14 @@ class StubParameterAccessor implements MongoParameterAccessor {
|
||||
}
|
||||
|
||||
public StubParameterAccessor(Object... values) {
|
||||
|
||||
this.values = values;
|
||||
|
||||
for (Object value : values) {
|
||||
if (value instanceof Distance) {
|
||||
this.distance = (Distance) value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -87,7 +95,7 @@ class StubParameterAccessor implements MongoParameterAccessor {
|
||||
* @see org.springframework.data.mongodb.repository.MongoParameterAccessor#getMaxDistance()
|
||||
*/
|
||||
public Distance getMaxDistance() {
|
||||
return null;
|
||||
return distance;
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -18,6 +18,8 @@ package org.springframework.data.mongodb.repository.support;
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
@@ -40,8 +42,7 @@ import com.mysema.query.mongodb.MongodbQuery;
|
||||
@ContextConfiguration("classpath:infrastructure.xml")
|
||||
public class QuerydslRepositorySupportUnitTests {
|
||||
|
||||
@Autowired
|
||||
MongoOperations operations;
|
||||
@Autowired MongoOperations operations;
|
||||
Person person;
|
||||
|
||||
@Before
|
||||
@@ -54,9 +55,26 @@ public class QuerydslRepositorySupportUnitTests {
|
||||
@Test
|
||||
public void providesMongoQuery() {
|
||||
QPerson p = QPerson.person;
|
||||
QuerydslRepositorySupport support = new QuerydslRepositorySupport(operations) {
|
||||
};
|
||||
QuerydslRepositorySupport support = new QuerydslRepositorySupport(operations) {};
|
||||
MongodbQuery<Person> query = support.from(p).where(p.lastname.eq("Matthews"));
|
||||
assertThat(query.uniqueResult(), is(person));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1063
|
||||
*/
|
||||
@Test
|
||||
public void shouldAllowAny() {
|
||||
|
||||
person.setSkills(Arrays.asList("vocalist", "songwriter", "guitarist"));
|
||||
|
||||
operations.save(person);
|
||||
|
||||
QPerson p = QPerson.person;
|
||||
QuerydslRepositorySupport support = new QuerydslRepositorySupport(operations) {};
|
||||
|
||||
MongodbQuery<Person> query = support.from(p).where(p.skills.any().in("guitarist"));
|
||||
|
||||
assertThat(query.uniqueResult(), is(person));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,8 +17,10 @@ package org.springframework.data.mongodb.repository.support;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.core.DBObjectTestUtils.*;
|
||||
|
||||
import org.bson.types.ObjectId;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
@@ -32,6 +34,7 @@ import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.repository.QAddress;
|
||||
import org.springframework.data.mongodb.repository.QPerson;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mysema.query.types.expr.BooleanOperation;
|
||||
@@ -43,6 +46,7 @@ import com.mysema.query.types.path.StringPath;
|
||||
* Unit tests for {@link SpringDataMongodbSerializer}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class SpringDataMongodbSerializerUnitTests {
|
||||
@@ -132,6 +136,41 @@ public class SpringDataMongodbSerializerUnitTests {
|
||||
assertThat(path, is("0"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-969
|
||||
*/
|
||||
@Test
|
||||
public void shouldConvertObjectIdEvenWhenNestedInOperatorDbObject() {
|
||||
|
||||
ObjectId value = new ObjectId("53bb9fd14438765b29c2d56e");
|
||||
DBObject serialized = serializer.asDBObject("_id", new BasicDBObject("$ne", value.toString()));
|
||||
|
||||
DBObject _id = getAsDBObject(serialized, "_id");
|
||||
ObjectId $ne = getTypedValue(_id, "$ne", ObjectId.class);
|
||||
assertThat($ne, is(value));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-969
|
||||
*/
|
||||
@Test
|
||||
public void shouldConvertCollectionOfObjectIdEvenWhenNestedInOperatorDbObject() {
|
||||
|
||||
ObjectId firstId = new ObjectId("53bb9fd14438765b29c2d56e");
|
||||
ObjectId secondId = new ObjectId("53bb9fda4438765b29c2d56f");
|
||||
|
||||
BasicDBList objectIds = new BasicDBList();
|
||||
objectIds.add(firstId.toString());
|
||||
objectIds.add(secondId.toString());
|
||||
|
||||
DBObject serialized = serializer.asDBObject("_id", new BasicDBObject("$in", objectIds));
|
||||
|
||||
DBObject _id = getAsDBObject(serialized, "_id");
|
||||
Object[] $in = getTypedValue(_id, "$in", Object[].class);
|
||||
|
||||
assertThat($in, Matchers.<Object> arrayContaining(firstId, secondId));
|
||||
}
|
||||
|
||||
class Address {
|
||||
String id;
|
||||
String street;
|
||||
|
||||
@@ -0,0 +1,369 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.test.util;
|
||||
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import org.junit.rules.TestRule;
|
||||
import org.junit.runner.Description;
|
||||
import org.junit.runners.model.Statement;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* {@link CleanMongoDB} is a JUnit {@link TestRule} implementation used to wipe data from a MongoDB instance.
* MongoDB-specific system databases such as {@literal admin} and {@literal local} remain untouched. The rule applies
* <strong>after</strong> the base {@link Statement}. <br />
* Use it as a {@link org.junit.ClassRule} to wipe data once all tests within a class have finished, or as a {@link org.junit.Rule}
* to do so after each {@link org.junit.Test}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.6
|
||||
*/
|
||||
public class CleanMongoDB implements TestRule {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(CleanMongoDB.class);
|
||||
|
||||
/**
|
||||
* Defines contents of MongoDB.
|
||||
*/
|
||||
public enum Struct {
|
||||
DATABASE, COLLECTION, INDEX;
|
||||
}
|
||||
|
||||
@SuppressWarnings("serial")//
|
||||
private Set<String> preserveDatabases = new HashSet<String>() {
|
||||
{
|
||||
add("admin");
|
||||
add("local");
|
||||
}
|
||||
};
|
||||
|
||||
private Set<String> dbNames = new HashSet<String>();
|
||||
private Set<String> collectionNames = new HashSet<String>();
|
||||
private Set<Struct> types = new HashSet<CleanMongoDB.Struct>();
|
||||
private MongoClient client;
|
||||
|
||||
/**
|
||||
* Create new instance using an internal {@link MongoClient}.
|
||||
*/
|
||||
public CleanMongoDB() {
|
||||
this(null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new instance using an internal {@link MongoClient} connecting to specified instance running at host:port.
|
||||
*
|
||||
* @param host
|
||||
* @param port
|
||||
* @throws UnknownHostException
|
||||
*/
|
||||
public CleanMongoDB(String host, int port) throws UnknownHostException {
|
||||
this(new MongoClient(host, port));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new instance using the given client.
|
||||
*
|
||||
* @param client
|
||||
*/
|
||||
public CleanMongoDB(MongoClient client) {
|
||||
this.client = client;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes everything by dropping every single {@link DB}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public static CleanMongoDB everything() {
|
||||
|
||||
CleanMongoDB cleanMongoDB = new CleanMongoDB();
|
||||
cleanMongoDB.clean(Struct.DATABASE);
|
||||
return cleanMongoDB;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes everything from the databases with given name by dropping the according {@link DB}.
|
||||
*
|
||||
* @param dbNames
|
||||
* @return
|
||||
*/
|
||||
public static CleanMongoDB databases(String... dbNames) {
|
||||
|
||||
CleanMongoDB cleanMongoDB = new CleanMongoDB();
|
||||
cleanMongoDB.clean(Struct.DATABASE);
|
||||
cleanMongoDB.useDatabases(dbNames);
|
||||
return cleanMongoDB;
|
||||
}
|
||||
|
||||
/**
|
||||
* Drops the {@link DBCollection} with given names from every single {@link DB} containing them.
|
||||
*
|
||||
* @param collectionNames
|
||||
* @return
|
||||
*/
|
||||
public static CleanMongoDB collections(String... collectionNames) {
|
||||
return collections("", Arrays.asList(collectionNames));
|
||||
}
|
||||
|
||||
/**
|
||||
* Drops the {@link DBCollection} with given names from the named {@link DB}.
|
||||
*
|
||||
* @param dbName
|
||||
* @param collectionNames
|
||||
* @return
|
||||
*/
|
||||
public static CleanMongoDB collections(String dbName, Collection<String> collectionNames) {
|
||||
|
||||
CleanMongoDB cleanMongoDB = new CleanMongoDB();
|
||||
cleanMongoDB.clean(Struct.COLLECTION);
|
||||
cleanMongoDB.useCollections(dbName, collectionNames);
|
||||
return cleanMongoDB;
|
||||
}
|
||||
|
||||
/**
|
||||
* Drops all index structures from every single {@link DBCollection}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public static CleanMongoDB indexes() {
|
||||
return indexes(Collections.<String> emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Drops all index structures from every single {@link DBCollection}.
|
||||
*
|
||||
* @param collectionNames
|
||||
* @return
|
||||
*/
|
||||
public static CleanMongoDB indexes(Collection<String> collectionNames) {
|
||||
|
||||
CleanMongoDB cleanMongoDB = new CleanMongoDB();
|
||||
cleanMongoDB.clean(Struct.INDEX);
|
||||
cleanMongoDB.useCollections(collectionNames);
|
||||
return cleanMongoDB;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define {@link Struct} to be cleaned.
|
||||
*
|
||||
* @param types
|
||||
* @return
|
||||
*/
|
||||
public CleanMongoDB clean(Struct... types) {
|
||||
|
||||
this.types.addAll(Arrays.asList(types));
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines the {@link DB}s to be used. <br />
|
||||
* Impact along with {@link CleanMongoDB#clean(Struct...)}:
|
||||
* <ul>
|
||||
* <li>{@link Struct#DATABASE}: Forces drop of named databases.</li>
|
||||
* <li>{@link Struct#COLLECTION}: Forces drop of collections within named databases.</li>
|
||||
* <li>{@link Struct#INDEX}: Removes indexes within collections of named databases.</li>
|
||||
* </ul>
|
||||
*
|
||||
* @param dbNames
|
||||
* @return
|
||||
*/
|
||||
public CleanMongoDB useDatabases(String... dbNames) {
|
||||
|
||||
this.dbNames.addAll(Arrays.asList(dbNames));
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Excludes the given {@link DB}s from being processed.
|
||||
*
|
||||
* @param dbNames
|
||||
* @return
|
||||
*/
|
||||
public CleanMongoDB preserveDatabases(String... dbNames) {
|
||||
this.preserveDatabases.addAll(Arrays.asList(dbNames));
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines the {@link DBCollection}s to be used. <br />
|
||||
* Impact along with {@link CleanMongoDB#clean(Struct...)}:
|
||||
* <ul>
|
||||
* <li>{@link Struct#COLLECTION}: Forces drop of named collections.</li>
|
||||
* <li>{@link Struct#INDEX}: Removes indexes within named collections.</li>
|
||||
* </ul>
|
||||
*
|
||||
* @param collectionNames
|
||||
* @return
|
||||
*/
|
||||
public CleanMongoDB useCollections(String... collectionNames) {
|
||||
return useCollections(Arrays.asList(collectionNames));
|
||||
}
|
||||
|
||||
private CleanMongoDB useCollections(Collection<String> collectionNames) {
|
||||
return useCollections("", collectionNames);
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines the {@link DBCollection}s and {@link DB} to be used. <br />
|
||||
* Impact along with {@link CleanMongoDB#clean(Struct...)}:
|
||||
* <ul>
|
||||
* <li>{@link Struct#COLLECTION}: Forces drop of named collections in given db.</li>
|
||||
* <li>{@link Struct#INDEX}: Removes indexes within named collections in the given db.</li>
|
||||
* </ul>
|
||||
*
|
||||
* @param collectionNames
|
||||
* @return
|
||||
*/
|
||||
public CleanMongoDB useCollections(String db, Collection<String> collectionNames) {
|
||||
|
||||
if (StringUtils.hasText(db)) {
|
||||
this.dbNames.add(db);
|
||||
}
|
||||
|
||||
if (!CollectionUtils.isEmpty(collectionNames)) {
|
||||
this.collectionNames.addAll(collectionNames);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
Statement apply() {
|
||||
return apply(null, null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.junit.rules.TestRule#apply(org.junit.runners.model.Statement, org.junit.runner.Description)
|
||||
*/
|
||||
public Statement apply(Statement base, Description description) {
|
||||
return new MongoCleanStatement(base);
|
||||
}
|
||||
|
||||
private void doClean() {
|
||||
|
||||
Collection<String> dbNamesToUse = initDbNames();
|
||||
|
||||
for (String dbName : dbNamesToUse) {
|
||||
|
||||
if (isPreserved(dbName) || dropDbIfRequired(dbName)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
DB db = client.getDB(dbName);
|
||||
dropCollectionsOrIndexIfRequired(db, initCollectionNames(db));
|
||||
}
|
||||
}
|
||||
|
||||
private boolean dropDbIfRequired(String dbName) {
|
||||
|
||||
if (!types.contains(Struct.DATABASE)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
client.dropDatabase(dbName);
|
||||
LOGGER.debug("Dropping DB '{}'. ", dbName);
|
||||
return true;
|
||||
}
|
||||
|
||||
private void dropCollectionsOrIndexIfRequired(DB db, Collection<String> collectionsToUse) {
|
||||
|
||||
for (String collectionName : collectionsToUse) {
|
||||
|
||||
if (db.collectionExists(collectionName)) {
|
||||
|
||||
DBCollection collection = db.getCollectionFromString(collectionName);
|
||||
if (collection != null) {
|
||||
|
||||
if (types.contains(Struct.COLLECTION)) {
|
||||
collection.drop();
|
||||
LOGGER.debug("Dropping collection '{}' for DB '{}'. ", collectionName, db.getName());
|
||||
} else if (types.contains(Struct.INDEX)) {
|
||||
collection.dropIndexes();
|
||||
LOGGER.debug("Dropping indexes in collection '{}' for DB '{}'. ", collectionName, db.getName());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isPreserved(String dbName) {
|
||||
return preserveDatabases.contains(dbName.toLowerCase());
|
||||
}
|
||||
|
||||
private Collection<String> initDbNames() {
|
||||
|
||||
Collection<String> dbNamesToUse = dbNames;
|
||||
if (dbNamesToUse.isEmpty()) {
|
||||
dbNamesToUse = client.getDatabaseNames();
|
||||
}
|
||||
return dbNamesToUse;
|
||||
}
|
||||
|
||||
private Collection<String> initCollectionNames(DB db) {
|
||||
|
||||
Collection<String> collectionsToUse = collectionNames;
|
||||
if (CollectionUtils.isEmpty(collectionsToUse)) {
|
||||
collectionsToUse = db.getCollectionNames();
|
||||
}
|
||||
return collectionsToUse;
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 1.6
|
||||
*/
|
||||
private class MongoCleanStatement extends Statement {
|
||||
|
||||
private final Statement base;
|
||||
|
||||
public MongoCleanStatement(Statement base) {
|
||||
this.base = base;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void evaluate() throws Throwable {
|
||||
|
||||
if (base != null) {
|
||||
base.evaluate();
|
||||
}
|
||||
|
||||
boolean isInternal = false;
|
||||
if (client == null) {
|
||||
client = new MongoClient();
|
||||
isInternal = true;
|
||||
}
|
||||
|
||||
doClean();
|
||||
|
||||
if (isInternal) {
|
||||
client.close();
|
||||
client = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
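A minimal usage sketch for the rule added above (the test class name, database name and collection names are hypothetical; only the CleanMongoDB factory methods shown in this file are assumed):

import org.junit.ClassRule;
import org.junit.Test;

import org.springframework.data.mongodb.test.util.CleanMongoDB;

public class MyRepositoryIntegrationTests {

	// Drops the hypothetical "test-db" database once, after all tests in this class have run,
	// since the rule applies after the base statement.
	public static @ClassRule CleanMongoDB cleanDb = CleanMongoDB.databases("test-db");

	// Alternative: wipe only selected collections after every single test method, e.g.
	// public @Rule CleanMongoDB cleanCollections = CleanMongoDB.collections("test-db", Arrays.asList("people"));

	@Test
	public void savesAndFindsDocuments() {
		// test body operating on "test-db"
	}
}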
@@ -0,0 +1,40 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.test.util;
|
||||
|
||||
import org.junit.runner.Result;
|
||||
import org.junit.runner.notification.RunListener;
|
||||
import org.springframework.data.mongodb.test.util.CleanMongoDB.Struct;
|
||||
|
||||
/**
|
||||
* {@link RunListener} implementation to be used for wiping MongoDB index structures after all test runs have finished.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.6
|
||||
*/
|
||||
public class CleanMongoDBJunitRunListener extends RunListener {
|
||||
|
||||
@Override
|
||||
public void testRunFinished(Result result) throws Exception {
|
||||
|
||||
super.testRunFinished(result);
|
||||
try {
|
||||
new CleanMongoDB().clean(Struct.INDEX).apply().evaluate();
|
||||
} catch (Throwable e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
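A brief sketch of registering this listener programmatically via JUnitCore (the launcher and sample test class are hypothetical; a MongoDB instance reachable on the default host/port is assumed, since the listener creates its own MongoClient):

import org.junit.Test;
import org.junit.runner.JUnitCore;

import org.springframework.data.mongodb.test.util.CleanMongoDBJunitRunListener;

public class TestLauncher {

	// Trivial placeholder; real integration test classes would be listed in the run below.
	public static class SampleTests {
		@Test
		public void runs() {}
	}

	public static void main(String[] args) {

		JUnitCore core = new JUnitCore();

		// Wipes MongoDB index structures once the whole test run has finished.
		core.addListener(new CleanMongoDBJunitRunListener());

		core.run(SampleTests.class);
	}
}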
@@ -0,0 +1,194 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.test.util;
|
||||
|
||||
import static org.mockito.Matchers.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.Description;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.model.Statement;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.data.mongodb.test.util.CleanMongoDB.Struct;
|
||||
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class CleanMongoDBTests {
|
||||
|
||||
private CleanMongoDB cleaner;
|
||||
|
||||
// JUnit internals
|
||||
private @Mock Statement baseStatementMock;
|
||||
private @Mock Description descriptionMock;
|
||||
|
||||
// MongoClient in use
|
||||
private @Mock MongoClient mongoClientMock;
|
||||
|
||||
// Some Mock DBs
|
||||
private @Mock DB db1mock, db2mock;
|
||||
private @Mock DBCollection db1collection1mock, db1collection2mock, db2collection1mock;
|
||||
|
||||
@SuppressWarnings("serial")
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
// DB setup
|
||||
when(mongoClientMock.getDatabaseNames()).thenReturn(Arrays.asList("admin", "db1", "db2"));
|
||||
when(mongoClientMock.getDB(eq("db1"))).thenReturn(db1mock);
|
||||
when(mongoClientMock.getDB(eq("db2"))).thenReturn(db2mock);
|
||||
|
||||
// collections have to exist
|
||||
when(db1mock.collectionExists(anyString())).thenReturn(true);
|
||||
when(db2mock.collectionExists(anyString())).thenReturn(true);
|
||||
|
||||
// init collection names per database
|
||||
when(db1mock.getCollectionNames()).thenReturn(new HashSet<String>() {
|
||||
{
|
||||
add("db1collection1");
|
||||
add("db1collection2");
|
||||
}
|
||||
});
|
||||
when(db2mock.getCollectionNames()).thenReturn(Collections.singleton("db2collection1"));
|
||||
|
||||
// return collections according to names
|
||||
when(db1mock.getCollectionFromString(eq("db1collection1"))).thenReturn(db1collection1mock);
|
||||
when(db1mock.getCollectionFromString(eq("db1collection2"))).thenReturn(db1collection2mock);
|
||||
when(db2mock.getCollectionFromString(eq("db2collection1"))).thenReturn(db2collection1mock);
|
||||
|
||||
cleaner = new CleanMongoDB(mongoClientMock);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void preservesSystemDBsCorrectlyWhenCleaningDatabase() throws Throwable {
|
||||
|
||||
cleaner.clean(Struct.DATABASE);
|
||||
|
||||
cleaner.apply(baseStatementMock, descriptionMock).evaluate();
|
||||
|
||||
verify(mongoClientMock, never()).dropDatabase(eq("admin"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void preservesNamedDBsCorrectlyWhenCleaningDatabase() throws Throwable {
|
||||
|
||||
cleaner.clean(Struct.DATABASE);
|
||||
cleaner.preserveDatabases("db1");
|
||||
|
||||
cleaner.apply(baseStatementMock, descriptionMock).evaluate();
|
||||
|
||||
verify(mongoClientMock, never()).dropDatabase(eq("db1"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void dropsAllDBsCorrectlyWhenCleaningDatabaseAndNoExplicitDBNamePresent() throws Throwable {
|
||||
|
||||
cleaner.clean(Struct.DATABASE);
|
||||
|
||||
cleaner.apply(baseStatementMock, descriptionMock).evaluate();
|
||||
|
||||
verify(mongoClientMock, times(1)).dropDatabase(eq("db1"));
|
||||
verify(mongoClientMock, times(1)).dropDatabase(eq("db2"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void dropsSpecifiedDBsCorrectlyWhenExplicitNameSet() throws Throwable {
|
||||
|
||||
cleaner.clean(Struct.DATABASE);
|
||||
cleaner.useDatabases("db2");
|
||||
|
||||
cleaner.apply(baseStatementMock, descriptionMock).evaluate();
|
||||
|
||||
verify(mongoClientMock, times(1)).dropDatabase(eq("db2"));
|
||||
verify(mongoClientMock, never()).dropDatabase(eq("db1"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void doesNotRemoveAnyDBwhenCleaningCollections() throws Throwable {
|
||||
|
||||
cleaner.clean(Struct.COLLECTION);
|
||||
|
||||
cleaner.apply(baseStatementMock, descriptionMock).evaluate();
|
||||
|
||||
verify(mongoClientMock, never()).dropDatabase(eq("db1"));
|
||||
verify(mongoClientMock, never()).dropDatabase(eq("db2"));
|
||||
verify(mongoClientMock, never()).dropDatabase(eq("admin"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void doesNotDropCollectionsFromPreservedDBs() throws Throwable {
|
||||
|
||||
cleaner.clean(Struct.COLLECTION);
|
||||
cleaner.preserveDatabases("db1");
|
||||
|
||||
cleaner.apply(baseStatementMock, descriptionMock).evaluate();
|
||||
|
||||
verify(db1collection1mock, never()).drop();
|
||||
verify(db1collection2mock, never()).drop();
|
||||
verify(db2collection1mock, times(1)).drop();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void removesAllCollectionsFromAllDatabasesWhenNotLimitedToSpecificOnes() throws Throwable {
|
||||
|
||||
cleaner.clean(Struct.COLLECTION);
|
||||
|
||||
cleaner.apply(baseStatementMock, descriptionMock).evaluate();
|
||||
|
||||
verify(db1collection1mock, times(1)).drop();
|
||||
verify(db1collection2mock, times(1)).drop();
|
||||
verify(db2collection1mock, times(1)).drop();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void removesOnlyNamedCollectionsWhenSpecified() throws Throwable {
|
||||
|
||||
cleaner.clean(Struct.COLLECTION);
|
||||
cleaner.useCollections("db1collection2");
|
||||
|
||||
cleaner.apply(baseStatementMock, descriptionMock).evaluate();
|
||||
|
||||
verify(db1collection1mock, never()).drop();
|
||||
verify(db2collection1mock, never()).drop();
|
||||
verify(db1collection2mock, times(1)).drop();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void removesIndexesCorrectly() throws Throwable {
|
||||
|
||||
cleaner.clean(Struct.INDEX);
|
||||
|
||||
cleaner.apply(baseStatementMock, descriptionMock).evaluate();
|
||||
|
||||
verify(mongoClientMock, never()).dropDatabase(eq("db1"));
|
||||
verify(mongoClientMock, never()).dropDatabase(eq("db2"));
|
||||
verify(mongoClientMock, never()).dropDatabase(eq("admin"));
|
||||
|
||||
verify(db1collection1mock, times(1)).dropIndexes();
|
||||
}
|
||||
}
|
||||
@@ -8,7 +8,7 @@ Export-Template:
|
||||
org.springframework.data.mongodb.*;version="${project.version}"
|
||||
Import-Template:
|
||||
com.google.common.base.*;version="[11.0.0,14.0.0)";resolution:=optional,
|
||||
com.mongodb.*;version="${mongo:[=.=.=,+1.0.0)}",
|
||||
com.mongodb.*;version="${mongo.osgi:[=.=.=,+1.0.0)}",
|
||||
com.mysema.query.*;version="[2.1.1, 3.0.0)";resolution:=optional,
|
||||
javax.annotation.processing.*;version="0",
|
||||
javax.enterprise.*;version="${cdi:[=.=.=,+1.0.0)}";resolution:=optional,
|
||||
|
||||
@@ -68,7 +68,7 @@
|
||||
<xi:include href="introduction/introduction.xml"/>
|
||||
<xi:include href="introduction/requirements.xml"/>
|
||||
<xi:include href="introduction/getting-started.xml"/>
|
||||
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.8.0.RELEASE/src/docbkx/repositories.xml">
|
||||
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.8.2.RELEASE/src/docbkx/repositories.xml">
|
||||
<xi:fallback href="../../../spring-data-commons/src/docbkx/repositories.xml" />
|
||||
</xi:include>
|
||||
</part>
|
||||
@@ -88,10 +88,10 @@
|
||||
<part id="appendix">
|
||||
<title>Appendix</title>
|
||||
|
||||
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.8.0.RELEASE/src/docbkx/repository-namespace-reference.xml">
|
||||
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.8.2.RELEASE/src/docbkx/repository-namespace-reference.xml">
|
||||
<xi:fallback href="../../../spring-data-commons/src/docbkx/repository-namespace-reference.xml" />
|
||||
</xi:include>
|
||||
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.8.0.RELEASE/src/docbkx/repository-query-keywords-reference.xml">
|
||||
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.8.2.RELEASE/src/docbkx/repository-query-keywords-reference.xml">
|
||||
<xi:fallback href="../../../spring-data-commons/src/docbkx/repository-query-keywords-reference.xml" />
|
||||
</xi:include>
|
||||
</part>
|
||||
|
||||
@@ -18,20 +18,19 @@
|
||||
<section id="get-started:help:community">
|
||||
<title>Community Forum</title>
|
||||
|
||||
<para>The Spring Data <ulink
|
||||
url="http://forum.spring.io/forum/spring-projects/data">forum
|
||||
</ulink> is a message board for all Spring Data (not just Document)
|
||||
users to share information and help each other. Note that registration
|
||||
is needed <emphasis>only</emphasis> for posting.</para>
|
||||
<para>The Spring Data tag on <ulink
url="http://stackoverflow.com/questions/tagged/spring-data">Stackoverflow</ulink>
is for all Spring Data (not just Document) users to share information
and help each other. Note that registration is needed
<emphasis>only</emphasis> for posting.</para>
|
||||
</section>
|
||||
|
||||
<section id="get-started:help:professional">
|
||||
<title>Professional Support</title>
|
||||
|
||||
<para>Professional, from-the-source support, with guaranteed response
|
||||
time, is available from <ulink
|
||||
url="http://gopivotal.com/">Pivotal Sofware, Inc.</ulink>, the company
|
||||
behind Spring Data and Spring.</para>
|
||||
time, is available from <ulink url="http://gopivotal.com/">Pivotal
|
||||
Software, Inc.</ulink>, the company behind Spring Data and Spring.</para>
|
||||
</section>
|
||||
</section>
|
||||
|
||||
@@ -44,22 +43,21 @@
|
||||
homepage</ulink>.</para>
|
||||
|
||||
<para>You can help make Spring Data best serve the needs of the Spring
|
||||
community by interacting with developers through the Spring Community
|
||||
<ulink url="http://forum.spring.io/">forums</ulink>. To follow
|
||||
developer activity look for the mailing list information on the Spring
|
||||
Data Mongo homepage.</para>
|
||||
community by interacting with developers through the Community on <ulink
|
||||
url="http://stackoverflow.com/questions/tagged/spring-data">Stackoverflow</ulink>.
|
||||
To follow developer activity look for the mailing list information on the
|
||||
Spring Data Mongo homepage.</para>
|
||||
|
||||
<para>If you encounter a bug or want to suggest an improvement, please
|
||||
create a ticket on the Spring Data issue <ulink
|
||||
url="https://jira.springframework.org/browse/DATAMONGO">tracker</ulink>.</para>
|
||||
url="https://jira.spring.io/browse/DATAMONGO">tracker</ulink>.</para>
|
||||
|
||||
<para>To stay up to date with the latest news and announcements in the
|
||||
Spring eco system, subscribe to the Spring Community <ulink
|
||||
url="https://spring.io">Portal</ulink>.</para>
|
||||
|
||||
<para>Lastly, you can follow the SpringSource Data <ulink
|
||||
url="https://spring.io/blog">blog </ulink>or
|
||||
the project team on Twitter (<ulink
|
||||
url="http://twitter.com/SpringData">SpringData</ulink>)</para>
|
||||
url="https://spring.io/blog">blog </ulink>or the project team on Twitter
|
||||
(<ulink url="http://twitter.com/SpringData">SpringData</ulink>).</para>
|
||||
</section>
|
||||
</chapter>
|
||||
|
||||
@@ -1,6 +1,371 @@
|
||||
Spring Data MongoDB Changelog
|
||||
=============================
|
||||
|
||||
Changes in version 1.5.6.RELEASE (2015-07-01)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1246 - Release 1.5.6 (Dijkstra).
|
||||
* DATAMONGO-1234 - Fix typos in JavaDoc.
|
||||
* DATAMONGO-1232 - IgnoreCase should escape queries.
|
||||
* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path.
|
||||
* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility.
|
||||
* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up.
|
||||
* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item.
|
||||
* DATAMONGO-1180 - Incorrect exception message creation in PartTreeMongoQuery.
|
||||
* DATAMONGO-1166 - ReadPreference not used for Aggregations.
|
||||
* DATAMONGO-1155 - Upgrade mongo-next build profiles to Java driver version 2.13.0.
|
||||
|
||||
|
||||
Changes in version 1.6.3.RELEASE (2015-07-01)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1247 - Release 1.6.3 (Evans).
|
||||
* DATAMONGO-1242 - Update mongo-java-driver to 3.0.2 in mongo3 profile.
|
||||
* DATAMONGO-1234 - Fix typos in JavaDoc.
|
||||
* DATAMONGO-1232 - IgnoreCase should escape queries.
|
||||
* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path.
|
||||
* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility.
|
||||
* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up.
|
||||
* DATAMONGO-1213 - Include new section on Spring Data and Spring Framework dependencies in reference documentation.
|
||||
* DATAMONGO-1210 - Inconsistent property order of _class type hint breaks document equality.
|
||||
* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item.
|
||||
* DATAMONGO-1196 - Upgrade build profiles after MongoDB 3.0 Java driver release.
|
||||
* DATAMONGO-1180 - Incorrect exception message creation in PartTreeMongoQuery.
|
||||
* DATAMONGO-1166 - ReadPreference not used for Aggregations.
|
||||
* DATAMONGO-1157 - Throw meaningful exception when @DbRef is used with unsupported types.
|
||||
* DATAMONGO-1155 - Upgrade mongo-next build profiles to Java driver version 2.13.0.
|
||||
* DATAMONGO-1153 - Fix documentation build.
|
||||
* DATAMONGO-1133 - Field aliasing is not honored in Aggregation operations.
|
||||
* DATAMONGO-1124 - Switch log level for cyclic reference index warnings from WARN to INFO.
|
||||
* DATAMONGO-1081 - Improve documentation on field mapping semantics.
|
||||
|
||||
|
||||
Changes in version 1.7.1.RELEASE (2015-06-30)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1248 - Release 1.7.1 (Fowler).
|
||||
* DATAMONGO-1242 - Update mongo-java-driver to 3.0.2 in mongo3 profile.
|
||||
* DATAMONGO-1234 - Fix typos in JavaDoc.
|
||||
* DATAMONGO-1232 - IgnoreCase should escape queries.
|
||||
* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path.
|
||||
* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility.
|
||||
* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up.
|
||||
* DATAMONGO-1216 - Authentication mechanism PLAIN changes to SCRAM-SHA-1.
|
||||
* DATAMONGO-1213 - Include new section on Spring Data and Spring Framework dependencies in reference documentation.
|
||||
* DATAMONGO-1210 - Inconsistent property order of _class type hint breaks document equality.
|
||||
* DATAMONGO-1208 - MongoTemplate.stream(…) does not consider limit, order, sort etc.
|
||||
* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item.
|
||||
* DATAMONGO-1202 - Indexed annotation problems under generics.
|
||||
* DATAMONGO-1196 - Upgrade build profiles after MongoDB 3.0 Java driver release.
|
||||
* DATAMONGO-1193 - Prevent unnecessary database lookups when resolving DBRefs on 2.x driver.
|
||||
* DATAMONGO-1166 - ReadPreference not used for Aggregations.
|
||||
* DATAMONGO-1157 - Throw meaningful exception when @DbRef is used with unsupported types.
|
||||
|
||||
|
||||
Changes in version 1.8.0.M1 (2015-06-02)
|
||||
----------------------------------------
|
||||
* DATAMONGO-1228 - Release 1.8 M1 (Gosling).
|
||||
* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility.
|
||||
* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up.
|
||||
* DATAMONGO-1218 - Deprecate non-MongoClient related configuration options in XML namespace.
|
||||
* DATAMONGO-1216 - Authentication mechanism PLAIN changes to SCRAM-SHA-1.
|
||||
* DATAMONGO-1213 - Include new section on Spring Data and Spring Framework dependencies in reference documentation.
|
||||
* DATAMONGO-1211 - Adapt API changes in Spring Data Commons to simplify custom repository base class registration.
|
||||
* DATAMONGO-1210 - Inconsistent property order of _class type hint breaks document equality.
|
||||
* DATAMONGO-1208 - MongoTemplate.stream(…) does not consider limit, order, sort etc.
|
||||
* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item.
|
||||
* DATAMONGO-1202 - Indexed annotation problems under generics.
|
||||
* DATAMONGO-1196 - Upgrade build profiles after MongoDB 3.0 Java driver release.
|
||||
* DATAMONGO-1193 - Prevent unnecessary database lookups when resolving DBRefs on 2.x driver.
|
||||
* DATAMONGO-1192 - Switch back to Spring 4.1's CollectionFactory.
|
||||
* DATAMONGO-1134 - Add support for $geoIntersects.
|
||||
* DATAMONGO-990 - Add support for SpEL expressions in @Query.
|
||||
|
||||
|
||||
Changes in version 1.7.0.RELEASE (2015-03-23)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1189 - Release 1.7 GA.
|
||||
* DATAMONGO-1181 - Add Jackson Module for GeoJSON types.
|
||||
* DATAMONGO-1180 - Incorrect exception message creation in PartTreeMongoQuery.
|
||||
* DATAMONGO-1179 - Update reference documentation.
|
||||
* DATAMONGO-1124 - Switch log level for cyclic reference index warnings from WARN to INFO.
|
||||
* DATAMONGO-979 - Add support for $size expression in project and group aggregation pipeline.
|
||||
|
||||
|
||||
Changes in version 1.7.0.RC1 (2015-03-05)
|
||||
-----------------------------------------
|
||||
* DATAMONGO-1173 - Release 1.7 RC1.
|
||||
* DATAMONGO-1167 - Add 'findAll' method to QueryDslMongoRepository which accepts a querydsl Predicate and a Sort.
|
||||
* DATAMONGO-1165 - Add support for Java 8 Stream as return type in repositories.
|
||||
* DATAMONGO-1162 - Adapt test cases to semantic changes in Spring Data Commons AuditingHandler API.
|
||||
* DATAMONGO-1158 - Assert compatibility with MongoDB 3.0.
|
||||
* DATAMONGO-1154 - Upgrade to MongoDB Java driver 2.13.0.
|
||||
* DATAMONGO-1153 - Fix documentation build.
|
||||
* DATAMONGO-1148 - Use EclipseLink provided JPA API JAR.
|
||||
* DATAMONGO-1147 - Remove manual array copy.
|
||||
* DATAMONGO-1146 - Add 'exists' method to QueryDslMongoRepository which accepts a querydsl Predicate.
|
||||
* DATAMONGO-1145 - Upgrade MongoDB Java driver to 2.12.5.
|
||||
* DATAMONGO-1139 - MongoQueryCreator must not create $nearSphere query for neutral Distance.
|
||||
* DATAMONGO-1136 - Use $geoWithin instead of $within for geo queries.
|
||||
* DATAMONGO-1135 - Add support for $geometry to support GeoJSON queries.
|
||||
* DATAMONGO-1132 - The sample does not match the logical result in the MongoDB repositories section of the documentation.
|
||||
* DATAMONGO-1131 - Register converters for ThreeTen back port by default.
|
||||
* DATAMONGO-1129 - Upgrade to latest MongoDB Java driver.
|
||||
* DATAMONGO-1127 - Add support for geoNear queries with distance information.
|
||||
* DATAMONGO-1126 - Repository keyword query findByInId with pageable not returning correctly.
|
||||
* DATAMONGO-1123 - geoNear, does not return all matching elements, it returns only a max of 100 documents.
|
||||
* DATAMONGO-1121 - "Cycle found" false positive.
|
||||
* DATAMONGO-1120 - Pageable queries timeout or return incorrect counts.
|
||||
* DATAMONGO-1118 - Custom converters not used for map keys.
|
||||
* DATAMONGO-1110 - Add support for $minDistance to NearQuery.
|
||||
* DATAMONGO-1082 - Improve JavaDoc and reference documentation on alias usage in aggregation framework support.
|
||||
* DATAMONGO-1081 - Improve documentation on field mapping semantics.
|
||||
* DATAMONGO-712 - Another round of potential performance improvements.
|
||||
* DATAMONGO-479 - Support calling of MongoDB stored javascripts.
|
||||
|
||||
|
||||
Changes in version 1.6.2.RELEASE (2015-01-28)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1148 - Use EclipseLink provided JPA API JAR.
|
||||
* DATAMONGO-1147 - Remove manual array copy.
|
||||
* DATAMONGO-1145 - Upgrade MongoDB Java driver to 2.12.5.
|
||||
* DATAMONGO-1144 - Release 1.6.2.
|
||||
* DATAMONGO-1139 - MongoQueryCreator must not create $nearSphere query for neutral Distance.
|
||||
* DATAMONGO-1132 - The sample does not match the logical result in the MongoDB repositories section of the documentation.
|
||||
* DATAMONGO-1127 - Add support for geoNear queries with distance information.
|
||||
* DATAMONGO-1126 - Repository keyword query findByInId with pageable not returning correctly.
|
||||
* DATAMONGO-1123 - geoNear, does not return all matching elements, it returns only a max of 100 documents.
|
||||
* DATAMONGO-1121 - "Cycle found" false positive.
|
||||
* DATAMONGO-1120 - Pageable queries timeout or return incorrect counts.
|
||||
* DATAMONGO-1118 - Custom converters not used for map keys.
|
||||
* DATAMONGO-1108 - BasicMongoPersistentEntity doesn't need to parse expression on every invocation.
|
||||
* DATAMONGO-1096 - RuntimeExceptions during debug query printing in MongoTemplate.
|
||||
* DATAMONGO-1094 - Wrong reference to @DocumentField in error message.
|
||||
* DATAMONGO-1093 - BasicQuery missing hashCode() and equals(…) methods.
|
||||
* DATAMONGO-1087 - Incorrect warning for MongoPersistentEntityIndexResolver$CyclicPropertyReferenceException: Found cycle for field...
|
||||
* DATAMONGO-1085 - Sort can not use the metamodel classes generated by QueryDSL.
|
||||
* DATAMONGO-1082 - Improve JavaDoc and reference documentation on alias usage in aggregation framework support.
|
||||
* DATAMONGO-1078 - @Query annotated repository query fails to map complex Id structure.
|
||||
* DATAMONGO-1075 - Correctly evaluate CONTAINS keyword on collection properties.
|
||||
* DATAMONGO-1054 - Improve performance of saving entities by using insert(…) if possible.
|
||||
* DATAMONGO-1043 - SpEL Expressions in @Document annotations are not re-evaluated for query executions.
|
||||
* DATAMONGO-712 - Another round of potential performance improvements.
|
||||
|
||||
|
||||
Changes in version 1.5.5.RELEASE (2015-01-27)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1148 - Use EclipseLink provided JPA API JAR.
|
||||
* DATAMONGO-1147 - Remove manual array copy.
|
||||
* DATAMONGO-1143 - Release 1.5.5.
|
||||
* DATAMONGO-1139 - MongoQueryCreator must not create $nearSphere query for neutral Distance.
|
||||
* DATAMONGO-1123 - geoNear, does not return all matching elements, it returns only a max of 100 documents.
|
||||
* DATAMONGO-1121 - "Cycle found" false positive.
|
||||
* DATAMONGO-1118 - Custom converters not used for map keys.
|
||||
* DATAMONGO-1096 - RuntimeExceptions during debug query printing in MongoTemplate.
|
||||
* DATAMONGO-1094 - Wrong reference to @DocumentField in error message.
|
||||
* DATAMONGO-1087 - Incorrect warning for MongoPersistentEntityIndexResolver$CyclicPropertyReferenceException: Found cycle for field...
|
||||
* DATAMONGO-1078 - @Query annotated repository query fails to map complex Id structure.
|
||||
* DATAMONGO-1075 - Correctly evaluate CONTAINS keyword on collection properties.
|
||||
* DATAMONGO-1072 - Query placeholders in keys no longer correctly substituted.
|
||||
* DATAMONGO-1068 - elemMatch of Class Criteria fails to build special criteria.
|
||||
* DATAMONGO-1063 - IllegalStateException using any().in().
|
||||
* DATAMONGO-1062 - Fix failing test in ServerAddressPropertyEditorUnitTests.
|
||||
* DATAMONGO-1058 - Using @Field("foo") with @Dbref breaking behavior.
|
||||
* DATAMONGO-1045 - Make sure Spring Data MongoDB can build against Spring 4.1.
|
||||
* DATAMONGO-1043 - SpEL Expressions in @Document annotations are not re-evaluated for query executions.
|
||||
* DATAMONGO-1040 - deleteAll repository query doesn't use EntityMetadata collection name.
|
||||
* DATAMONGO-1039 - Polish implementation for cleaning up after tests.
|
||||
* DATAMONGO-712 - Another round of potential performance improvements.
|
||||
|
||||
|
||||
Changes in version 1.7.0.M1 (2014-12-01)
----------------------------------------
* DATAMONGO-1108 - BasicMongoPersistentEntity doesn't need to parse expression on every invocation.
* DATAMONGO-1106 - Release 1.7 M1.
* DATAMONGO-1105 - Add implementation for new QueryDslPredicateExecutor.findAll(OrderSpecifier<?>... orders).
* DATAMONGO-1102 - Auto-register JSR-310 converters to support JDK 8 date/time types.
* DATAMONGO-1101 - Add support for $bit to Update.
* DATAMONGO-1100 - Adapt to new PersistentPropertyAccessor API.
* DATAMONGO-1097 - Add support for $mul to Update.
* DATAMONGO-1096 - RuntimeExceptions during debug query printing in MongoTemplate.
* DATAMONGO-1094 - Wrong reference to @DocumentField in error message.
* DATAMONGO-1093 - BasicQuery missing hashCode() and equals(…) methods.
* DATAMONGO-1092 - Ensure compatibility with MongoDB 2.8.0.rc0 and Java driver 2.13.0-rc0.
* DATAMONGO-1087 - Incorrect warning for MongoPersistentEntityIndexResolver$CyclicPropertyReferenceException: Found cycle for field…
* DATAMONGO-1085 - Sort cannot use the metamodel classes generated by QueryDSL.
* DATAMONGO-1080 - AbstractMongoQuery must not eagerly post-process results.
* DATAMONGO-1078 - @Query annotated repository query fails to map complex Id structure.
* DATAMONGO-1077 - Update removes positional operator $ in key when used on DBRef property.
* DATAMONGO-1076 - Finalizer hits db on lazy DBRefs.
* DATAMONGO-1075 - Correctly evaluate CONTAINS keyword on collection properties.
* DATAMONGO-1072 - Query placeholders in keys no longer correctly substituted.
* DATAMONGO-1070 - Query annotation with $oid leads to a parse error.
* DATAMONGO-1068 - elemMatch of Class Criteria fails to build special criteria.
* DATAMONGO-1063 - IllegalStateException using any().in().
* DATAMONGO-1062 - Fix failing test in ServerAddressPropertyEditorUnitTests.
* DATAMONGO-1058 - Using @Field("foo") with @DBRef breaks behavior.
* DATAMONGO-1057 - AbstractMongoQuery.SlicedExecution#execute() skips every nth element.
* DATAMONGO-1054 - Improve performance of saving entities by using insert(…) if possible.
* DATAMONGO-1053 - In 1.6, any field in a mapped object named "language" will fail to map if it is a type other than String.
* DATAMONGO-1050 - SimpleMongoRepository.findById(id, class) doesn't return ids for nested documents.
* DATAMONGO-1049 - Reserved field name 'language' causes trouble.
* DATAMONGO-1043 - SpEL Expressions in @Document annotations are not re-evaluated for query executions.
* DATAMONGO-943 - Add support for $position to Update $push $each.

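As a rough illustration of the new Update operators listed for 1.7.0.M1 (DATAMONGO-1097 adds $mul), the sketch below shows how a multiply-style update might be issued through MongoTemplate. The PriceAdjustment class, the "products" collection, and the field names are hypothetical and not part of the release notes.

```java
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

public class PriceAdjustment {

    private final MongoTemplate template;

    public PriceAdjustment(MongoTemplate template) {
        this.template = template;
    }

    public void applyDiscount(String sku) {
        // Select the document to update; sku/price are made-up field names.
        Query query = Query.query(Criteria.where("sku").is(sku));
        // multiply(…) is expected to render a $mul modifier under the hood.
        Update update = new Update().multiply("price", 0.9);
        template.updateFirst(query, update, "products");
    }
}
```
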
Changes in version 1.6.1.RELEASE (2014-10-30)
---------------------------------------------
* DATAMONGO-1080 - AbstractMongoQuery must not eagerly post-process results.
* DATAMONGO-1079 - Release 1.6.1.
* DATAMONGO-1077 - Update removes positional operator $ in key when used on DBRef property.
* DATAMONGO-1076 - Finalizer hits db on lazy DBRefs.
* DATAMONGO-1072 - Query placeholders in keys no longer correctly substituted.
* DATAMONGO-1070 - Query annotation with $oid leads to a parse error.
* DATAMONGO-1068 - elemMatch of Class Criteria fails to build special criteria.
* DATAMONGO-1063 - IllegalStateException using any().in().
* DATAMONGO-1062 - Fix failing test in ServerAddressPropertyEditorUnitTests.
* DATAMONGO-1058 - Using @Field("foo") with @DBRef breaks behavior.
* DATAMONGO-1057 - AbstractMongoQuery.SlicedExecution#execute() skips every nth element.
* DATAMONGO-1053 - In 1.6, any field in a mapped object named "language" will fail to map if it is a type other than String.
* DATAMONGO-1049 - Reserved field name 'language' causes trouble.

Changes in version 1.6.0.RELEASE (2014-09-05)
---------------------------------------------
* DATAMONGO-1046 - Release 1.6 GA.
* DATAMONGO-1045 - Make sure Spring Data MongoDB can build against Spring 4.1.
* DATAMONGO-1040 - deleteAll repository query doesn't use the EntityMetadata collection name.
* DATAMONGO-1039 - Polish implementation for cleaning up after tests.
* DATAMONGO-1038 - Assert Mongo instances are cleaned up properly after test runs.
* DATAMONGO-1036 - Custom repository implementations are not picked up when using CDI.
* DATAMONGO-1034 - Improve error message when trying to convert incompatible types.
* DATAMONGO-1032 - Polish Asciidoctor documentation.
* DATAMONGO-1030 - Query methods returning a single entity do not work with projecting types.
* DATAMONGO-1027 - Collection inherits complex index from embedded class/object.
* DATAMONGO-1025 - Duplicate index creation on embedded documents.

Changes in version 1.5.4.RELEASE (2014-08-27)
---------------------------------------------
* DATAMONGO-1038 - Assert Mongo instances are cleaned up properly after test runs.
* DATAMONGO-1034 - Improve error message when trying to convert incompatible types.
* DATAMONGO-1033 - Release 1.5.4.
* DATAMONGO-1030 - Query methods returning a single entity do not work with projecting types.
* DATAMONGO-1027 - Collection inherits complex index from embedded class/object.
* DATAMONGO-1025 - Duplicate index creation on embedded documents.
* DATAMONGO-1020 - LimitOperator should be a public class.
* DATAMONGO-1008 - IndexOperations fail when a "2dsphere" index is present.

Changes in version 1.6.0.RC1 (2014-08-13)
-----------------------------------------
* DATAMONGO-1024 - Upgrade to Java driver 2.12.3.
* DATAMONGO-1021 - Release 1.6 RC1.
* DATAMONGO-1020 - LimitOperator should be a public class.
* DATAMONGO-1019 - Correct examples in reference documentation.
* DATAMONGO-1017 - Add support for custom implementations in CDI repositories.
* DATAMONGO-1016 - Remove deprecations in geospatial area.
* DATAMONGO-1015 - Move to Asciidoctor for reference documentation.
* DATAMONGO-1012 - Proxies for lazy DBRefs with field access should have their id values resolved eagerly.
* DATAMONGO-1009 - Adapt to new multi-store configuration detection.
* DATAMONGO-1008 - IndexOperations fail when a "2dsphere" index is present.
* DATAMONGO-1005 - Improve cycle detection for DBRefs.
* DATAMONGO-1002 - Update.toString(…) might throw exception.
* DATAMONGO-1001 - Can't save/update lazy load object.
* DATAMONGO-999 - Multiple Mongo Instances always have the same MongoOption Reference - MongoOptionsFactoryBean has a static instance of MongoOptions.
* DATAMONGO-996 - Pagination broken after introduction of the support for top/first.
* DATAMONGO-995 - Parameter binding in String-based query does not bind all parameters.
* DATAMONGO-993 - The system variables $$CURRENT and $$ROOT are not handled correctly.
* DATAMONGO-992 - Entity can't be deserialized if @TypeAlias is used.
* DATAMONGO-991 - Adapt to deprecation removals in Spring Data Commons.
* DATAMONGO-989 - MatchOperation should accept CriteriaDefinition.
* DATAMONGO-987 - Problem with lazy loading in @DBRef when getting data using MongoTemplate.
* DATAMONGO-974 - Synthetic field target's name is returned instead of the alias name.
* DATAMONGO-973 - Add support for deriving full text queries.
* DATAMONGO-957 - Add support for query modifiers.
* DATAMONGO-420 - Extra quotes being added to @Query values and fields.

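For context on the aggregation-related items above (for example DATAMONGO-989, which lets a $match stage accept any CriteriaDefinition), here is a minimal sketch of a match-plus-group pipeline run through MongoTemplate. The OrderStats and OrderSummary types, the "orders" collection, and the field names are assumptions made up for the example.

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.group;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.match;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.query.Criteria;

public class OrderStats {

    private final MongoTemplate template;

    public OrderStats(MongoTemplate template) {
        this.template = template;
    }

    public AggregationResults<OrderSummary> summarizeShippedOrders() {
        // Criteria is a CriteriaDefinition, so it can feed the $match stage directly.
        Aggregation aggregation = newAggregation(
                match(Criteria.where("status").is("SHIPPED")),
                group("customerId").count().as("orders"));
        return template.aggregate(aggregation, "orders", OrderSummary.class);
    }

    // Hypothetical result type; the group key is exposed under "_id" and maps to "id".
    static class OrderSummary {
        String id;
        long orders;
    }
}
```
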
Changes in version 1.5.2.RELEASE (2014-07-28)
---------------------------------------------
* DATAMONGO-1007 - Release 1.5.2.
* DATAMONGO-1002 - Update.toString(…) might throw exception.
* DATAMONGO-1001 - Can't save/update lazy load object.
* DATAMONGO-999 - Multiple Mongo Instances always have the same MongoOption Reference - MongoOptionsFactoryBean has a static instance of MongoOptions.
* DATAMONGO-995 - Parameter binding in String-based query does not bind all parameters.
* DATAMONGO-992 - Entity can't be deserialized if @TypeAlias is used.
* DATAMONGO-989 - MatchOperation should accept CriteriaDefinition.
* DATAMONGO-987 - Problem with lazy loading in @DBRef when getting data using MongoTemplate.
* DATAMONGO-983 - Remove links to forum.spring.io.
* DATAMONGO-982 - Assure compatibility with upcoming MongoDB driver versions.
* DATAMONGO-978 - deleteBy/removeBy repository methods don't set type information in Before/AfterDeleteEvent.
* DATAMONGO-972 - References are not handled properly in Querydsl integration.
* DATAMONGO-969 - String @id field is not mapped to ObjectId when using QueryDSL .id.in(Collection<String>).
* DATAMONGO-420 - Extra quotes being added to @Query values and fields.

Changes in version 1.6.0.M1 (2014-07-10)
----------------------------------------
* DATAMONGO-983 - Remove links to forum.spring.io.
* DATAMONGO-982 - Assure compatibility with upcoming MongoDB driver versions.
* DATAMONGO-981 - Release 1.6 M1.
* DATAMONGO-980 - Use meta annotations from Spring Data Commons for @Score.
* DATAMONGO-978 - deleteBy/removeBy repository methods don't set type information in Before/AfterDeleteEvent.
* DATAMONGO-977 - Adapt to Spring 4 upgrade.
* DATAMONGO-976 - Add support for reading $meta projection on textScore into document.
* DATAMONGO-975 - Add support for date/time operators in aggregation framework.
* DATAMONGO-973 - Add support for deriving full text queries.
* DATAMONGO-972 - References are not handled properly in Querydsl integration.
* DATAMONGO-970 - Id query cannot be created if object to remove is DBObject.
* DATAMONGO-969 - String @id field is not mapped to ObjectId when using QueryDSL .id.in(Collection<String>).
* DATAMONGO-968 - Add support for $meta projections and sorting for textScore metadata.
* DATAMONGO-963 - Compound index with expireAfterSeconds causes repeating error on MongoDB server.
* DATAMONGO-962 - “Cycle found” with Spring Data Mongo 1.5.
* DATAMONGO-960 - Allow passing options to the Aggregation Pipeline.
* DATAMONGO-958 - Move to FieldNamingStrategy SPI in Spring Data Commons.
* DATAMONGO-954 - Add support for System Variables in Aggregations.
* DATAMONGO-953 - Update object should have a proper equals/hashcode/toString.
* DATAMONGO-952 - @Query annotation does not work with only field restrictions.
* DATAMONGO-950 - Add support for limiting the query result in the query derivation mechanism.
* DATAMONGO-949 - CyclicPropertyReferenceException in versions 1.5.0+ for MongoDB.
* DATAMONGO-948 - Assertion error in MongoTemplate.getMappedSortObject.
* DATAMONGO-944 - Add support for $currentDate to Update.
* DATAMONGO-938 - Exception when creating geo within Criteria using MapReduce.
* DATAMONGO-937 - Add support for creating text index.
* DATAMONGO-850 - Add support for text search using $text.
* DATAMONGO-745 - @Query($in) and Pageable in result Page total = 0.

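The $text-related entries above (DATAMONGO-850, DATAMONGO-937, DATAMONGO-968) can be pictured with the following minimal sketch of a text search scored and sorted via $meta. The RecipeSearch and Recipe types and the field names are hypothetical; only the TextCriteria/TextQuery usage reflects the feature referenced in the notes.

```java
import java.util.List;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.TextCriteria;
import org.springframework.data.mongodb.core.query.TextQuery;

public class RecipeSearch {

    private final MongoTemplate template;

    public RecipeSearch(MongoTemplate template) {
        this.template = template;
    }

    public List<Recipe> search(String terms) {
        // Builds a $text criteria against the collection's text index.
        TextCriteria criteria = TextCriteria.forDefaultLanguage().matching(terms);
        // sortByScore() sorts on the $meta textScore of the matches.
        TextQuery query = TextQuery.queryText(criteria).sortByScore();
        return template.find(query, Recipe.class);
    }

    // Hypothetical mapped type; a text index on these fields is assumed to exist.
    static class Recipe {
        String title;
        String description;
    }
}
```
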
Changes in version 1.5.1.RELEASE (2014-06-30)
---------------------------------------------
* DATAMONGO-971 - Release 1.5.1.
* DATAMONGO-970 - Id query cannot be created if object to remove is DBObject.
* DATAMONGO-963 - Compound index with expireAfterSeconds causes repeating error on MongoDB server.
* DATAMONGO-962 - “Cycle found” with Spring Data Mongo 1.5.
* DATAMONGO-953 - Update object should have a proper equals/hashcode/toString.
* DATAMONGO-952 - @Query annotation does not work with only field restrictions.
* DATAMONGO-949 - CyclicPropertyReferenceException in versions 1.5.0+ for MongoDB.
* DATAMONGO-948 - Assertion error in MongoTemplate.getMappedSortObject.
* DATAMONGO-938 - Exception when creating geo within Criteria using MapReduce.
* DATAMONGO-745 - @Query($in) and Pageable in result Page total = 0.

Changes in version 1.4.3.RELEASE (2014-06-18)
---------------------------------------------
* DATAMONGO-955 - Release 1.4.3.
* DATAMONGO-953 - Update object should have a proper equals/hashcode/toString.
* DATAMONGO-952 - @Query annotation does not work with only field restrictions.
* DATAMONGO-948 - Assertion error in MongoTemplate.getMappedSortObject.
* DATAMONGO-938 - Exception when creating geo within Criteria using MapReduce.
* DATAMONGO-924 - Aggregation not working with as() method in project() pipeline operator.
* DATAMONGO-920 - Fix debug messages for delete events in AbstractMongoEventListener.
* DATAMONGO-917 - DefaultDbRefResolver throws NPE when bundled into an uberjar.
* DATAMONGO-914 - Improve resolving of LazyLoading proxies for classes that override equals/hashcode.
* DATAMONGO-913 - Can't query using lazy DBRef objects.
* DATAMONGO-912 - Aggregation#project followed by Aggregation#match with custom converter causes IllegalArgumentException.
* DATAMONGO-898 - MapReduce seems not to work when JavaScript is not escaped.
* DATAMONGO-847 - Allow usage of Criteria within Update.
* DATAMONGO-745 - @Query($in) and Pageable in result Page total = 0.
* DATAMONGO-647 - Using "OrderBy" in "query by method name" ignores the @Field annotation for field alias.

Changes in version 1.5.0.RELEASE (2014-05-20)
---------------------------------------------
* DATAMONGO-936 - Release 1.5 GA.

@@ -361,7 +726,7 @@ Changes in version 1.2.2.GA (2013-07-19)
Changes in version 1.3.0.M1 (2013-06-04)
----------------------------------------
** Bug
* [DATAMONGO-571] - Spring Data for MongoDB doesn't save null values when @Version is added to domain class
* [DATAMONGO-612] - Fix PDF reference documentation name
* [DATAMONGO-613] - Images missing from reference documentation
* [DATAMONGO-617] - NullPointerException in MongoTemplate.initializeVersionProperty(…)

@@ -397,7 +762,7 @@ Changes in version 1.3.0.M1 (2013-06-04)
* [DATAMONGO-637] - Typo in Query.query(…)
* [DATAMONGO-651] - WriteResult not available from thrown Exception
* [DATAMONGO-652] - Add support for elemMatch and positional operator projections
* [DATAMONGO-656] - Potential NullPointerException when debugging in MongoTemplate
* [DATAMONGO-657] - Allow to write Map value as DBRef
* [DATAMONGO-666] - Fix architecture inconsistency created by MongoDataIntegrityViolationException
* [DATAMONGO-680] - SimpleMongoRepository.exists(ID) improvement

@@ -419,7 +784,7 @@ Changes in version 1.3.0.M1 (2013-06-04)
Changes in version 1.2.1.GA (2013-04-17)
----------------------------------------
** Bug
* [DATAMONGO-571] - Spring Data for MongoDB doesn't save null values when @Version is added to domain class
* [DATAMONGO-612] - Fix PDF reference documentation name
* [DATAMONGO-613] - Images missing from reference documentation
* [DATAMONGO-617] - NullPointerException in MongoTemplate.initializeVersionProperty(…)

@@ -516,7 +881,7 @@ Changes in version 1.1.1.GA (2012-10-17)
Changes in version 1.1.0.GA (2012-10-10)
----------------------------------------
** Bug
* [DATAMONGO-523] - @TypeAlias annotation not used with AbstractMongoConfiguration
* [DATAMONGO-527] - Criteria.equals(…) broken for complex criteria
* [DATAMONGO-530] - MongoMappingContext.setApplicationContext(…) does not invoke superclass method
* [DATAMONGO-531] - StackOverflowError when persisting Groovy beans

@@ -593,16 +958,16 @@ Changes in version 1.1.0.M2 (2012-24-07)
* [DATAMONGO-446] - Pageable query methods returning List are broken
* [DATAMONGO-447] - Removal of Documents fails in debug mode for Documents with complex ids
* [DATAMONGO-450] - Enabling DEBUG causes RuntimeException
* [DATAMONGO-454] - ServerAddressPropertyEditor fails if a hostname is unresolvable
* [DATAMONGO-458] - When reading back empty collections, unmodifiable instances of Collections.emptyList/Set are returned.
* [DATAMONGO-462] - findAll() fails with NPE - discovering the root cause
* [DATAMONGO-465] - Mongo inserts document with "_id" as an integer but saves with "_id" as a string.
* [DATAMONGO-467] - String @id field is not mapped to ObjectId when using QueryDSL ".id" path
* [DATAMONGO-469] - Query creation from method names using AND criteria does not work anymore
* [DATAMONGO-474] - Wrong property is used for Id mapping
* [DATAMONGO-475] - 'group' operation fails where query references non-primitive property
* [DATAMONGO-480] - The WriteResultChecking is not used in case of insert or save of documents.
* [DATAMONGO-483] - @Indexed(unique=true, name="foo") puts name's value to the 'key' in the MongoDB
* [DATAMONGO-489] - ClassCastException when loading Map<String, String[]>

** Improvement
@@ -612,7 +977,7 @@ Changes in version 1.1.0.M2 (2012-24-07)
* [DATAMONGO-466] - QueryMapper shouldn't map id properties of nested classes
* [DATAMONGO-470] - Criteria and Query should have proper equals(…) and hashCode() method.
* [DATAMONGO-477] - Change upper bound of Google Guava package import to 13
* [DATAMONGO-482] - Typo in documentation - 2 i's in usiing
* [DATAMONGO-486] - Polish namespace implementation
* [DATAMONGO-491] - Release 1.1.0.M2

@@ -632,13 +997,13 @@ Changes in version 1.0.3.RELEASE (2012-24-07)
* [DATAMONGO-474] - Wrong property is used for Id mapping
* [DATAMONGO-475] - 'group' operation fails where query references non-primitive property
* [DATAMONGO-480] - The WriteResultChecking is not used in case of insert or save of documents.
* [DATAMONGO-483] - @Indexed(unique=true, name="foo") puts name's value to the 'key' in the MongoDB
* [DATAMONGO-489] - ClassCastException when loading Map<String, String[]>

** Improvement
* [DATAMONGO-466] - QueryMapper shouldn't map id properties of nested classes
* [DATAMONGO-470] - Criteria and Query should have proper equals(…) and hashCode() method.
* [DATAMONGO-482] - Typo in documentation - 2 i's in usiing

** Task
* [DATAMONGO-492] - Release 1.0.3

@@ -662,7 +1027,7 @@ Changes in version 1.0.2.RELEASE (2012-06-20)
* [DATAMONGO-446] - Pageable query methods returning List are broken
* [DATAMONGO-447] - Removal of Documents fails in debug mode for Documents with complex ids
* [DATAMONGO-450] - Enabling DEBUG causes RuntimeException
* [DATAMONGO-454] - ServerAddressPropertyEditor fails if a hostname is unresolvable
* [DATAMONGO-461] - MappedConstructor potentially throws NullPointerException
* [DATAMONGO-462] - findAll() fails with NPE - discovering the root cause

@@ -843,7 +1208,7 @@ Changes in version 1.0.0.M5 MongoDB (2011-10-24)
* [DATAMONGO-282] - Cannot create a "range" query
* [DATAMONGO-284] - Execution of Querydsl query maps id incorrectly
* [DATAMONGO-285] - NPE in MappingMongoConverter.writeMapInternal when saving a Map<String,Object> with val instance of Collection
* [DATAMONGO-288] - Querying same property multiple times produces incorrect query
* [DATAMONGO-289] - AbstractMongoEventListener will never call onAfterLoad
* [DATAMONGO-294] - List elements nested in Map lose their type when persisted

@@ -872,7 +1237,7 @@ Changes in version 1.0.0.M5 MongoDB (2011-10-24)
* [DATAMONGO-274] - Split up repository package according to the structure in Spring Data JPA

** Task
* [DATAMONGO-264] - Ensure Data Document examples work
* [DATAMONGO-265] - Create new github repository for mongodb
* [DATAMONGO-266] - Create new github repository for CouchDB
* [DATAMONGO-297] - Prune project directory

@@ -905,7 +1270,7 @@ Changes in version 1.0.0.M4 MongoDB (2011-09-01)
* [DATADOC-228] - NullPointerException when persisting Map with null values
* [DATADOC-229] - When a parameterized List is used in the PersistentConstructor, conversion fails
* [DATADOC-231] - spring-data-mongodb does not work in an OSGi server because of unresolved dependencies
* [DATADOC-232] - MongoDB allows $inc on many fields in one query, but Update().inc(first).inc(last) only applies the last inc
* [DATADOC-235] - Unable to map unstructured data
* [DATADOC-236] - Repository queries do not honour order defined in method name
* [DATADOC-237] - @Indexed annotation doesn't honor field name from @Field annotation

@@ -919,7 +1284,7 @@ Changes in version 1.0.0.M4 MongoDB (2011-09-01)

** Improvement
* [DATADOC-32] - SimpleMongoConverter could support identifying Spring EL expressions in keys
* [DATADOC-63] - Converters to support use of a 'typeId' strategy to determine class to marshall/unmarshal from Mongo
* [DATADOC-166] - Check for null in various template CRUD methods
* [DATADOC-169] - Registering custom converters for a type requires treating the type as simple in mapping context
* [DATADOC-171] - IllegalArgumentException when persisting entity with BigDecimal field

@@ -1002,7 +1367,7 @@ Querying / Updating
* [DATADOC-146] - Advanced Regexp Queries

Mapping
* [DATADOC-95] - Cannot save an object that has not had any of its properties set
* [DATADOC-97] - ID replacement not working correctly when using updateFirst/updateMulti
* [DATADOC-98] - Collection<Collection> or Object[][] doesn't save correctly
* [DATADOC-109] - Add MappingContext to MongoConverter interface

@@ -1053,10 +1418,10 @@ Mapping
* [DATADOC-33] - Introduce annotation to demarcate id field in a domain object

Repository
* [DATADOC-47, DATACMNS-17] - Adapted new metamodel API
* [DATADOC-46] - Added support for 'In' and 'NotIn' keyword
* [DATADOC-49] - Fixed 'And' and 'Or' keywords
* [DATADOC-41] - Added support for executing QueryDsl predicates
* [DATADOC-69] - Let repository namespace pick up the default mapping context bean and allow configuration
* [DATADOC-24] - Allow use of @Query annotation to define queries
* [DATADOC-34] - Create indexes for columns that are mentioned in query methods

@@ -1,8 +1,8 @@
Spring Data MongoDB 1.5 GA
Copyright (c) [2010-2014] Pivotal Software, Inc.
Spring Data MongoDB 1.5.6
Copyright (c) [2010-2015] Pivotal Software, Inc.

This product is licensed to you under the Apache License, Version 2.0 (the "License").
You may not use this product except in compliance with the License.

This product may include a number of subcomponents with
separate copyright notices and license terms. Your use of the source