Compare commits
87 Commits
labs/antor
...
1.7.x
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
26a31d07f5 | ||
|
|
aa5bdcd3fa | ||
|
|
2354b66c76 | ||
|
|
5b02bf0b51 | ||
|
|
2a061b9725 | ||
|
|
4f10d0cf05 | ||
|
|
8be3f02ab8 | ||
|
|
7baeb4bd9e | ||
|
|
05ca193331 | ||
|
|
b08990ccd0 | ||
|
|
f3770a4066 | ||
|
|
38b7a72f02 | ||
|
|
bccd9e22c0 | ||
|
|
6052571ff7 | ||
|
|
8ddbc3c206 | ||
|
|
2669dcb9bf | ||
|
|
295f7de8af | ||
|
|
7d20d640e8 | ||
|
|
704c130d94 | ||
|
|
3e5b4f25a4 | ||
|
|
22d49fea86 | ||
|
|
a224943f30 | ||
|
|
9d4c79e5c3 | ||
|
|
eff10210ed | ||
|
|
f151060773 | ||
|
|
7e6fd2e62a | ||
|
|
0cc050e966 | ||
|
|
e30eeaae79 | ||
|
|
b8cb2f74cf | ||
|
|
c876155d34 | ||
|
|
4e0c8a1525 | ||
|
|
eee7389d5c | ||
|
|
9890ac2aa5 | ||
|
|
2026cd3d22 | ||
|
|
4e8fdb97cc | ||
|
|
151441f908 | ||
|
|
abe2dddb1e | ||
|
|
ed777b53e4 | ||
|
|
855c9d4fab | ||
|
|
e0b8f5b566 | ||
|
|
82850d1605 | ||
|
|
8d0601550c | ||
|
|
282b63f7c2 | ||
|
|
0cb869ba97 | ||
|
|
c4fc32624b | ||
|
|
144000ed46 | ||
|
|
9f10921a48 | ||
|
|
c2127760cc | ||
|
|
d2f90bae5d | ||
|
|
b25fde4ff3 | ||
|
|
e23ca446a2 | ||
|
|
4c3b1951e2 | ||
|
|
3474f7e037 | ||
|
|
5308b8f446 | ||
|
|
e267f329fb | ||
|
|
234009cf53 | ||
|
|
ab4fb8477d | ||
|
|
76bec5e42d | ||
|
|
42beaaf47e | ||
|
|
f32afed779 | ||
|
|
b442abeef9 | ||
|
|
377c421052 | ||
|
|
fdef631c6e | ||
|
|
7ec61c9360 | ||
|
|
a95b9577da | ||
|
|
a02fe28644 | ||
|
|
d620546a3e | ||
|
|
f1621364b0 | ||
|
|
02578a0168 | ||
|
|
4bab7b32f2 | ||
|
|
0bda096e61 | ||
|
|
17139dcf3b | ||
|
|
bebfea0d33 | ||
|
|
885f0f4b70 | ||
|
|
a6000ee771 | ||
|
|
d4792cd680 | ||
|
|
949833a7db | ||
|
|
5abf1147e4 | ||
|
|
d65be17338 | ||
|
|
d1f915f702 | ||
|
|
58d50aa1ff | ||
|
|
0f30ffa090 | ||
|
|
496331d755 | ||
|
|
7658e1f1d3 | ||
|
|
221e03947b | ||
|
|
f0aed498d5 | ||
|
|
a62f6b8043 |
17
pom.xml
17
pom.xml
@@ -1,11 +1,11 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.7.0.RELEASE</version>
|
||||
<version>1.7.3.BUILD-SNAPSHOT</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<name>Spring Data MongoDB</name>
|
||||
@@ -15,8 +15,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data.build</groupId>
|
||||
<artifactId>spring-data-parent</artifactId>
|
||||
<version>1.6.0.RELEASE</version>
|
||||
<relativePath>../spring-data-build/parent/pom.xml</relativePath>
|
||||
<version>1.6.3.BUILD-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
@@ -29,7 +28,7 @@
|
||||
<properties>
|
||||
<project.type>multi</project.type>
|
||||
<dist.id>spring-data-mongodb</dist.id>
|
||||
<springdata.commons>1.10.0.RELEASE</springdata.commons>
|
||||
<springdata.commons>1.10.3.BUILD-SNAPSHOT</springdata.commons>
|
||||
<mongo>2.13.0</mongo>
|
||||
<mongo.osgi>2.13.0</mongo.osgi>
|
||||
</properties>
|
||||
@@ -108,7 +107,7 @@
|
||||
|
||||
<id>mongo-next</id>
|
||||
<properties>
|
||||
<mongo>2.13.0-SNAPSHOT</mongo>
|
||||
<mongo>2.14.0-SNAPSHOT</mongo>
|
||||
</properties>
|
||||
|
||||
<repositories>
|
||||
@@ -124,7 +123,7 @@
|
||||
|
||||
<id>mongo3</id>
|
||||
<properties>
|
||||
<mongo>3.0.0-beta3</mongo>
|
||||
<mongo>3.0.2</mongo>
|
||||
</properties>
|
||||
|
||||
</profile>
|
||||
@@ -157,8 +156,8 @@
|
||||
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>spring-libs-release</id>
|
||||
<url>https://repo.spring.io/libs-release</url>
|
||||
<id>spring-libs-snapshot</id>
|
||||
<url>https://repo.spring.io/libs-snapshot</url>
|
||||
</repository>
|
||||
</repositories>
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.7.0.RELEASE</version>
|
||||
<version>1.7.3.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -48,7 +48,7 @@
|
||||
<dependency>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb</artifactId>
|
||||
<version>1.7.0.RELEASE</version>
|
||||
<version>1.7.3.BUILD-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -37,6 +37,8 @@ import com.mongodb.MongoException;
|
||||
/**
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @author Alex Vengrovsk
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
|
||||
@@ -45,7 +47,7 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
private static final String ENTITY_FIELD_NAME = "_entity_field_name";
|
||||
private static final String ENTITY_FIELD_CLASS = "_entity_field_class";
|
||||
|
||||
protected final Logger log = LoggerFactory.getLogger(getClass());
|
||||
private final Logger log = LoggerFactory.getLogger(getClass());
|
||||
|
||||
private MongoTemplate mongoTemplate;
|
||||
private EntityManagerFactory entityManagerFactory;
|
||||
@@ -76,25 +78,25 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
dbk.put(ENTITY_ID, id);
|
||||
dbk.put(ENTITY_CLASS, entityClass.getName());
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Loading MongoDB data for " + dbk);
|
||||
log.debug("Loading MongoDB data for {}", dbk);
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
for (DBObject dbo : collection.find(dbk)) {
|
||||
String key = (String) dbo.get(ENTITY_FIELD_NAME);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Processing key: " + key);
|
||||
log.debug("Processing key: {}", key);
|
||||
}
|
||||
if (!changeSet.getValues().containsKey(key)) {
|
||||
String className = (String) dbo.get(ENTITY_FIELD_CLASS);
|
||||
if (className == null) {
|
||||
throw new DataIntegrityViolationException("Unble to convert property " + key + ": Invalid metadata, "
|
||||
+ ENTITY_FIELD_CLASS + " not available");
|
||||
throw new DataIntegrityViolationException(
|
||||
"Unble to convert property " + key + ": Invalid metadata, " + ENTITY_FIELD_CLASS + " not available");
|
||||
}
|
||||
Class<?> clazz = ClassUtils.resolveClassName(className, ClassUtils.getDefaultClassLoader());
|
||||
Object value = mongoTemplate.getConverter().read(clazz, dbo);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Adding to ChangeSet: " + key);
|
||||
log.debug("Adding to ChangeSet: {}", key);
|
||||
}
|
||||
changeSet.set(key, value);
|
||||
}
|
||||
@@ -109,9 +111,9 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
|
||||
|
||||
log.debug("getPersistentId called on " + entity);
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("getPersistentId called on {}", entity);
|
||||
}
|
||||
if (entityManagerFactory == null) {
|
||||
throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null");
|
||||
}
|
||||
@@ -130,7 +132,7 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: changeset: " + cs.getValues());
|
||||
log.debug("Flush: changeset: {}", cs.getValues());
|
||||
}
|
||||
|
||||
String collName = getCollectionNameForEntity(entity.getClass());
|
||||
@@ -152,7 +154,7 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
});
|
||||
if (value == null) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: removing: " + dbQuery);
|
||||
log.debug("Flush: removing: {}", dbQuery);
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
@@ -164,7 +166,7 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
final DBObject dbDoc = new BasicDBObject();
|
||||
dbDoc.putAll(dbQuery);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: saving: " + dbQuery);
|
||||
log.debug("Flush: saving: {}", dbQuery);
|
||||
}
|
||||
mongoTemplate.getConverter().write(value, dbDoc);
|
||||
dbDoc.put(ENTITY_FIELD_CLASS, value.getClass().getName());
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.7.0.RELEASE</version>
|
||||
<version>1.7.3.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.7.0.RELEASE</version>
|
||||
<version>1.7.3.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -160,7 +160,7 @@ public class MongoLog4jAppender extends AppenderSkeleton {
|
||||
|
||||
// Copy properties into document
|
||||
Map<Object, Object> props = event.getProperties();
|
||||
if (null != props && props.size() > 0) {
|
||||
if (null != props && !props.isEmpty()) {
|
||||
BasicDBObject propsDbo = new BasicDBObject();
|
||||
for (Map.Entry<Object, Object> entry : props.entrySet()) {
|
||||
propsDbo.put(entry.getKey().toString(), entry.getValue().toString());
|
||||
|
||||
@@ -39,7 +39,7 @@ public class MongoLog4jAppenderIntegrationTests {
|
||||
|
||||
static final String NAME = MongoLog4jAppenderIntegrationTests.class.getName();
|
||||
|
||||
Logger log = Logger.getLogger(NAME);
|
||||
private static final Logger log = Logger.getLogger(NAME);
|
||||
Mongo mongo;
|
||||
DB db;
|
||||
String collection;
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.7.0.RELEASE</version>
|
||||
<version>1.7.3.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -17,8 +17,11 @@ package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Properties;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
@@ -28,10 +31,13 @@ import com.mongodb.MongoCredential;
|
||||
* Parse a {@link String} to a Collection of {@link MongoCredential}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
* @since 1.7
|
||||
*/
|
||||
public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
private static final Pattern GROUP_PATTERN = Pattern.compile("(\\\\?')(.*?)\\1");
|
||||
|
||||
private static final String AUTH_MECHANISM_KEY = "uri.authMechanism";
|
||||
private static final String USERNAME_PASSWORD_DELIMINATOR = ":";
|
||||
private static final String DATABASE_DELIMINATOR = "@";
|
||||
@@ -51,11 +57,7 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
List<MongoCredential> credentials = new ArrayList<MongoCredential>();
|
||||
|
||||
for (String credentialString : text.split(",")) {
|
||||
|
||||
if (!text.contains(USERNAME_PASSWORD_DELIMINATOR) || !text.contains(DATABASE_DELIMINATOR)) {
|
||||
throw new IllegalArgumentException("Credentials need to be in format 'username:password@database'!");
|
||||
}
|
||||
for (String credentialString : extractCredentialsString(text)) {
|
||||
|
||||
String[] userNameAndPassword = extractUserNameAndPassword(credentialString);
|
||||
String database = extractDB(credentialString);
|
||||
@@ -68,43 +70,83 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
|
||||
String authMechanism = options.getProperty(AUTH_MECHANISM_KEY);
|
||||
|
||||
if (MongoCredential.GSSAPI_MECHANISM.equals(authMechanism)) {
|
||||
|
||||
verifyUserNamePresent(userNameAndPassword);
|
||||
credentials.add(MongoCredential.createGSSAPICredential(userNameAndPassword[0]));
|
||||
} else if (MongoCredential.MONGODB_CR_MECHANISM.equals(authMechanism)) {
|
||||
|
||||
verifyUsernameAndPasswordPresent(userNameAndPassword);
|
||||
verifyDatabasePresent(database);
|
||||
credentials.add(MongoCredential.createMongoCRCredential(userNameAndPassword[0], database,
|
||||
userNameAndPassword[1].toCharArray()));
|
||||
} else if (MongoCredential.MONGODB_X509_MECHANISM.equals(authMechanism)) {
|
||||
|
||||
verifyUserNamePresent(userNameAndPassword);
|
||||
credentials.add(MongoCredential.createMongoX509Credential(userNameAndPassword[0]));
|
||||
} else if (MongoCredential.PLAIN_MECHANISM.equals(authMechanism)) {
|
||||
|
||||
verifyUsernameAndPasswordPresent(userNameAndPassword);
|
||||
verifyDatabasePresent(database);
|
||||
credentials.add(MongoCredential.createPlainCredential(userNameAndPassword[0], database,
|
||||
userNameAndPassword[1].toCharArray()));
|
||||
} else if (MongoCredential.SCRAM_SHA_1_MECHANISM.equals(authMechanism)) {
|
||||
|
||||
verifyUsernameAndPasswordPresent(userNameAndPassword);
|
||||
verifyDatabasePresent(database);
|
||||
credentials.add(MongoCredential.createScramSha1Credential(userNameAndPassword[0], database,
|
||||
userNameAndPassword[1].toCharArray()));
|
||||
} else {
|
||||
throw new IllegalArgumentException(String.format(
|
||||
"Cannot create MongoCredentials for unknown auth mechanism '%s'!", authMechanism));
|
||||
throw new IllegalArgumentException(
|
||||
String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'!", authMechanism));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
credentials.add(MongoCredential.createCredential(userNameAndPassword[0], database,
|
||||
userNameAndPassword[1].toCharArray()));
|
||||
|
||||
verifyUsernameAndPasswordPresent(userNameAndPassword);
|
||||
verifyDatabasePresent(database);
|
||||
credentials.add(
|
||||
MongoCredential.createCredential(userNameAndPassword[0], database, userNameAndPassword[1].toCharArray()));
|
||||
}
|
||||
}
|
||||
|
||||
setValue(credentials);
|
||||
}
|
||||
|
||||
private List<String> extractCredentialsString(String source) {
|
||||
|
||||
Matcher matcher = GROUP_PATTERN.matcher(source);
|
||||
List<String> list = new ArrayList<String>();
|
||||
|
||||
while (matcher.find()) {
|
||||
|
||||
String value = StringUtils.trimLeadingCharacter(matcher.group(), '\'');
|
||||
list.add(StringUtils.trimTrailingCharacter(value, '\''));
|
||||
}
|
||||
|
||||
if (!list.isEmpty()) {
|
||||
return list;
|
||||
}
|
||||
|
||||
return Arrays.asList(source.split(","));
|
||||
}
|
||||
|
||||
private static String[] extractUserNameAndPassword(String text) {
|
||||
|
||||
int dbSeperationIndex = text.lastIndexOf(DATABASE_DELIMINATOR);
|
||||
String userNameAndPassword = text.substring(0, dbSeperationIndex);
|
||||
return userNameAndPassword.split(USERNAME_PASSWORD_DELIMINATOR);
|
||||
int index = text.lastIndexOf(DATABASE_DELIMINATOR);
|
||||
|
||||
index = index != -1 ? index : text.lastIndexOf(OPTIONS_DELIMINATOR);
|
||||
|
||||
return index == -1 ? new String[] {} : text.substring(0, index).split(USERNAME_PASSWORD_DELIMINATOR);
|
||||
}
|
||||
|
||||
private static String extractDB(String text) {
|
||||
|
||||
int dbSeperationIndex = text.lastIndexOf(DATABASE_DELIMINATOR);
|
||||
|
||||
if (dbSeperationIndex == -1) {
|
||||
return "";
|
||||
}
|
||||
|
||||
String tmp = text.substring(dbSeperationIndex + 1);
|
||||
int optionsSeperationIndex = tmp.lastIndexOf(OPTIONS_DELIMINATOR);
|
||||
|
||||
@@ -129,4 +171,28 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
return properties;
|
||||
}
|
||||
|
||||
private static void verifyUsernameAndPasswordPresent(String[] source) {
|
||||
|
||||
verifyUserNamePresent(source);
|
||||
|
||||
if (source.length != 2) {
|
||||
throw new IllegalArgumentException(
|
||||
"Credentials need to specify username and password like in 'username:password@database'!");
|
||||
}
|
||||
}
|
||||
|
||||
private static void verifyDatabasePresent(String source) {
|
||||
|
||||
if (!StringUtils.hasText(source)) {
|
||||
throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'!");
|
||||
}
|
||||
}
|
||||
|
||||
private static void verifyUserNamePresent(String[] source) {
|
||||
|
||||
if (source.length == 0 || !StringUtils.hasText(source[0])) {
|
||||
throw new IllegalArgumentException("Credentials need to specify username!");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,7 +25,7 @@ import com.mongodb.DBCursor;
|
||||
interface CursorPreparer {
|
||||
|
||||
/**
|
||||
* Prepare the given cursor (apply limits, skips and so on). Returns th eprepared cursor.
|
||||
* Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
|
||||
*
|
||||
* @param cursor
|
||||
*/
|
||||
|
||||
@@ -0,0 +1,72 @@
|
||||
/*
|
||||
* Copyright 2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Value object to mitigate different representations of geo command execution results in MongoDB.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @soundtrack Fruitcake - Jeff Coffin (The Inside of the Outside)
|
||||
*/
|
||||
class GeoCommandStatistics {
|
||||
|
||||
private static final GeoCommandStatistics NONE = new GeoCommandStatistics(new BasicDBObject());
|
||||
|
||||
private final DBObject source;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoCommandStatistics} instance with the given source document.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
*/
|
||||
private GeoCommandStatistics(DBObject source) {
|
||||
|
||||
Assert.notNull(source, "Source document must not be null!");
|
||||
this.source = source;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoCommandStatistics} from the given command result extracting the statistics.
|
||||
*
|
||||
* @param commandResult must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static GeoCommandStatistics from(DBObject commandResult) {
|
||||
|
||||
Assert.notNull(commandResult, "Command result must not be null!");
|
||||
|
||||
Object stats = commandResult.get("stats");
|
||||
return stats == null ? NONE : new GeoCommandStatistics((DBObject) stats);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the average distance reported by the command result. Mitigating a removal of the field in case the command
|
||||
* didn't return any result introduced in MongoDB 3.2 RC1.
|
||||
*
|
||||
* @return
|
||||
* @see https://jira.mongodb.org/browse/SERVER-21024
|
||||
*/
|
||||
public double getAverageDistance() {
|
||||
|
||||
Object averageDistance = source.get("avgDistance");
|
||||
return averageDistance == null ? Double.NaN : (Double) averageDistance;
|
||||
}
|
||||
}
|
||||
@@ -49,7 +49,7 @@ public class MongoAction {
|
||||
* @param collectionName the collection name, must not be {@literal null} or empty.
|
||||
* @param entityType the POJO that is being operated against
|
||||
* @param document the converted DBObject from the POJO or Spring Update object
|
||||
* @param query the converted DBOjbect from the Spring Query object
|
||||
* @param query the converted DBObject from the Spring Query object
|
||||
*/
|
||||
public MongoAction(WriteConcern defaultWriteConcern, MongoActionOperation mongoActionOperation,
|
||||
String collectionName, Class<?> entityType, DBObject document, DBObject query) {
|
||||
|
||||
@@ -123,7 +123,7 @@ public abstract class MongoDbUtils {
|
||||
|
||||
DB db = mongo.getDB(databaseName);
|
||||
|
||||
if (requiresAuthDbAuthentication(credentials)) {
|
||||
if (!(mongo instanceof MongoClient) && requiresAuthDbAuthentication(credentials)) {
|
||||
ReflectiveDbInvoker.authenticate(mongo, db, credentials, authenticationDatabaseName);
|
||||
}
|
||||
|
||||
@@ -199,8 +199,8 @@ public abstract class MongoDbUtils {
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if credentials present. In case we're using a monog-java-driver version 3 or above we do not have the need
|
||||
* for authentication as the auth data has to be provied within the MongoClient
|
||||
* Check if credentials present. In case we're using a mongo-java-driver version 3 or above we do not have the need
|
||||
* for authentication as the auth data has to be provided within the MongoClient
|
||||
*
|
||||
* @param credentials
|
||||
* @return
|
||||
|
||||
@@ -190,7 +190,7 @@ public interface MongoOperations {
|
||||
<T> DBCollection createCollection(Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Create a collect with a name based on the provided entity class using the options.
|
||||
* Create a collection with a name based on the provided entity class using the options.
|
||||
*
|
||||
* @param entityClass class that determines the collection to create
|
||||
* @param collectionOptions options to use when creating the collection.
|
||||
@@ -207,7 +207,7 @@ public interface MongoOperations {
|
||||
DBCollection createCollection(String collectionName);
|
||||
|
||||
/**
|
||||
* Create a collect with the provided name and options.
|
||||
* Create a collection with the provided name and options.
|
||||
*
|
||||
* @param collectionName name of the collection
|
||||
* @param collectionOptions options to use when creating the collection.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
* Copyright 2010-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -136,6 +136,8 @@ import com.mongodb.util.JSONParseException;
|
||||
* @author Thomas Darimont
|
||||
* @author Chuong Ngo
|
||||
* @author Christoph Strobl
|
||||
* @author Doménique Tilleuil
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
@@ -335,9 +337,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DBObject mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), persistentEntity);
|
||||
|
||||
DBCursor cursor = collection.find(mappedQuery, mappedFields);
|
||||
QueryCursorPreparer cursorPreparer = new QueryCursorPreparer(query, entityType);
|
||||
|
||||
ReadDbObjectCallback<T> readCallback = new ReadDbObjectCallback<T>(mongoConverter, entityType);
|
||||
|
||||
return new CloseableIterableCusorAdapter<T>(cursor, exceptionTranslator, readCallback);
|
||||
return new CloseableIterableCursorAdapter<T>(cursorPreparer.prepare(cursor), exceptionTranslator, readCallback);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -382,7 +386,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
CommandResult result = execute(new DbCallback<CommandResult>() {
|
||||
public CommandResult doInDB(DB db) throws MongoException, DataAccessException {
|
||||
return db.command(command, readPreference);
|
||||
return readPreference != null ? db.command(command, readPreference) : db.command(command);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -392,13 +396,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
protected void logCommandExecutionError(final DBObject command, CommandResult result) {
|
||||
|
||||
String error = result.getErrorMessage();
|
||||
|
||||
if (error != null) {
|
||||
// TODO: DATADOC-204 allow configuration of logging level / throw
|
||||
// throw new
|
||||
// InvalidDataAccessApiUsageException("Command execution of " +
|
||||
// command.toString() + " failed: " + error);
|
||||
LOGGER.warn("Command execution of " + command.toString() + " failed: " + error);
|
||||
LOGGER.warn("Command execution of {} failed: {}", command.toString(), error);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -426,8 +428,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DBObject fieldsObject = query.getFieldsObject();
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Executing query: %s sort: %s fields: %s in collection: %s",
|
||||
serializeToJsonSafely(queryObject), sortObject, fieldsObject, collectionName));
|
||||
LOGGER.debug("Executing query: {} sort: {} fields: {} in collection: {}", serializeToJsonSafely(queryObject),
|
||||
sortObject, fieldsObject, collectionName);
|
||||
}
|
||||
|
||||
this.executeQueryInternal(new FindCallback(queryObject, fieldsObject), preparer, dch, collectionName);
|
||||
@@ -441,7 +443,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DB db = this.getDb();
|
||||
return action.doInDB(db);
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e);
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -457,7 +459,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DBCollection collection = getAndPrepareCollection(getDb(), collectionName);
|
||||
return callback.doInCollection(collection);
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e);
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -525,7 +527,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
public Void doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
collection.drop();
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Dropped collection [" + collection.getFullName() + "]");
|
||||
LOGGER.debug("Dropped collection [{}]", collection.getFullName());
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@@ -626,10 +628,22 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
String collection = StringUtils.hasText(collectionName) ? collectionName : determineCollectionName(entityClass);
|
||||
BasicDBObject command = new BasicDBObject("geoNear", collection);
|
||||
command.putAll(near.toDBObject());
|
||||
DBObject nearDbObject = near.toDBObject();
|
||||
|
||||
CommandResult commandResult = executeCommand(command);
|
||||
BasicDBObject command = new BasicDBObject("geoNear", collection);
|
||||
command.putAll(nearDbObject);
|
||||
|
||||
if (nearDbObject.containsField("query")) {
|
||||
DBObject query = (DBObject) nearDbObject.get("query");
|
||||
command.put("query", queryMapper.getMappedObject(query, getPersistentEntity(entityClass)));
|
||||
}
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Executing geoNear using: {} for class: {} in collection: {}", serializeToJsonSafely(command),
|
||||
entityClass, collectionName);
|
||||
}
|
||||
|
||||
CommandResult commandResult = executeCommand(command, this.readPreference);
|
||||
List<Object> results = (List<Object>) commandResult.get("results");
|
||||
results = results == null ? Collections.emptyList() : results;
|
||||
|
||||
@@ -659,9 +673,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return new GeoResults<T>(result, near.getMetric());
|
||||
}
|
||||
|
||||
DBObject stats = (DBObject) commandResult.get("stats");
|
||||
double averageDistance = stats == null ? 0 : (Double) stats.get("avgDistance");
|
||||
return new GeoResults<T>(result, new Distance(averageDistance, near.getMetric()));
|
||||
GeoCommandStatistics stats = GeoCommandStatistics.from(commandResult);
|
||||
return new GeoResults<T>(result, new Distance(stats.getAverageDistance(), near.getMetric()));
|
||||
}
|
||||
|
||||
public <T> T findAndModify(Query query, Update update, Class<T> entityClass) {
|
||||
@@ -771,11 +784,17 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
protected WriteConcern prepareWriteConcern(MongoAction mongoAction) {
|
||||
|
||||
WriteConcern wc = writeConcernResolver.resolve(mongoAction);
|
||||
return potentiallyForceAcknowledgedWrite(wc);
|
||||
}
|
||||
|
||||
if (MongoClientVersion.isMongo3Driver()
|
||||
&& ObjectUtils.nullSafeEquals(WriteResultChecking.EXCEPTION, writeResultChecking)
|
||||
&& (wc == null || wc.getW() < 1)) {
|
||||
return WriteConcern.ACKNOWLEDGED;
|
||||
private WriteConcern potentiallyForceAcknowledgedWrite(WriteConcern wc) {
|
||||
|
||||
if (ObjectUtils.nullSafeEquals(WriteResultChecking.EXCEPTION, writeResultChecking)
|
||||
&& MongoClientVersion.isMongo3Driver()) {
|
||||
if (wc == null || wc.getWObject() == null
|
||||
|| (wc.getWObject() instanceof Number && ((Number) wc.getWObject()).intValue() < 1)) {
|
||||
return WriteConcern.ACKNOWLEDGED;
|
||||
}
|
||||
}
|
||||
return wc;
|
||||
}
|
||||
@@ -841,27 +860,33 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
protected <T> void doInsertAll(Collection<? extends T> listToSave, MongoWriter<T> writer) {
|
||||
Map<String, List<T>> objs = new HashMap<String, List<T>>();
|
||||
|
||||
for (T o : listToSave) {
|
||||
Map<String, List<T>> elementsByCollection = new HashMap<String, List<T>>();
|
||||
|
||||
for (T element : listToSave) {
|
||||
|
||||
if (element == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(element.getClass());
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(o.getClass());
|
||||
if (entity == null) {
|
||||
throw new InvalidDataAccessApiUsageException("No Persitent Entity information found for the class "
|
||||
+ o.getClass().getName());
|
||||
throw new InvalidDataAccessApiUsageException("No PersistentEntity information found for " + element.getClass());
|
||||
}
|
||||
|
||||
String collection = entity.getCollection();
|
||||
List<T> collectionElements = elementsByCollection.get(collection);
|
||||
|
||||
List<T> objList = objs.get(collection);
|
||||
if (null == objList) {
|
||||
objList = new ArrayList<T>();
|
||||
objs.put(collection, objList);
|
||||
if (null == collectionElements) {
|
||||
collectionElements = new ArrayList<T>();
|
||||
elementsByCollection.put(collection, collectionElements);
|
||||
}
|
||||
objList.add(o);
|
||||
|
||||
collectionElements.add(element);
|
||||
}
|
||||
|
||||
for (Map.Entry<String, List<T>> entry : objs.entrySet()) {
|
||||
for (Map.Entry<String, List<T>> entry : elementsByCollection.entrySet()) {
|
||||
doInsertBatch(entry.getKey(), entry.getValue(), this.mongoConverter);
|
||||
}
|
||||
}
|
||||
@@ -969,9 +994,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
protected Object insertDBObject(final String collectionName, final DBObject dbDoc, final Class<?> entityClass) {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Inserting DBObject containing fields: " + dbDoc.keySet() + " in collection: " + collectionName);
|
||||
LOGGER.debug("Inserting DBObject containing fields: {} in collection: {}", dbDoc.keySet(), collectionName);
|
||||
}
|
||||
|
||||
return execute(collectionName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName,
|
||||
@@ -991,8 +1018,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Inserting list of DBObjects containing " + dbDocList.size() + " items");
|
||||
LOGGER.debug("Inserting list of DBObjects containing {} items", dbDocList.size());
|
||||
}
|
||||
|
||||
execute(collectionName, new CollectionCallback<Void>() {
|
||||
public Void doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT_LIST, collectionName, null,
|
||||
@@ -1019,9 +1047,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
protected Object saveDBObject(final String collectionName, final DBObject dbDoc, final Class<?> entityClass) {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Saving DBObject containing fields: " + dbDoc.keySet());
|
||||
LOGGER.debug("Saving DBObject containing fields: {}", dbDoc.keySet());
|
||||
}
|
||||
|
||||
return execute(collectionName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass,
|
||||
@@ -1087,8 +1117,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s",
|
||||
serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName));
|
||||
LOGGER.debug("Calling update using query: {} and update: {} in collection: {}",
|
||||
serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName);
|
||||
}
|
||||
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName,
|
||||
@@ -1323,8 +1353,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
copyMapReduceOptionsToCommand(query, mapReduceOptions, command);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Executing MapReduce on collection [" + command.getInput() + "], mapFunction [" + mapFunc
|
||||
+ "], reduceFunction [" + reduceFunc + "]");
|
||||
LOGGER.debug("Executing MapReduce on collection [{}], mapFunction [{}], reduceFunction [{}]", command.getInput(),
|
||||
mapFunc, reduceFunc);
|
||||
}
|
||||
|
||||
MapReduceOutput mapReduceOutput = inputCollection.mapReduce(command);
|
||||
@@ -1494,7 +1524,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command));
|
||||
}
|
||||
|
||||
CommandResult commandResult = executeCommand(command);
|
||||
CommandResult commandResult = executeCommand(command, this.readPreference);
|
||||
handleCommandError(commandResult, command);
|
||||
|
||||
return new AggregationResults<O>(returnPotentiallyMappedResults(outputType, commandResult), commandResult);
|
||||
@@ -1537,10 +1567,17 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
throw new InvalidDataAccessApiUsageException(String.format("Resource %s not found!", function));
|
||||
}
|
||||
|
||||
Scanner scanner = null;
|
||||
|
||||
try {
|
||||
return new Scanner(functionResource.getInputStream()).useDelimiter("\\A").next();
|
||||
scanner = new Scanner(functionResource.getInputStream());
|
||||
return scanner.useDelimiter("\\A").next();
|
||||
} catch (IOException e) {
|
||||
throw new InvalidDataAccessApiUsageException(String.format("Cannot read map-reduce file %s!", function), e);
|
||||
} finally {
|
||||
if (scanner != null) {
|
||||
scanner.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1555,8 +1592,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
"Can not use skip or field specification with map reduce operations");
|
||||
}
|
||||
|
||||
if (query.getLimit() > 0) {
|
||||
if (query.getLimit() > 0 && mapReduceOptions.getLimit() == null) {
|
||||
mapReduceCommand.setLimit(query.getLimit());
|
||||
}
|
||||
if (query.getSortObject() != null) {
|
||||
@@ -1564,6 +1600,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
}
|
||||
|
||||
if (mapReduceOptions.getLimit() != null && mapReduceOptions.getLimit().intValue() > 0) {
|
||||
mapReduceCommand.setLimit(mapReduceOptions.getLimit());
|
||||
}
|
||||
|
||||
if (mapReduceOptions.getJavaScriptMode() != null) {
|
||||
mapReduceCommand.setJsMode(true);
|
||||
}
|
||||
@@ -1638,8 +1678,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DBObject mappedFields = fields == null ? null : queryMapper.getMappedObject(fields, entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("findOne using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), mappedFields, entityClass, collectionName));
|
||||
LOGGER.debug("findOne using query: {} fields: {} for class: {} in collection: {}", serializeToJsonSafely(query),
|
||||
mappedFields, entityClass, collectionName);
|
||||
}
|
||||
|
||||
return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields), new ReadDbObjectCallback<T>(
|
||||
@@ -1689,8 +1729,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DBObject mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName));
|
||||
LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}",
|
||||
serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName);
|
||||
}
|
||||
|
||||
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback,
|
||||
@@ -1726,12 +1766,16 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
*/
|
||||
protected <T> T doFindAndRemove(String collectionName, DBObject query, DBObject fields, DBObject sort,
|
||||
Class<T> entityClass) {
|
||||
|
||||
EntityReader<? super T, DBObject> readerToUse = this.mongoConverter;
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), fields, sort, entityClass, collectionName));
|
||||
LOGGER.debug("findAndRemove using query: {} fields: {} sort: {} for class: {} in collection: {}",
|
||||
serializeToJsonSafely(query), fields, sort, entityClass, collectionName);
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
|
||||
return executeFindOneInternal(new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort),
|
||||
new ReadDbObjectCallback<T>(readerToUse, entityClass), collectionName);
|
||||
}
|
||||
@@ -1753,9 +1797,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DBObject mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s "
|
||||
+ "in collection: %s", serializeToJsonSafely(mappedQuery), fields, sort, entityClass,
|
||||
serializeToJsonSafely(mappedUpdate), collectionName));
|
||||
LOGGER.debug(
|
||||
"findAndModify using query: {} fields: {} sort: {} for class: {} and update: {} " + "in collection: {}",
|
||||
serializeToJsonSafely(mappedQuery), fields, sort, entityClass, serializeToJsonSafely(mappedUpdate),
|
||||
collectionName);
|
||||
}
|
||||
|
||||
return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options),
|
||||
@@ -1803,7 +1848,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
prepareCollection(collection);
|
||||
return collection;
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e);
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1829,7 +1874,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
collectionName)));
|
||||
return result;
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e);
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1882,7 +1927,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e);
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1912,7 +1957,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e);
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1991,18 +2036,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Tries to convert the given {@link RuntimeException} into a {@link DataAccessException} but returns the original
|
||||
* exception if the conversation failed. Thus allows safe rethrowing of the return value.
|
||||
*
|
||||
* @param ex
|
||||
* @return
|
||||
*/
|
||||
private RuntimeException potentiallyConvertRuntimeException(RuntimeException ex) {
|
||||
RuntimeException resolved = this.exceptionTranslator.translateExceptionIfPossible(ex);
|
||||
return resolved == null ? ex : resolved;
|
||||
}
|
||||
|
||||
/**
|
||||
* Inspects the given {@link CommandResult} for erros and potentially throws an
|
||||
* {@link InvalidDataAccessApiUsageException} for that error.
|
||||
@@ -2041,6 +2074,20 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return queryMapper.getMappedSort(query.getSortObject(), mappingContext.getPersistentEntity(type));
|
||||
}
|
||||
|
||||
/**
|
||||
* Tries to convert the given {@link RuntimeException} into a {@link DataAccessException} but returns the original
|
||||
* exception if the conversation failed. Thus allows safe re-throwing of the return value.
|
||||
*
|
||||
* @param ex the exception to translate
|
||||
* @param exceptionTranslator the {@link PersistenceExceptionTranslator} to be used for translation
|
||||
* @return
|
||||
*/
|
||||
private static RuntimeException potentiallyConvertRuntimeException(RuntimeException ex,
|
||||
PersistenceExceptionTranslator exceptionTranslator) {
|
||||
RuntimeException resolved = exceptionTranslator.translateExceptionIfPossible(ex);
|
||||
return resolved == null ? ex : resolved;
|
||||
}
|
||||
|
||||
// Callback implementations
|
||||
|
||||
/**
|
||||
@@ -2063,14 +2110,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
public DBObject doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
if (fields == null) {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("findOne using query: %s in db.collection: %s", serializeToJsonSafely(query),
|
||||
collection.getFullName()));
|
||||
LOGGER.debug("findOne using query: {} in db.collection: {}", serializeToJsonSafely(query),
|
||||
collection.getFullName());
|
||||
}
|
||||
return collection.findOne(query);
|
||||
} else {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("findOne using query: %s fields: %s in db.collection: %s",
|
||||
serializeToJsonSafely(query), fields, collection.getFullName()));
|
||||
LOGGER.debug("findOne using query: {} fields: {} in db.collection: {}", serializeToJsonSafely(query), fields,
|
||||
collection.getFullName());
|
||||
}
|
||||
return collection.findOne(query, fields);
|
||||
}
|
||||
@@ -2283,7 +2330,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e);
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
}
|
||||
|
||||
return cursorToUse;
|
||||
@@ -2330,20 +2377,20 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
* @since 1.7
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
static class CloseableIterableCusorAdapter<T> implements CloseableIterator<T> {
|
||||
static class CloseableIterableCursorAdapter<T> implements CloseableIterator<T> {
|
||||
|
||||
private volatile Cursor cursor;
|
||||
private PersistenceExceptionTranslator exceptionTranslator;
|
||||
private DbObjectCallback<T> objectReadCallback;
|
||||
|
||||
/**
|
||||
* Creates a new {@link CloseableIterableCusorAdapter} backed by the given {@link Cursor}.
|
||||
* Creates a new {@link CloseableIterableCursorAdapter} backed by the given {@link Cursor}.
|
||||
*
|
||||
* @param cursor
|
||||
* @param exceptionTranslator
|
||||
* @param objectReadCallback
|
||||
*/
|
||||
public CloseableIterableCusorAdapter(Cursor cursor, PersistenceExceptionTranslator exceptionTranslator,
|
||||
public CloseableIterableCursorAdapter(Cursor cursor, PersistenceExceptionTranslator exceptionTranslator,
|
||||
DbObjectCallback<T> objectReadCallback) {
|
||||
|
||||
this.cursor = cursor;
|
||||
@@ -2361,7 +2408,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
try {
|
||||
return cursor.hasNext();
|
||||
} catch (RuntimeException ex) {
|
||||
throw exceptionTranslator.translateExceptionIfPossible(ex);
|
||||
throw potentiallyConvertRuntimeException(ex, exceptionTranslator);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2377,7 +2424,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
T converted = objectReadCallback.doWith(item);
|
||||
return converted;
|
||||
} catch (RuntimeException ex) {
|
||||
throw exceptionTranslator.translateExceptionIfPossible(ex);
|
||||
throw potentiallyConvertRuntimeException(ex, exceptionTranslator);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2388,7 +2435,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
try {
|
||||
c.close();
|
||||
} catch (RuntimeException ex) {
|
||||
throw exceptionTranslator.translateExceptionIfPossible(ex);
|
||||
throw potentiallyConvertRuntimeException(ex, exceptionTranslator);
|
||||
} finally {
|
||||
cursor = null;
|
||||
exceptionTranslator = null;
|
||||
|
||||
@@ -19,6 +19,7 @@ import java.net.UnknownHostException;
|
||||
|
||||
import org.springframework.beans.factory.DisposableBean;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
@@ -103,8 +104,8 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
*/
|
||||
@Deprecated
|
||||
public SimpleMongoDbFactory(MongoURI uri) throws MongoException, UnknownHostException {
|
||||
this(new Mongo(uri), uri.getDatabase(), new UserCredentials(uri.getUsername(), parseChars(uri.getPassword())),
|
||||
true, uri.getDatabase());
|
||||
this(new Mongo(uri), uri.getDatabase(), new UserCredentials(uri.getUsername(), parseChars(uri.getPassword())), true,
|
||||
uri.getDatabase());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -132,6 +133,11 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
private SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials credentials,
|
||||
boolean mongoInstanceCreated, String authenticationDatabaseName) {
|
||||
|
||||
if (mongo instanceof MongoClient && (credentials != null && !UserCredentials.NO_CREDENTIALS.equals(credentials))) {
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
"Usage of 'UserCredentials' with 'MongoClient' is no longer supported. Please use 'MongoCredential' for 'MongoClient' or just 'Mongo'.");
|
||||
}
|
||||
|
||||
Assert.notNull(mongo, "Mongo must not be null");
|
||||
Assert.hasText(databaseName, "Database name must not be empty");
|
||||
Assert.isTrue(databaseName.matches("[\\w-]+"),
|
||||
|
||||
@@ -153,7 +153,7 @@ public class Aggregation {
|
||||
protected Aggregation(List<AggregationOperation> aggregationOperations, AggregationOptions options) {
|
||||
|
||||
Assert.notNull(aggregationOperations, "AggregationOperations must not be null!");
|
||||
Assert.isTrue(aggregationOperations.size() > 0, "At least one AggregationOperation has to be provided");
|
||||
Assert.isTrue(!aggregationOperations.isEmpty(), "At least one AggregationOperation has to be provided");
|
||||
Assert.notNull(options, "AggregationOptions must not be null!");
|
||||
|
||||
this.operations = aggregationOperations;
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
@@ -56,7 +57,7 @@ public enum AggregationFunctionExpressions {
|
||||
static class FunctionExpression implements AggregationExpression {
|
||||
|
||||
private final String name;
|
||||
private final Object[] values;
|
||||
private final List<Object> values;
|
||||
|
||||
/**
|
||||
* Creates a new {@link FunctionExpression} for the given name and values.
|
||||
@@ -70,7 +71,7 @@ public enum AggregationFunctionExpressions {
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
|
||||
this.name = name;
|
||||
this.values = values;
|
||||
this.values = Arrays.asList(values);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -80,10 +81,10 @@ public enum AggregationFunctionExpressions {
|
||||
@Override
|
||||
public DBObject toDbObject(AggregationOperationContext context) {
|
||||
|
||||
List<Object> args = new ArrayList<Object>(values.length);
|
||||
List<Object> args = new ArrayList<Object>(values.size());
|
||||
|
||||
for (int i = 0; i < values.length; i++) {
|
||||
args.add(unpack(values[i], context));
|
||||
for (Object value : values) {
|
||||
args.add(unpack(value, context));
|
||||
}
|
||||
|
||||
return new BasicDBObject("$" + name, args);
|
||||
|
||||
@@ -88,7 +88,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} instance for the given fields in either sythetic or non-synthetic way.
|
||||
* Creates a new {@link ExposedFields} instance for the given fields in either synthetic or non-synthetic way.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @param synthetic
|
||||
@@ -107,7 +107,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFields} with the given orignals and synthetics.
|
||||
* Creates a new {@link ExposedFields} with the given originals and synthetics.
|
||||
*
|
||||
* @param originals must not be {@literal null}.
|
||||
* @param synthetic must not be {@literal null}.
|
||||
@@ -363,7 +363,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the referenve value for the given field reference. Will return 1 for a synthetic, unaliased field or the
|
||||
* Returns the reference value for the given field reference. Will return 1 for a synthetic, unaliased field or the
|
||||
* raw rendering of the reference otherwise.
|
||||
*
|
||||
* @return
|
||||
|
||||
@@ -21,6 +21,7 @@ import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.FieldProjection;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -40,6 +41,7 @@ import com.mongodb.DBObject;
|
||||
* @author Tobias Trelle
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @since 1.3
|
||||
*/
|
||||
public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
@@ -763,6 +765,20 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
return field;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#getExposedField()
|
||||
*/
|
||||
@Override
|
||||
public ExposedField getExposedField() {
|
||||
|
||||
if (!getField().isAliased()) {
|
||||
return super.getExposedField();
|
||||
}
|
||||
|
||||
return new ExposedField(new AggregationField(getField().getName()), true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new instance of this {@link OperationProjection} with the given alias.
|
||||
*
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,7 +20,7 @@ import java.math.BigInteger;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.support.ConversionServiceFactory;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.core.convert.support.GenericConversionService;
|
||||
import org.springframework.data.convert.EntityInstantiators;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToObjectIdConverter;
|
||||
@@ -46,10 +46,8 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali
|
||||
*
|
||||
* @param conversionService
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public AbstractMongoConverter(GenericConversionService conversionService) {
|
||||
this.conversionService = conversionService == null ? ConversionServiceFactory.createDefaultConversionService()
|
||||
: conversionService;
|
||||
this.conversionService = conversionService == null ? new DefaultConversionService() : conversionService;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -77,15 +75,13 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali
|
||||
*/
|
||||
private void initializeConverters() {
|
||||
|
||||
if (!conversionService.canConvert(ObjectId.class, String.class)) {
|
||||
conversionService.addConverter(ObjectIdToStringConverter.INSTANCE);
|
||||
}
|
||||
if (!conversionService.canConvert(String.class, ObjectId.class)) {
|
||||
conversionService.addConverter(StringToObjectIdConverter.INSTANCE);
|
||||
}
|
||||
conversionService.addConverter(ObjectIdToStringConverter.INSTANCE);
|
||||
conversionService.addConverter(StringToObjectIdConverter.INSTANCE);
|
||||
|
||||
if (!conversionService.canConvert(ObjectId.class, BigInteger.class)) {
|
||||
conversionService.addConverter(ObjectIdToBigIntegerConverter.INSTANCE);
|
||||
}
|
||||
|
||||
if (!conversionService.canConvert(BigInteger.class, ObjectId.class)) {
|
||||
conversionService.addConverter(BigIntegerToObjectIdConverter.INSTANCE);
|
||||
}
|
||||
|
||||
@@ -44,9 +44,9 @@ import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigDecimalToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.NamedMongoScriptToDBObjectConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.DBObjectToNamedMongoScriptCoverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.DBObjectToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.NamedMongoScriptToDBObjectConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigDecimalConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigIntegerConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToURLConverter;
|
||||
@@ -192,8 +192,8 @@ public class CustomConversions {
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a conversion for the given converter. Inspects either generics or the {@link ConvertiblePair}s returned
|
||||
* by a {@link GenericConverter}.
|
||||
* Registers a conversion for the given converter. Inspects either generics of {@link Converter} and
|
||||
* {@link ConverterFactory} or the {@link ConvertiblePair}s returned by a {@link GenericConverter}.
|
||||
*
|
||||
* @param converter
|
||||
*/
|
||||
@@ -208,6 +208,10 @@ public class CustomConversions {
|
||||
for (ConvertiblePair pair : genericConverter.getConvertibleTypes()) {
|
||||
register(new ConverterRegistration(pair, isReading, isWriting));
|
||||
}
|
||||
} else if (converter instanceof ConverterFactory) {
|
||||
|
||||
Class<?>[] arguments = GenericTypeResolver.resolveTypeArguments(converter.getClass(), ConverterFactory.class);
|
||||
register(new ConverterRegistration(arguments[0], arguments[1], isReading, isWriting));
|
||||
} else if (converter instanceof Converter) {
|
||||
Class<?>[] arguments = GenericTypeResolver.resolveTypeArguments(converter.getClass(), Converter.class);
|
||||
register(new ConverterRegistration(arguments[0], arguments[1], isReading, isWriting));
|
||||
|
||||
@@ -75,9 +75,7 @@ class DBObjectAccessor {
|
||||
String part = parts.next();
|
||||
|
||||
if (parts.hasNext()) {
|
||||
BasicDBObject nestedDbObject = new BasicDBObject();
|
||||
dbObject.put(part, nestedDbObject);
|
||||
dbObject = nestedDbObject;
|
||||
dbObject = getOrCreateNestedDbObject(part, dbObject);
|
||||
} else {
|
||||
dbObject.put(part, value);
|
||||
}
|
||||
@@ -116,8 +114,14 @@ class DBObjectAccessor {
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the given source object as map, i.e. {@link BasicDBObject}s and maps as is or {@literal null} otherwise.
|
||||
*
|
||||
* @param source can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
private Map<String, Object> getAsMap(Object source) {
|
||||
private static Map<String, Object> getAsMap(Object source) {
|
||||
|
||||
if (source instanceof BasicDBObject) {
|
||||
return (BasicDBObject) source;
|
||||
@@ -129,4 +133,26 @@ class DBObjectAccessor {
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link DBObject} which either already exists in the given source under the given key, or creates a new
|
||||
* nested one, registers it with the source and returns it.
|
||||
*
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @param source must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private static DBObject getOrCreateNestedDbObject(String key, DBObject source) {
|
||||
|
||||
Object existing = source.get(key);
|
||||
|
||||
if (existing instanceof BasicDBObject) {
|
||||
return (BasicDBObject) existing;
|
||||
}
|
||||
|
||||
DBObject nested = new BasicDBObject();
|
||||
source.put(key, nested);
|
||||
|
||||
return nested;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -106,7 +106,7 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
*/
|
||||
@Override
|
||||
public DBObject fetch(DBRef dbRef) {
|
||||
return ReflectiveDBRefResolver.fetch(mongoDbFactory.getDb(), dbRef);
|
||||
return ReflectiveDBRefResolver.fetch(mongoDbFactory, dbRef);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -111,9 +111,13 @@ abstract class GeoConverters {
|
||||
@Override
|
||||
public Point convert(DBObject source) {
|
||||
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Assert.isTrue(source.keySet().size() == 2, "Source must contain 2 elements");
|
||||
|
||||
return source == null ? null : new Point((Double) source.get("x"), (Double) source.get("y"));
|
||||
return new Point((Double) source.get("x"), (Double) source.get("y"));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -136,8 +136,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
* @param typeMapper the typeMapper to set
|
||||
*/
|
||||
public void setTypeMapper(MongoTypeMapper typeMapper) {
|
||||
this.typeMapper = typeMapper == null ? new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY,
|
||||
mappingContext) : typeMapper;
|
||||
this.typeMapper = typeMapper == null
|
||||
? new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext) : typeMapper;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -238,7 +238,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
PersistentEntityParameterValueProvider<MongoPersistentProperty> parameterProvider = new PersistentEntityParameterValueProvider<MongoPersistentProperty>(
|
||||
entity, provider, path.getCurrentObject());
|
||||
|
||||
return new ConverterAwareSpELExpressionParameterValueProvider(evaluator, conversionService, parameterProvider, path);
|
||||
return new ConverterAwareSpELExpressionParameterValueProvider(evaluator, conversionService, parameterProvider,
|
||||
path);
|
||||
}
|
||||
|
||||
private <S extends Object> S read(final MongoPersistentEntity<S> entity, final DBObject dbo, final ObjectPath path) {
|
||||
@@ -263,7 +264,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
accessor.setProperty(idProperty, idValue);
|
||||
}
|
||||
|
||||
final ObjectPath currentPath = path.push(result, entity, idValue);
|
||||
final ObjectPath currentPath = path.push(result, entity,
|
||||
idValue != null ? dbo.get(idProperty.getFieldName()) : null);
|
||||
|
||||
// Set properties not already set in the constructor
|
||||
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
@@ -289,7 +291,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
final MongoPersistentProperty property = association.getInverse();
|
||||
Object value = dbo.get(property.getFieldName());
|
||||
|
||||
if (value == null) {
|
||||
if (value == null || entity.isConstructorArgument(property)) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -510,8 +512,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
: new BasicDBObject();
|
||||
addCustomTypeKeyIfNecessary(ClassTypeInformation.from(prop.getRawType()), obj, propDbObj);
|
||||
|
||||
MongoPersistentEntity<?> entity = isSubtype(prop.getType(), obj.getClass()) ? mappingContext
|
||||
.getPersistentEntity(obj.getClass()) : mappingContext.getPersistentEntity(type);
|
||||
MongoPersistentEntity<?> entity = isSubtype(prop.getType(), obj.getClass())
|
||||
? mappingContext.getPersistentEntity(obj.getClass()) : mappingContext.getPersistentEntity(type);
|
||||
|
||||
writeInternal(obj, propDbObj, entity);
|
||||
accessor.put(prop, propDbObj);
|
||||
@@ -700,8 +702,10 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
if (mapKeyDotReplacement == null) {
|
||||
throw new MappingException(String.format("Map key %s contains dots but no replacement was configured! Make "
|
||||
+ "sure map keys don't contain dots in the first place or configure an appropriate replacement!", source));
|
||||
throw new MappingException(String.format(
|
||||
"Map key %s contains dots but no replacement was configured! Make "
|
||||
+ "sure map keys don't contain dots in the first place or configure an appropriate replacement!",
|
||||
source));
|
||||
}
|
||||
|
||||
return source.replaceAll("\\.", mapKeyDotReplacement);
|
||||
@@ -719,8 +723,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return (String) key;
|
||||
}
|
||||
|
||||
return conversions.hasCustomWriteTarget(key.getClass(), String.class) ? (String) getPotentiallyConvertedSimpleWrite(key)
|
||||
: key.toString();
|
||||
return conversions.hasCustomWriteTarget(key.getClass(), String.class)
|
||||
? (String) getPotentiallyConvertedSimpleWrite(key) : key.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -889,16 +893,16 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Class<?> rawComponentType = componentType == null ? null : componentType.getType();
|
||||
|
||||
collectionType = Collection.class.isAssignableFrom(collectionType) ? collectionType : List.class;
|
||||
Collection<Object> items = targetType.getType().isArray() ? new ArrayList<Object>() : CollectionFactory
|
||||
.createCollection(collectionType, rawComponentType, sourceValue.size());
|
||||
Collection<Object> items = targetType.getType().isArray() ? new ArrayList<Object>()
|
||||
: CollectionFactory.createCollection(collectionType, rawComponentType, sourceValue.size());
|
||||
|
||||
for (int i = 0; i < sourceValue.size(); i++) {
|
||||
|
||||
Object dbObjItem = sourceValue.get(i);
|
||||
|
||||
if (dbObjItem instanceof DBRef) {
|
||||
items.add(DBRef.class.equals(rawComponentType) ? dbObjItem : read(componentType, readRef((DBRef) dbObjItem),
|
||||
path));
|
||||
items.add(
|
||||
DBRef.class.equals(rawComponentType) ? dbObjItem : read(componentType, readRef((DBRef) dbObjItem), path));
|
||||
} else if (dbObjItem instanceof DBObject) {
|
||||
items.add(read(componentType, (DBObject) dbObjItem, path));
|
||||
} else {
|
||||
@@ -1016,10 +1020,14 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
this.write(obj, newDbo);
|
||||
|
||||
if (typeInformation == null) {
|
||||
return removeTypeInfoRecursively(newDbo);
|
||||
return removeTypeInfo(newDbo, true);
|
||||
}
|
||||
|
||||
return !obj.getClass().equals(typeInformation.getType()) ? newDbo : removeTypeInfoRecursively(newDbo);
|
||||
if (typeInformation.getType().equals(NestedDocument.class)) {
|
||||
return removeTypeInfo(newDbo, false);
|
||||
}
|
||||
|
||||
return !obj.getClass().equals(typeInformation.getType()) ? newDbo : removeTypeInfo(newDbo, true);
|
||||
}
|
||||
|
||||
public BasicDBList maybeConvertList(Iterable<?> source, TypeInformation<?> typeInformation) {
|
||||
@@ -1033,12 +1041,13 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the type information from the conversion result.
|
||||
* Removes the type information from the entire conversion result.
|
||||
*
|
||||
* @param object
|
||||
* @param recursively whether to apply the removal recursively
|
||||
* @return
|
||||
*/
|
||||
private Object removeTypeInfoRecursively(Object object) {
|
||||
private Object removeTypeInfo(Object object, boolean recursively) {
|
||||
|
||||
if (!(object instanceof DBObject)) {
|
||||
return object;
|
||||
@@ -1046,19 +1055,29 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
DBObject dbObject = (DBObject) object;
|
||||
String keyToRemove = null;
|
||||
|
||||
for (String key : dbObject.keySet()) {
|
||||
|
||||
if (typeMapper.isTypeKey(key)) {
|
||||
keyToRemove = key;
|
||||
if (recursively) {
|
||||
|
||||
Object value = dbObject.get(key);
|
||||
|
||||
if (value instanceof BasicDBList) {
|
||||
for (Object element : (BasicDBList) value) {
|
||||
removeTypeInfo(element, recursively);
|
||||
}
|
||||
} else {
|
||||
removeTypeInfo(value, recursively);
|
||||
}
|
||||
}
|
||||
|
||||
Object value = dbObject.get(key);
|
||||
if (value instanceof BasicDBList) {
|
||||
for (Object element : (BasicDBList) value) {
|
||||
removeTypeInfoRecursively(element);
|
||||
if (typeMapper.isTypeKey(key)) {
|
||||
|
||||
keyToRemove = key;
|
||||
|
||||
if (!recursively) {
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
removeTypeInfoRecursively(value);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1122,8 +1141,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private class ConverterAwareSpELExpressionParameterValueProvider extends
|
||||
SpELExpressionParameterValueProvider<MongoPersistentProperty> {
|
||||
private class ConverterAwareSpELExpressionParameterValueProvider
|
||||
extends SpELExpressionParameterValueProvider<MongoPersistentProperty> {
|
||||
|
||||
private final ObjectPath path;
|
||||
|
||||
@@ -1135,7 +1154,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
* @param delegate must not be {@literal null}.
|
||||
*/
|
||||
public ConverterAwareSpELExpressionParameterValueProvider(SpELExpressionEvaluator evaluator,
|
||||
ConversionService conversionService, ParameterValueProvider<MongoPersistentProperty> delegate, ObjectPath path) {
|
||||
ConversionService conversionService, ParameterValueProvider<MongoPersistentProperty> delegate,
|
||||
ObjectPath path) {
|
||||
|
||||
super(evaluator, conversionService, delegate);
|
||||
this.path = path;
|
||||
@@ -1178,10 +1198,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
Object object = dbref == null ? null : path.getPathItem(dbref.getId(), dbref.getCollectionName());
|
||||
|
||||
if (object != null) {
|
||||
return (T) object;
|
||||
}
|
||||
|
||||
return (T) (object != null ? object : read(type, readRef(dbref), path));
|
||||
}
|
||||
|
||||
@@ -1194,4 +1210,15 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
DBObject readRef(DBRef ref) {
|
||||
return dbRefResolver.fetch(ref);
|
||||
}
|
||||
|
||||
/**
|
||||
* Marker class used to indicate we have a non root document object here that might be used within an update - so we
|
||||
* need to preserve type hints for potential nested elements but need to remove it on top level.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.8
|
||||
*/
|
||||
static class NestedDocument {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,10 +34,13 @@ import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.PersistentPropertyPath;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter.NestedDocument;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
@@ -58,6 +61,7 @@ public class QueryMapper {
|
||||
|
||||
private static final List<String> DEFAULT_ID_NAMES = Arrays.asList("id", "_id");
|
||||
private static final DBObject META_TEXT_SCORE = new BasicDBObject("$meta", "textScore");
|
||||
static final ClassTypeInformation<?> NESTED_DOCUMENT = ClassTypeInformation.from(NestedDocument.class);
|
||||
|
||||
private enum MetaMapping {
|
||||
FORCE, WHEN_PRESENT, IGNORE;
|
||||
@@ -250,8 +254,8 @@ public class QueryMapper {
|
||||
boolean needsAssociationConversion = property.isAssociation() && !keyword.isExists();
|
||||
Object value = keyword.getValue();
|
||||
|
||||
Object convertedValue = needsAssociationConversion ? convertAssociation(value, property) : getMappedValue(
|
||||
property.with(keyword.getKey()), value);
|
||||
Object convertedValue = needsAssociationConversion ? convertAssociation(value, property)
|
||||
: getMappedValue(property.with(keyword.getKey()), value);
|
||||
|
||||
return new BasicDBObject(keyword.key, convertedValue);
|
||||
}
|
||||
@@ -473,8 +477,8 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
try {
|
||||
return conversionService.canConvert(id.getClass(), ObjectId.class) ? conversionService
|
||||
.convert(id, ObjectId.class) : delegateConvertToMongoType(id, null);
|
||||
return conversionService.canConvert(id.getClass(), ObjectId.class) ? conversionService.convert(id, ObjectId.class)
|
||||
: delegateConvertToMongoType(id, null);
|
||||
} catch (ConversionException o_O) {
|
||||
return delegateConvertToMongoType(id, null);
|
||||
}
|
||||
@@ -657,6 +661,10 @@ public class QueryMapper {
|
||||
public Association<MongoPersistentProperty> getAssociation() {
|
||||
return null;
|
||||
}
|
||||
|
||||
public TypeInformation<?> getTypeHint() {
|
||||
return ClassTypeInformation.OBJECT;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -816,7 +824,7 @@ public class QueryMapper {
|
||||
|
||||
try {
|
||||
|
||||
PropertyPath path = PropertyPath.from(pathExpression, entity.getTypeInformation());
|
||||
PropertyPath path = PropertyPath.from(pathExpression.replaceAll("\\.\\d", ""), entity.getTypeInformation());
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(path);
|
||||
|
||||
Iterator<MongoPersistentProperty> iterator = propertyPath.iterator();
|
||||
@@ -849,7 +857,7 @@ public class QueryMapper {
|
||||
* @return
|
||||
*/
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return PropertyToFieldNameConverter.INSTANCE;
|
||||
return new PositionParameterRetainingPropertyKeyConverter(name);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -862,6 +870,104 @@ public class QueryMapper {
|
||||
protected Converter<MongoPersistentProperty, String> getAssociationConverter() {
|
||||
return new AssociationConverter(getAssociation());
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 1.8
|
||||
*/
|
||||
static class PositionParameterRetainingPropertyKeyConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final KeyMapper keyMapper;
|
||||
|
||||
public PositionParameterRetainingPropertyKeyConverter(String rawKey) {
|
||||
this.keyMapper = new KeyMapper(rawKey);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty source) {
|
||||
return keyMapper.mapPropertyName(source);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getTypeHint()
|
||||
*/
|
||||
@Override
|
||||
public TypeInformation<?> getTypeHint() {
|
||||
|
||||
MongoPersistentProperty property = getProperty();
|
||||
|
||||
if (property == null) {
|
||||
return super.getTypeHint();
|
||||
}
|
||||
|
||||
if (property.getActualType().isInterface()
|
||||
|| java.lang.reflect.Modifier.isAbstract(property.getActualType().getModifiers())) {
|
||||
return ClassTypeInformation.OBJECT;
|
||||
}
|
||||
|
||||
return NESTED_DOCUMENT;
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 1.8
|
||||
*/
|
||||
static class KeyMapper {
|
||||
|
||||
private final Iterator<String> iterator;
|
||||
|
||||
public KeyMapper(String key) {
|
||||
|
||||
this.iterator = Arrays.asList(key.split("\\.")).iterator();
|
||||
this.iterator.next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps the property name while retaining potential positional operator {@literal $}.
|
||||
*
|
||||
* @param property
|
||||
* @return
|
||||
*/
|
||||
protected String mapPropertyName(MongoPersistentProperty property) {
|
||||
|
||||
StringBuilder mappedName = new StringBuilder(PropertyToFieldNameConverter.INSTANCE.convert(property));
|
||||
boolean inspect = iterator.hasNext();
|
||||
|
||||
while (inspect) {
|
||||
|
||||
String partial = iterator.next();
|
||||
boolean isPositional = (isPositionalParameter(partial) && (property.isMap() || property.isCollectionLike()));
|
||||
|
||||
if (isPositional) {
|
||||
mappedName.append(".").append(partial);
|
||||
}
|
||||
|
||||
inspect = isPositional && iterator.hasNext();
|
||||
}
|
||||
|
||||
return mappedName.toString();
|
||||
}
|
||||
|
||||
private static boolean isPositionalParameter(String partial) {
|
||||
|
||||
if ("$".equals(partial)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
Long.valueOf(partial);
|
||||
return true;
|
||||
} catch (NumberFormatException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -20,9 +20,9 @@ import static org.springframework.util.ReflectionUtils.*;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.DBRef;
|
||||
@@ -51,12 +51,14 @@ class ReflectiveDBRefResolver {
|
||||
* @param ref must not be {@literal null}.
|
||||
* @return the document that this references.
|
||||
*/
|
||||
public static DBObject fetch(DB db, DBRef ref) {
|
||||
public static DBObject fetch(MongoDbFactory factory, DBRef ref) {
|
||||
|
||||
Assert.notNull(ref, "DBRef to fetch must not be null!");
|
||||
|
||||
if (isMongo3Driver()) {
|
||||
return db.getCollection(ref.getCollectionName()).findOne(ref.getId());
|
||||
|
||||
Assert.notNull(factory, "DbFactory to fetch DB from must not be null!");
|
||||
return factory.getDb().getCollection(ref.getCollectionName()).findOne(ref.getId());
|
||||
}
|
||||
|
||||
return (DBObject) invokeMethod(FETCH_METHOD, ref);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,8 +15,6 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
@@ -24,12 +22,11 @@ import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update.Modifier;
|
||||
import org.springframework.data.mongodb.core.query.Update.Modifiers;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
@@ -65,8 +62,8 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
protected Object delegateConvertToMongoType(Object source, MongoPersistentEntity<?> entity) {
|
||||
return entity == null ? super.delegateConvertToMongoType(source, null) : converter.convertToMongoType(source,
|
||||
entity.getTypeInformation());
|
||||
return converter.convertToMongoType(source,
|
||||
entity == null ? ClassTypeInformation.OBJECT : getTypeHintForEntity(source, entity));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -89,7 +86,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
return getMappedUpdateModifier(field, rawValue);
|
||||
}
|
||||
|
||||
return super.getMappedObjectForField(field, getMappedValue(field, rawValue));
|
||||
return super.getMappedObjectForField(field, rawValue);
|
||||
}
|
||||
|
||||
private Entry<String, Object> getMappedUpdateModifier(Field field, Object rawValue) {
|
||||
@@ -97,14 +94,14 @@ public class UpdateMapper extends QueryMapper {
|
||||
|
||||
if (rawValue instanceof Modifier) {
|
||||
|
||||
value = getMappedValue((Modifier) rawValue);
|
||||
value = getMappedValue(field, (Modifier) rawValue);
|
||||
|
||||
} else if (rawValue instanceof Modifiers) {
|
||||
|
||||
DBObject modificationOperations = new BasicDBObject();
|
||||
|
||||
for (Modifier modifier : ((Modifiers) rawValue).getModifiers()) {
|
||||
modificationOperations.putAll(getMappedValue(modifier).toMap());
|
||||
modificationOperations.putAll(getMappedValue(field, modifier).toMap());
|
||||
}
|
||||
|
||||
value = modificationOperations;
|
||||
@@ -132,12 +129,30 @@ public class UpdateMapper extends QueryMapper {
|
||||
return value instanceof Query;
|
||||
}
|
||||
|
||||
private DBObject getMappedValue(Modifier modifier) {
|
||||
private DBObject getMappedValue(Field field, Modifier modifier) {
|
||||
|
||||
Object value = converter.convertToMongoType(modifier.getValue(), ClassTypeInformation.OBJECT);
|
||||
TypeInformation<?> typeHint = field == null ? ClassTypeInformation.OBJECT : field.getTypeHint();
|
||||
|
||||
Object value = converter.convertToMongoType(modifier.getValue(), typeHint);
|
||||
return new BasicDBObject(modifier.getKey(), value);
|
||||
}
|
||||
|
||||
private TypeInformation<?> getTypeHintForEntity(Object source, MongoPersistentEntity<?> entity) {
|
||||
|
||||
TypeInformation<?> info = entity.getTypeInformation();
|
||||
Class<?> type = info.getActualType().getType();
|
||||
|
||||
if (source == null || type.isInterface() || java.lang.reflect.Modifier.isAbstract(type.getModifiers())) {
|
||||
return info;
|
||||
}
|
||||
|
||||
if (!type.equals(source.getClass())) {
|
||||
return info;
|
||||
}
|
||||
|
||||
return NESTED_DOCUMENT;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper#createPropertyField(org.springframework.data.mongodb.core.mapping.MongoPersistentEntity, java.lang.String, org.springframework.data.mapping.context.MappingContext)
|
||||
@@ -146,8 +161,8 @@ public class UpdateMapper extends QueryMapper {
|
||||
protected Field createPropertyField(MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
return entity == null ? super.createPropertyField(entity, key, mappingContext) : //
|
||||
new MetadataBackedUpdateField(entity, key, mappingContext);
|
||||
return entity == null ? super.createPropertyField(entity, key, mappingContext)
|
||||
: new MetadataBackedUpdateField(entity, key, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -194,7 +209,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return new UpdatePropertyConverter(key);
|
||||
return new PositionParameterRetainingPropertyKeyConverter(key);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -206,71 +221,6 @@ public class UpdateMapper extends QueryMapper {
|
||||
return new UpdateAssociationConverter(getAssociation(), key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Special mapper handling positional parameter {@literal $} within property names.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
*/
|
||||
private static class UpdateKeyMapper {
|
||||
|
||||
private final Iterator<String> iterator;
|
||||
|
||||
protected UpdateKeyMapper(String rawKey) {
|
||||
|
||||
Assert.hasText(rawKey, "Key must not be null or empty!");
|
||||
|
||||
this.iterator = Arrays.asList(rawKey.split("\\.")).iterator();
|
||||
this.iterator.next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps the property name while retaining potential positional operator {@literal $}.
|
||||
*
|
||||
* @param property
|
||||
* @return
|
||||
*/
|
||||
protected String mapPropertyName(MongoPersistentProperty property) {
|
||||
|
||||
String mappedName = PropertyToFieldNameConverter.INSTANCE.convert(property);
|
||||
return iterator.hasNext() && iterator.next().equals("$") ? String.format("%s.$", mappedName) : mappedName;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Special {@link Converter} for {@link MongoPersistentProperty} instances that will concatenate the {@literal $}
|
||||
* contained in the source update key.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private static class UpdatePropertyConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final UpdateKeyMapper mapper;
|
||||
|
||||
/**
|
||||
* Creates a new {@link UpdatePropertyConverter} with the given update key.
|
||||
*
|
||||
* @param updateKey must not be {@literal null} or empty.
|
||||
*/
|
||||
public UpdatePropertyConverter(String updateKey) {
|
||||
|
||||
Assert.hasText(updateKey, "Update key must not be null or empty!");
|
||||
|
||||
this.mapper = new UpdateKeyMapper(updateKey);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty property) {
|
||||
return mapper.mapPropertyName(property);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Converter} retaining positional parameter {@literal $} for {@link Association}s.
|
||||
*
|
||||
@@ -278,7 +228,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
protected static class UpdateAssociationConverter extends AssociationConverter {
|
||||
|
||||
private final UpdateKeyMapper mapper;
|
||||
private final KeyMapper mapper;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AssociationConverter} for the given {@link Association}.
|
||||
@@ -288,7 +238,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
public UpdateAssociationConverter(Association<MongoPersistentProperty> association, String key) {
|
||||
|
||||
super(association);
|
||||
this.mapper = new UpdateKeyMapper(key);
|
||||
this.mapper = new KeyMapper(key);
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
@@ -44,7 +45,7 @@ public class Index implements IndexDefinition {
|
||||
*
|
||||
* @deprecated since 1.7.
|
||||
*/
|
||||
@Deprecated//
|
||||
@Deprecated //
|
||||
DROP
|
||||
}
|
||||
|
||||
@@ -175,11 +176,18 @@ public class Index implements IndexDefinition {
|
||||
return unique();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexKeys()
|
||||
*/
|
||||
public DBObject getIndexKeys() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
for (String k : fieldSpec.keySet()) {
|
||||
dbo.put(k, fieldSpec.get(k).equals(Direction.ASC) ? 1 : -1);
|
||||
|
||||
for (Entry<String, Direction> entry : fieldSpec.entrySet()) {
|
||||
dbo.put(entry.getKey(), Direction.ASC.equals(entry.getValue()) ? 1 : -1);
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
* Copyright 2014-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,22 +16,24 @@
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
|
||||
/**
|
||||
* {@link IndexResolver} finds those {@link IndexDefinition}s to be created for a given class.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
* @since 1.5
|
||||
*/
|
||||
interface IndexResolver {
|
||||
|
||||
/**
|
||||
* Find and create {@link IndexDefinition}s for properties of given {@code type}. {@link IndexDefinition}s are created
|
||||
* Find and create {@link IndexDefinition}s for properties of given {@link TypeInformation}. {@link IndexDefinition}s are created
|
||||
* for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}.
|
||||
*
|
||||
* @param type
|
||||
* @param typeInformation
|
||||
* @return Empty {@link Iterable} in case no {@link IndexDefinition} could be resolved for type.
|
||||
*/
|
||||
Iterable<? extends IndexDefinitionHolder> resolveIndexForClass(Class<?> type);
|
||||
Iterable<? extends IndexDefinitionHolder> resolveIndexFor(TypeInformation<?> typeInformation);
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -60,4 +60,10 @@ public class MongoMappingEventPublisher implements ApplicationEventPublisher {
|
||||
indexCreator.onApplicationEvent((MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>) event);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.context.ApplicationEventPublisher#publishEvent(java.lang.Object)
|
||||
*/
|
||||
public void publishEvent(Object event) {}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -29,7 +29,6 @@ import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexRes
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
@@ -43,8 +42,7 @@ import org.springframework.util.Assert;
|
||||
* @author Laurent Canet
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MongoPersistentEntityIndexCreator implements
|
||||
ApplicationListener<MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>> {
|
||||
public class MongoPersistentEntityIndexCreator implements ApplicationListener<MappingContextEvent<?, ?>> {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class);
|
||||
|
||||
@@ -54,7 +52,7 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
private final IndexResolver indexResolver;
|
||||
|
||||
/**
|
||||
* Creats a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* {@link MongoDbFactory}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
@@ -65,7 +63,7 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
}
|
||||
|
||||
/**
|
||||
* Creats a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* {@link MongoDbFactory}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
@@ -92,7 +90,7 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent)
|
||||
*/
|
||||
public void onApplicationEvent(MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty> event) {
|
||||
public void onApplicationEvent(MappingContextEvent<?, ?> event) {
|
||||
|
||||
if (!event.wasEmittedBy(mappingContext)) {
|
||||
return;
|
||||
@@ -102,7 +100,7 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
|
||||
// Double check type as Spring infrastructure does not consider nested generics
|
||||
if (entity instanceof MongoPersistentEntity) {
|
||||
checkForIndexes(event.getPersistentEntity());
|
||||
checkForIndexes((MongoPersistentEntity<?>) entity);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -125,15 +123,15 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
private void checkForAndCreateIndexes(MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (entity.findAnnotation(Document.class) != null) {
|
||||
for (IndexDefinitionHolder indexToCreate : indexResolver.resolveIndexForClass(entity.getType())) {
|
||||
for (IndexDefinitionHolder indexToCreate : indexResolver.resolveIndexFor(entity.getTypeInformation())) {
|
||||
createIndex(indexToCreate);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void createIndex(IndexDefinitionHolder indexDefinition) {
|
||||
mongoDbFactory.getDb().getCollection(indexDefinition.getCollection())
|
||||
.createIndex(indexDefinition.getIndexKeys(), indexDefinition.getIndexOptions());
|
||||
mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()).createIndex(indexDefinition.getIndexKeys(),
|
||||
indexDefinition.getIndexOptions());
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -27,7 +27,10 @@ import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.AssociationHandler;
|
||||
import org.springframework.data.mapping.PropertyHandler;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.core.index.Index.Duplicates;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.TextIndexIncludeOptions.IncludeStrategy;
|
||||
import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexDefinitionBuilder;
|
||||
@@ -36,6 +39,7 @@ import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
@@ -51,6 +55,7 @@ import com.mongodb.util.JSON;
|
||||
* scanning related annotations.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
* @since 1.5
|
||||
*/
|
||||
public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
@@ -70,13 +75,12 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
this.mappingContext = mappingContext;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexResolver#resolveIndexForClass(java.lang.Class)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexResolver#resolveIndexForClass(org.springframework.data.util.TypeInformation)
|
||||
*/
|
||||
@Override
|
||||
public List<IndexDefinitionHolder> resolveIndexForClass(Class<?> type) {
|
||||
return resolveIndexForEntity(mappingContext.getPersistentEntity(type));
|
||||
public Iterable<? extends IndexDefinitionHolder> resolveIndexFor(TypeInformation<?> typeInformation) {
|
||||
return resolveIndexForEntity(mappingContext.getPersistentEntity(typeInformation));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -107,7 +111,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
try {
|
||||
if (persistentProperty.isEntity()) {
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getActualType(),
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getTypeInformation().getActualType(),
|
||||
persistentProperty.getFieldName(), root.getCollection(), guard));
|
||||
}
|
||||
|
||||
@@ -122,6 +126,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
}
|
||||
});
|
||||
|
||||
indexInformation.addAll(resolveIndexesForDbrefs("", root.getCollection(), root));
|
||||
|
||||
return indexInformation;
|
||||
}
|
||||
|
||||
@@ -134,7 +140,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
* @return List of {@link IndexDefinitionHolder} representing indexes for given type and its referenced property
|
||||
* types. Will never be {@code null}.
|
||||
*/
|
||||
private List<IndexDefinitionHolder> resolveIndexForClass(final Class<?> type, final String path,
|
||||
private List<IndexDefinitionHolder> resolveIndexForClass(final TypeInformation<?> type, final String path,
|
||||
final String collection, final CycleGuard guard) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(type);
|
||||
@@ -152,8 +158,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
if (persistentProperty.isEntity()) {
|
||||
try {
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getActualType(), propertyDotPath,
|
||||
collection, guard));
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getTypeInformation().getActualType(),
|
||||
propertyDotPath, collection, guard));
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage());
|
||||
}
|
||||
@@ -167,6 +173,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
}
|
||||
});
|
||||
|
||||
indexInformation.addAll(resolveIndexesForDbrefs(path, collection, entity));
|
||||
|
||||
return indexInformation;
|
||||
}
|
||||
|
||||
@@ -192,18 +200,19 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return createCompoundIndexDefinitions(dotPath, collection, entity);
|
||||
}
|
||||
|
||||
private Collection<? extends IndexDefinitionHolder> potentiallyCreateTextIndexDefinition(MongoPersistentEntity<?> root) {
|
||||
private Collection<? extends IndexDefinitionHolder> potentiallyCreateTextIndexDefinition(
|
||||
MongoPersistentEntity<?> root) {
|
||||
|
||||
TextIndexDefinitionBuilder indexDefinitionBuilder = new TextIndexDefinitionBuilder().named(root.getType()
|
||||
.getSimpleName() + "_TextIndex");
|
||||
TextIndexDefinitionBuilder indexDefinitionBuilder = new TextIndexDefinitionBuilder()
|
||||
.named(root.getType().getSimpleName() + "_TextIndex");
|
||||
|
||||
if (StringUtils.hasText(root.getLanguage())) {
|
||||
indexDefinitionBuilder.withDefaultLanguage(root.getLanguage());
|
||||
}
|
||||
|
||||
try {
|
||||
appendTextIndexInformation("", indexDefinitionBuilder, root,
|
||||
new TextIndexIncludeOptions(IncludeStrategy.DEFAULT), new CycleGuard());
|
||||
appendTextIndexInformation("", indexDefinitionBuilder, root, new TextIndexIncludeOptions(IncludeStrategy.DEFAULT),
|
||||
new CycleGuard());
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage());
|
||||
}
|
||||
@@ -219,9 +228,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
}
|
||||
|
||||
private void appendTextIndexInformation(final String dotPath,
|
||||
final TextIndexDefinitionBuilder indexDefinitionBuilder, final MongoPersistentEntity<?> entity,
|
||||
final TextIndexIncludeOptions includeOptions, final CycleGuard guard) {
|
||||
private void appendTextIndexInformation(final String dotPath, final TextIndexDefinitionBuilder indexDefinitionBuilder,
|
||||
final MongoPersistentEntity<?> entity, final TextIndexIncludeOptions includeOptions, final CycleGuard guard) {
|
||||
|
||||
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
|
||||
@@ -248,8 +256,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
TextIndexIncludeOptions optionsForNestedType = includeOptions;
|
||||
if (!IncludeStrategy.FORCE.equals(includeOptions.getStrategy()) && indexed != null) {
|
||||
optionsForNestedType = new TextIndexIncludeOptions(IncludeStrategy.FORCE, new TextIndexedFieldSpec(
|
||||
propertyDotPath, weight));
|
||||
optionsForNestedType = new TextIndexIncludeOptions(IncludeStrategy.FORCE,
|
||||
new TextIndexedFieldSpec(propertyDotPath, weight));
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -258,9 +266,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage(), e);
|
||||
} catch (InvalidDataAccessApiUsageException e) {
|
||||
LOGGER.info(
|
||||
String.format("Potentially invald index structure discovered. Breaking operation for %s.",
|
||||
entity.getName()), e);
|
||||
LOGGER.info(String.format("Potentially invalid index structure discovered. Breaking operation for %s.",
|
||||
entity.getName()), e);
|
||||
}
|
||||
} else if (includeOptions.isForce() || indexed != null) {
|
||||
indexDefinitionBuilder.onField(propertyDotPath, weight);
|
||||
@@ -305,8 +312,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
protected IndexDefinitionHolder createCompoundIndexDefinition(String dotPath, String fallbackCollection,
|
||||
CompoundIndex index, MongoPersistentEntity<?> entity) {
|
||||
|
||||
CompoundIndexDefinition indexDefinition = new CompoundIndexDefinition(resolveCompoundIndexKeyFromStringDefinition(
|
||||
dotPath, index.def()));
|
||||
CompoundIndexDefinition indexDefinition = new CompoundIndexDefinition(
|
||||
resolveCompoundIndexKeyFromStringDefinition(dotPath, index.def()));
|
||||
|
||||
if (!index.useGeneratedName()) {
|
||||
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, null));
|
||||
@@ -430,13 +437,45 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
if (StringUtils.hasText(dotPath)) {
|
||||
|
||||
nameToUse = StringUtils.hasText(nameToUse) ? (property != null ? dotPath.replace("." + property.getFieldName(),
|
||||
"") : dotPath) + "." + nameToUse : dotPath;
|
||||
nameToUse = StringUtils.hasText(nameToUse)
|
||||
? (property != null ? dotPath.replace("." + property.getFieldName(), "") : dotPath) + "." + nameToUse
|
||||
: dotPath;
|
||||
}
|
||||
return nameToUse;
|
||||
|
||||
}
|
||||
|
||||
private List<IndexDefinitionHolder> resolveIndexesForDbrefs(final String path, final String collection,
|
||||
MongoPersistentEntity<?> entity) {
|
||||
|
||||
final List<IndexDefinitionHolder> indexes = new ArrayList<IndexDefinitionHolder>(0);
|
||||
entity.doWithAssociations(new AssociationHandler<MongoPersistentProperty>() {
|
||||
|
||||
@Override
|
||||
public void doWithAssociation(Association<MongoPersistentProperty> association) {
|
||||
|
||||
MongoPersistentProperty property = association.getInverse();
|
||||
|
||||
String propertyDotPath = (StringUtils.hasText(path) ? path + "." : "") + property.getFieldName();
|
||||
|
||||
if (property.isAnnotationPresent(GeoSpatialIndexed.class) || property.isAnnotationPresent(TextIndexed.class)) {
|
||||
throw new MappingException(
|
||||
String.format("Cannot create geospatial-/text- index on DBRef in collection '%s' for path '%s'.",
|
||||
collection, propertyDotPath));
|
||||
}
|
||||
|
||||
IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(propertyDotPath,
|
||||
collection, property);
|
||||
|
||||
if (indexDefinitionHolder != null) {
|
||||
indexes.add(indexDefinitionHolder);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return indexes;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link CycleGuard} holds information about properties and the paths for accessing those. This information is used
|
||||
* to detect potential cycles within the references.
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
@@ -305,28 +306,44 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
*/
|
||||
private static class PropertyTypeAssertionHandler implements PropertyHandler<MongoPersistentProperty> {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.PropertyHandler#doWithPersistentProperty(org.springframework.data.mapping.PersistentProperty)
|
||||
*/
|
||||
@Override
|
||||
public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
potentiallyAssertTextScoreType(persistentProperty);
|
||||
potentiallyAssertLanguageType(persistentProperty);
|
||||
potentiallyAssertDBRefTargetType(persistentProperty);
|
||||
}
|
||||
|
||||
private void potentiallyAssertLanguageType(MongoPersistentProperty persistentProperty) {
|
||||
private static void potentiallyAssertLanguageType(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
if (persistentProperty.isExplicitLanguageProperty()) {
|
||||
assertPropertyType(persistentProperty, String.class);
|
||||
}
|
||||
}
|
||||
|
||||
private void potentiallyAssertTextScoreType(MongoPersistentProperty persistentProperty) {
|
||||
private static void potentiallyAssertTextScoreType(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
if (persistentProperty.isTextScoreProperty()) {
|
||||
assertPropertyType(persistentProperty, Float.class, Double.class);
|
||||
}
|
||||
}
|
||||
|
||||
private void assertPropertyType(MongoPersistentProperty persistentProperty, Class<?>... validMatches) {
|
||||
private static void potentiallyAssertDBRefTargetType(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
if (persistentProperty.isDbReference() && persistentProperty.getDBRef().lazy()) {
|
||||
if (persistentProperty.isArray() || Modifier.isFinal(persistentProperty.getActualType().getModifiers())) {
|
||||
throw new MappingException(String.format(
|
||||
"Invalid lazy DBRef property for %s. Found %s which must not be an array nor a final class.",
|
||||
persistentProperty.getField(), persistentProperty.getActualType()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static void assertPropertyType(MongoPersistentProperty persistentProperty, Class<?>... validMatches) {
|
||||
|
||||
for (Class<?> potentialMatch : validMatches) {
|
||||
if (ClassUtils.isAssignable(potentialMatch, persistentProperty.getActualType())) {
|
||||
@@ -334,10 +351,9 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
}
|
||||
}
|
||||
|
||||
throw new MappingException(String.format("Missmatching types for %s. Found %s expected one of %s.",
|
||||
persistentProperty.getField(), persistentProperty.getActualType(),
|
||||
StringUtils.arrayToCommaDelimitedString(validMatches)));
|
||||
throw new MappingException(
|
||||
String.format("Missmatching types for %s. Found %s expected one of %s.", persistentProperty.getField(),
|
||||
persistentProperty.getActualType(), StringUtils.arrayToCommaDelimitedString(validMatches)));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -45,7 +45,7 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
int getFieldOrder();
|
||||
|
||||
/**
|
||||
* Returns whether the propert is a {@link com.mongodb.DBRef}. If this returns {@literal true} you can expect
|
||||
* Returns whether the property is a {@link com.mongodb.DBRef}. If this returns {@literal true} you can expect
|
||||
* {@link #getDBRef()} to return an non-{@literal null} value.
|
||||
*
|
||||
* @return
|
||||
|
||||
@@ -45,6 +45,8 @@ public class MapReduceOptions {
|
||||
|
||||
private Boolean verbose = true;
|
||||
|
||||
private Integer limit;
|
||||
|
||||
private Map<String, Object> extraOptions = new HashMap<String, Object>();
|
||||
|
||||
/**
|
||||
@@ -64,6 +66,8 @@ public class MapReduceOptions {
|
||||
* @return MapReduceOptions so that methods can be chained in a fluent API style
|
||||
*/
|
||||
public MapReduceOptions limit(int limit) {
|
||||
|
||||
this.limit = limit;
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -247,6 +251,15 @@ public class MapReduceOptions {
|
||||
return this.scopeVariables;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the maximum number of documents for the input into the map function.
|
||||
*
|
||||
* @return {@literal null} if not set.
|
||||
*/
|
||||
public Integer getLimit() {
|
||||
return limit;
|
||||
}
|
||||
|
||||
public DBObject getOptionsObject() {
|
||||
BasicDBObject cmd = new BasicDBObject();
|
||||
|
||||
@@ -264,6 +277,10 @@ public class MapReduceOptions {
|
||||
cmd.put("scope", scopeVariables);
|
||||
}
|
||||
|
||||
if (limit != null) {
|
||||
cmd.put("limit", limit);
|
||||
}
|
||||
|
||||
if (!extraOptions.keySet().isEmpty()) {
|
||||
cmd.putAll(extraOptions);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,13 +17,10 @@ package org.springframework.data.mongodb.core.mapreduce;
|
||||
|
||||
public class MapReduceTiming {
|
||||
|
||||
private long mapTime;
|
||||
|
||||
private long emitLoopTime;
|
||||
|
||||
private long totalTime;
|
||||
private long mapTime, emitLoopTime, totalTime;
|
||||
|
||||
public MapReduceTiming(long mapTime, long emitLoopTime, long totalTime) {
|
||||
|
||||
this.mapTime = mapTime;
|
||||
this.emitLoopTime = emitLoopTime;
|
||||
this.totalTime = totalTime;
|
||||
@@ -41,37 +38,52 @@ public class MapReduceTiming {
|
||||
return totalTime;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return "MapReduceTiming [mapTime=" + mapTime + ", emitLoopTime=" + emitLoopTime + ", totalTime=" + totalTime + "]";
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
|
||||
result = prime * result + (int) (emitLoopTime ^ (emitLoopTime >>> 32));
|
||||
result = prime * result + (int) (mapTime ^ (mapTime >>> 32));
|
||||
result = prime * result + (int) (totalTime ^ (totalTime >>> 32));
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/*
|
||||
*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj)
|
||||
return true;
|
||||
if (obj == null)
|
||||
return false;
|
||||
if (getClass() != obj.getClass())
|
||||
return false;
|
||||
MapReduceTiming other = (MapReduceTiming) obj;
|
||||
if (emitLoopTime != other.emitLoopTime)
|
||||
return false;
|
||||
if (mapTime != other.mapTime)
|
||||
return false;
|
||||
if (totalTime != other.totalTime)
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(obj instanceof MapReduceTiming)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
MapReduceTiming that = (MapReduceTiming) obj;
|
||||
|
||||
return this.emitLoopTime == that.emitLoopTime && //
|
||||
this.mapTime == that.mapTime && //
|
||||
this.totalTime == that.totalTime;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
* Copyright 2010-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -28,6 +28,7 @@ import com.mongodb.util.JSON;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
* @author John Willemin
|
||||
*/
|
||||
public class BasicQuery extends Query {
|
||||
|
||||
@@ -70,6 +71,19 @@ public class BasicQuery extends Query {
|
||||
|
||||
@Override
|
||||
public DBObject getFieldsObject() {
|
||||
|
||||
if (fieldsObject == null) {
|
||||
return super.getFieldsObject();
|
||||
}
|
||||
|
||||
if (super.getFieldsObject() != null) {
|
||||
|
||||
DBObject combinedFieldsObject = new BasicDBObject();
|
||||
combinedFieldsObject.putAll(fieldsObject);
|
||||
combinedFieldsObject.putAll(super.getFieldsObject());
|
||||
return combinedFieldsObject;
|
||||
}
|
||||
|
||||
return fieldsObject;
|
||||
}
|
||||
|
||||
|
||||
@@ -22,6 +22,7 @@ import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.bson.BSON;
|
||||
@@ -118,7 +119,7 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
private boolean lastOperatorWasNot() {
|
||||
return this.criteria.size() > 0 && "$not".equals(this.criteria.keySet().toArray()[this.criteria.size() - 1]);
|
||||
return !this.criteria.isEmpty() && "$not".equals(this.criteria.keySet().toArray()[this.criteria.size() - 1]);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -565,9 +566,10 @@ public class Criteria implements CriteriaDefinition {
|
||||
DBObject dbo = new BasicDBObject();
|
||||
boolean not = false;
|
||||
|
||||
for (String k : this.criteria.keySet()) {
|
||||
for (Entry<String, Object> entry : criteria.entrySet()) {
|
||||
|
||||
Object value = this.criteria.get(k);
|
||||
String key = entry.getKey();
|
||||
Object value = entry.getValue();
|
||||
|
||||
if (requiresGeoJsonFormat(value)) {
|
||||
value = new BasicDBObject("$geometry", value);
|
||||
@@ -575,14 +577,14 @@ public class Criteria implements CriteriaDefinition {
|
||||
|
||||
if (not) {
|
||||
DBObject notDbo = new BasicDBObject();
|
||||
notDbo.put(k, value);
|
||||
notDbo.put(key, value);
|
||||
dbo.put("$not", notDbo);
|
||||
not = false;
|
||||
} else {
|
||||
if ("$not".equals(k) && value == null) {
|
||||
if ("$not".equals(key) && value == null) {
|
||||
not = true;
|
||||
} else {
|
||||
dbo.put(k, value);
|
||||
dbo.put(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -83,14 +83,10 @@ public class Field {
|
||||
|
||||
public DBObject getFieldsObject() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
DBObject dbo = new BasicDBObject(criteria);
|
||||
|
||||
for (String k : criteria.keySet()) {
|
||||
dbo.put(k, criteria.get(k));
|
||||
}
|
||||
|
||||
for (String k : slices.keySet()) {
|
||||
dbo.put(k, new BasicDBObject("$slice", slices.get(k)));
|
||||
for (Entry<String, Object> entry : slices.entrySet()) {
|
||||
dbo.put(entry.getKey(), new BasicDBObject("$slice", entry.getValue()));
|
||||
}
|
||||
|
||||
for (Entry<String, Criteria> entry : elemMatchs.entrySet()) {
|
||||
@@ -134,8 +130,8 @@ public class Field {
|
||||
return false;
|
||||
}
|
||||
|
||||
boolean samePositionKey = this.postionKey == null ? that.postionKey == null : this.postionKey
|
||||
.equals(that.postionKey);
|
||||
boolean samePositionKey = this.postionKey == null ? that.postionKey == null
|
||||
: this.postionKey.equals(that.postionKey);
|
||||
boolean samePositionValue = this.positionValue == that.positionValue;
|
||||
|
||||
return samePositionKey && samePositionValue;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -94,9 +94,9 @@ public class Query {
|
||||
if (existing == null) {
|
||||
this.criteria.put(key, criteriaDefinition);
|
||||
} else {
|
||||
throw new InvalidMongoDbApiUsageException("Due to limitations of the com.mongodb.BasicDBObject, "
|
||||
+ "you can't add a second '" + key + "' criteria. " + "Query already contains '"
|
||||
+ existing.getCriteriaObject() + "'.");
|
||||
throw new InvalidMongoDbApiUsageException(
|
||||
"Due to limitations of the com.mongodb.BasicDBObject, " + "you can't add a second '" + key + "' criteria. "
|
||||
+ "Query already contains '" + existing.getCriteriaObject() + "'.");
|
||||
}
|
||||
|
||||
return this;
|
||||
@@ -176,7 +176,7 @@ public class Query {
|
||||
|
||||
for (Order order : sort) {
|
||||
if (order.isIgnoreCase()) {
|
||||
throw new IllegalArgumentException(String.format("Gven sort contained an Order for %s with ignore case! "
|
||||
throw new IllegalArgumentException(String.format("Given sort contained an Order for %s with ignore case! "
|
||||
+ "MongoDB does not support sorting ignoreing case currently!", order.getProperty()));
|
||||
}
|
||||
}
|
||||
@@ -221,10 +221,8 @@ public class Query {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
|
||||
for (String k : criteria.keySet()) {
|
||||
CriteriaDefinition c = criteria.get(k);
|
||||
DBObject cl = c.getCriteriaObject();
|
||||
dbo.putAll(cl);
|
||||
for (CriteriaDefinition definition : criteria.values()) {
|
||||
dbo.putAll(definition.getCriteriaObject());
|
||||
}
|
||||
|
||||
if (!restrictedTypes.isEmpty()) {
|
||||
|
||||
@@ -63,7 +63,7 @@ public class TextCriteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* For a full list of supported languages see the mongdodb reference manual for <a
|
||||
* For a full list of supported languages see the mongodb reference manual for <a
|
||||
* href="http://docs.mongodb.org/manual/reference/text-search-languages/">Text Search Languages</a>.
|
||||
*
|
||||
* @param language
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -64,7 +64,7 @@ public class Update {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an {@link Update} instance from the given {@link DBObject}. Allows to explicitly exlude fields from making
|
||||
* Creates an {@link Update} instance from the given {@link DBObject}. Allows to explicitly exclude fields from making
|
||||
* it into the created {@link Update} object. Note, that this will set attributes directly and <em>not</em> use
|
||||
* {@literal $set}. This means fields not given in the {@link DBObject} will be nulled when executing the update. To
|
||||
* create an only-updating {@link Update} instance of a {@link DBObject}, call {@link #set(String, Object)} for each
|
||||
@@ -254,7 +254,7 @@ public class Update {
|
||||
* @return
|
||||
*/
|
||||
public Update pullAll(String key, Object[] values) {
|
||||
addFieldOperation("$pullAll", key, Arrays.copyOf(values, values.length));
|
||||
addMultiFieldOperation("$pullAll", key, Arrays.copyOf(values, values.length));
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -327,17 +327,22 @@ public class Update {
|
||||
}
|
||||
|
||||
public DBObject getUpdateObject() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
for (String k : modifierOps.keySet()) {
|
||||
dbo.put(k, modifierOps.get(k));
|
||||
}
|
||||
return dbo;
|
||||
return new BasicDBObject(modifierOps);
|
||||
}
|
||||
|
||||
/**
|
||||
* This method is not called anymore rather override {@link #addMultiFieldOperation(String, String, Object)}.
|
||||
*
|
||||
* @param operator
|
||||
* @param key
|
||||
* @param value
|
||||
* @deprectaed Use {@link #addMultiFieldOperation(String, String, Object)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
protected void addFieldOperation(String operator, String key, Object value) {
|
||||
|
||||
Assert.hasText(key, "Key/Path for update must not be null or blank.");
|
||||
|
||||
modifierOps.put(operator, new BasicDBObject(key, value));
|
||||
this.keysToUpdate.add(key);
|
||||
}
|
||||
@@ -355,8 +360,8 @@ public class Update {
|
||||
if (existingValue instanceof BasicDBObject) {
|
||||
keyValueMap = (BasicDBObject) existingValue;
|
||||
} else {
|
||||
throw new InvalidDataAccessApiUsageException("Modifier Operations should be a LinkedHashMap but was "
|
||||
+ existingValue.getClass());
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
"Modifier Operations should be a LinkedHashMap but was " + existingValue.getClass());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2002-2012 the original author or authors.
|
||||
* Copyright 2002-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -29,6 +29,7 @@ import com.mongodb.MongoException;
|
||||
* Base class to encapsulate common configuration settings when connecting to a database
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public abstract class AbstractMonitor {
|
||||
|
||||
|
||||
@@ -38,7 +38,7 @@ import org.springframework.data.annotation.QueryAnnotation;
|
||||
public @interface Query {
|
||||
|
||||
/**
|
||||
* Takes a MongoDB JSON string to define the actual query to be executed. This one will take precendece over the
|
||||
* Takes a MongoDB JSON string to define the actual query to be executed. This one will take precedence over the
|
||||
* method name then.
|
||||
*
|
||||
* @return
|
||||
|
||||
@@ -46,7 +46,7 @@ public @interface EnableMongoRepositories {
|
||||
|
||||
/**
|
||||
* Alias for the {@link #basePackages()} attribute. Allows for more concise annotation declarations e.g.:
|
||||
* {@code @EnableJpaRepositories("org.my.pkg")} instead of {@code @EnableJpaRepositories(basePackages="org.my.pkg")}.
|
||||
* {@code @EnableMongoRepositories("org.my.pkg")} instead of {@code @EnableMongoRepositories(basePackages="org.my.pkg")}.
|
||||
*/
|
||||
String[] value() default {};
|
||||
|
||||
|
||||
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.domain.Range;
|
||||
import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.Point;
|
||||
@@ -41,7 +44,9 @@ public class MongoParametersParameterAccessor extends ParametersParameterAccesso
|
||||
* @param values must not be {@@iteral null}.
|
||||
*/
|
||||
public MongoParametersParameterAccessor(MongoQueryMethod method, Object[] values) {
|
||||
|
||||
super(method.getParameters(), values);
|
||||
|
||||
this.method = method;
|
||||
}
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@ import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -29,6 +30,7 @@ import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.Metrics;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.geo.Shape;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.PersistentPropertyPath;
|
||||
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
|
||||
@@ -44,6 +46,7 @@ import org.springframework.data.repository.query.parser.Part.IgnoreCaseType;
|
||||
import org.springframework.data.repository.query.parser.Part.Type;
|
||||
import org.springframework.data.repository.query.parser.PartTree;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Custom query creator to create Mongo criterias.
|
||||
@@ -55,6 +58,7 @@ import org.springframework.util.Assert;
|
||||
class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(MongoQueryCreator.class);
|
||||
private static final Pattern PUNCTATION_PATTERN = Pattern.compile("\\p{Punct}");
|
||||
private final MongoParameterAccessor accessor;
|
||||
private final boolean isGeoNearQuery;
|
||||
|
||||
@@ -276,19 +280,23 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
private Criteria createLikeRegexCriteriaOrThrow(Part part, MongoPersistentProperty property, Criteria criteria,
|
||||
PotentiallyConvertingIterator parameters, boolean shouldNegateExpression) {
|
||||
|
||||
PropertyPath path = part.getProperty().getLeafProperty();
|
||||
|
||||
switch (part.shouldIgnoreCase()) {
|
||||
|
||||
case ALWAYS:
|
||||
if (part.getProperty().getType() != String.class) {
|
||||
throw new IllegalArgumentException(String.format("part %s must be of type String but was %s",
|
||||
part.getProperty(), part.getType()));
|
||||
if (path.getType() != String.class) {
|
||||
throw new IllegalArgumentException(
|
||||
String.format("Part %s must be of type String but was %s", path, path.getType()));
|
||||
}
|
||||
// fall-through
|
||||
|
||||
case WHEN_POSSIBLE:
|
||||
|
||||
if (shouldNegateExpression) {
|
||||
criteria = criteria.not();
|
||||
}
|
||||
|
||||
return addAppropriateLikeRegexTo(criteria, part, parameters.nextConverted(property).toString());
|
||||
|
||||
case NEVER:
|
||||
@@ -365,8 +373,8 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
return (T) parameter;
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Expected parameter type of %s but got %s!", type,
|
||||
parameter.getClass()));
|
||||
throw new IllegalArgumentException(
|
||||
String.format("Expected parameter type of %s but got %s!", type, parameter.getClass()));
|
||||
}
|
||||
|
||||
private Object[] nextAsArray(PotentiallyConvertingIterator iterator, MongoPersistentProperty property) {
|
||||
@@ -374,7 +382,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
|
||||
if (next instanceof Collection) {
|
||||
return ((Collection<?>) next).toArray();
|
||||
} else if (next.getClass().isArray()) {
|
||||
} else if (next != null && next.getClass().isArray()) {
|
||||
return (Object[]) next;
|
||||
}
|
||||
|
||||
@@ -384,25 +392,59 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
private String toLikeRegex(String source, Part part) {
|
||||
|
||||
Type type = part.getType();
|
||||
String regex = prepareAndEscapeStringBeforeApplyingLikeRegex(source, part);
|
||||
|
||||
switch (type) {
|
||||
case STARTING_WITH:
|
||||
source = "^" + source;
|
||||
regex = "^" + regex;
|
||||
break;
|
||||
case ENDING_WITH:
|
||||
source = source + "$";
|
||||
regex = regex + "$";
|
||||
break;
|
||||
case CONTAINING:
|
||||
case NOT_CONTAINING:
|
||||
source = "*" + source + "*";
|
||||
regex = ".*" + regex + ".*";
|
||||
break;
|
||||
case SIMPLE_PROPERTY:
|
||||
case NEGATING_SIMPLE_PROPERTY:
|
||||
source = "^" + source + "$";
|
||||
regex = "^" + regex + "$";
|
||||
default:
|
||||
}
|
||||
|
||||
return source.replaceAll("\\*", ".*");
|
||||
return regex;
|
||||
}
|
||||
|
||||
private String prepareAndEscapeStringBeforeApplyingLikeRegex(String source, Part qpart) {
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(Type.LIKE, qpart.getType())) {
|
||||
return PUNCTATION_PATTERN.matcher(source).find() ? Pattern.quote(source) : source;
|
||||
}
|
||||
|
||||
if ("*".equals(source)) {
|
||||
return ".*";
|
||||
}
|
||||
|
||||
StringBuilder sb = new StringBuilder();
|
||||
|
||||
boolean leadingWildcard = source.startsWith("*");
|
||||
boolean trailingWildcard = source.endsWith("*");
|
||||
|
||||
String valueToUse = source.substring(leadingWildcard ? 1 : 0,
|
||||
trailingWildcard ? source.length() - 1 : source.length());
|
||||
|
||||
if (PUNCTATION_PATTERN.matcher(valueToUse).find()) {
|
||||
valueToUse = Pattern.quote(valueToUse);
|
||||
}
|
||||
|
||||
if (leadingWildcard) {
|
||||
sb.append(".*");
|
||||
}
|
||||
sb.append(valueToUse);
|
||||
if (trailingWildcard) {
|
||||
sb.append(".*");
|
||||
}
|
||||
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
private boolean isSpherical(MongoPersistentProperty property) {
|
||||
|
||||
@@ -34,6 +34,7 @@ import org.springframework.data.repository.query.QueryMethod;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
@@ -122,13 +123,22 @@ public class MongoQueryMethod extends QueryMethod {
|
||||
Class<?> returnedObjectType = getReturnedObjectType();
|
||||
Class<?> domainClass = getDomainClass();
|
||||
|
||||
MongoPersistentEntity<?> returnedEntity = mappingContext.getPersistentEntity(getReturnedObjectType());
|
||||
MongoPersistentEntity<?> managedEntity = mappingContext.getPersistentEntity(domainClass);
|
||||
returnedEntity = returnedEntity == null ? managedEntity : returnedEntity;
|
||||
MongoPersistentEntity<?> collectionEntity = domainClass.isAssignableFrom(returnedObjectType) ? returnedEntity
|
||||
: managedEntity;
|
||||
if (ClassUtils.isPrimitiveOrWrapper(returnedObjectType)) {
|
||||
|
||||
this.metadata = new SimpleMongoEntityMetadata<Object>((Class<Object>) returnedEntity.getType(), collectionEntity);
|
||||
this.metadata = new SimpleMongoEntityMetadata<Object>((Class<Object>) domainClass,
|
||||
mappingContext.getPersistentEntity(domainClass));
|
||||
|
||||
} else {
|
||||
|
||||
MongoPersistentEntity<?> returnedEntity = mappingContext.getPersistentEntity(returnedObjectType);
|
||||
MongoPersistentEntity<?> managedEntity = mappingContext.getPersistentEntity(domainClass);
|
||||
returnedEntity = returnedEntity == null ? managedEntity : returnedEntity;
|
||||
MongoPersistentEntity<?> collectionEntity = domainClass.isAssignableFrom(returnedObjectType) ? returnedEntity
|
||||
: managedEntity;
|
||||
|
||||
this.metadata = new SimpleMongoEntityMetadata<Object>((Class<Object>) returnedEntity.getType(),
|
||||
collectionEntity);
|
||||
}
|
||||
}
|
||||
|
||||
return this.metadata;
|
||||
|
||||
@@ -21,6 +21,9 @@ import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import javax.xml.bind.DatatypeConverter;
|
||||
|
||||
import org.bson.BSON;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
@@ -176,6 +179,15 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
return (String) value;
|
||||
}
|
||||
|
||||
if (value instanceof byte[]) {
|
||||
|
||||
String base64representation = DatatypeConverter.printBase64Binary((byte[]) value);
|
||||
if (!binding.isQuoted()) {
|
||||
return "{ '$binary' : '" + base64representation + "', '$type' : " + BSON.B_GENERAL + "}";
|
||||
}
|
||||
return base64representation;
|
||||
}
|
||||
|
||||
return JSON.serialize(value);
|
||||
}
|
||||
|
||||
|
||||
@@ -35,7 +35,7 @@ import com.mysema.query.apt.Configuration;
|
||||
import com.mysema.query.apt.DefaultConfiguration;
|
||||
|
||||
/**
|
||||
* Annotation processor to create Querydsl query types for QueryDsl annoated classes.
|
||||
* Annotation processor to create Querydsl query types for QueryDsl annotated classes.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
|
||||
@@ -586,7 +586,7 @@ The comma delimited list of host:port entries to use for replica set/pairs.
|
||||
<xsd:attribute name="credentials" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The comma delimited list of username:password@database entries to use for authentication. Appending ?uri.authMechanism allows to specify the authentication challenge mechanism.
|
||||
The comma delimited list of username:password@database entries to use for authentication. Appending ?uri.authMechanism allows to specify the authentication challenge mechanism. If the credential you're trying to pass contains a comma itself, quote it with single quotes: '…'.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
|
||||
@@ -43,6 +43,9 @@ public class MongoCredentialPropertyEditorUnitTests {
|
||||
static final String USER_2_PWD = "warg";
|
||||
static final String USER_2_DB = "snow";
|
||||
|
||||
static final String USER_3_NAME = "CN=myName,OU=myOrgUnit,O=myOrg,L=myLocality,ST=myState,C=myCountry";
|
||||
static final String USER_3_DB = "stark";
|
||||
|
||||
static final String USER_1_AUTH_STRING = USER_1_NAME + ":" + USER_1_PWD + "@" + USER_1_DB;
|
||||
static final String USER_1_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM = USER_1_AUTH_STRING + "?uri.authMechanism=PLAIN";
|
||||
|
||||
@@ -50,6 +53,9 @@ public class MongoCredentialPropertyEditorUnitTests {
|
||||
static final String USER_2_AUTH_STRING_WITH_MONGODB_CR_AUTH_MECHANISM = USER_2_AUTH_STRING
|
||||
+ "?uri.authMechanism=MONGODB-CR";
|
||||
|
||||
static final String USER_3_AUTH_STRING_WITH_X509_AUTH_MECHANISM = "'" + USER_3_NAME + "@" + USER_3_DB
|
||||
+ "?uri.authMechanism=MONGODB-X509'";
|
||||
|
||||
static final MongoCredential USER_1_CREDENTIALS = MongoCredential.createCredential(USER_1_NAME, USER_1_DB,
|
||||
USER_1_PWD.toCharArray());
|
||||
static final MongoCredential USER_1_CREDENTIALS_PLAIN_AUTH = MongoCredential.createPlainCredential(USER_1_NAME,
|
||||
@@ -60,6 +66,8 @@ public class MongoCredentialPropertyEditorUnitTests {
|
||||
static final MongoCredential USER_2_CREDENTIALS_CR_AUTH = MongoCredential.createMongoCRCredential(USER_2_NAME,
|
||||
USER_2_DB, USER_2_PWD.toCharArray());
|
||||
|
||||
static final MongoCredential USER_3_CREDENTIALS_X509_AUTH = MongoCredential.createMongoX509Credential(USER_3_NAME);
|
||||
|
||||
MongoCredentialPropertyEditor editor;
|
||||
|
||||
@Before
|
||||
@@ -168,4 +176,75 @@ public class MongoCredentialPropertyEditorUnitTests {
|
||||
|
||||
assertThat((List<MongoCredential>) editor.getValue(), contains(USER_1_CREDENTIALS_PLAIN_AUTH, USER_2_CREDENTIALS));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1257
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void shouldReturnCredentialsValueCorrectlyWhenGivenMultipleQuotedUserNamePasswordStringWithDatabaseAndNoOptions() {
|
||||
|
||||
editor.setAsText(StringUtils.collectionToCommaDelimitedString(Arrays.asList("'" + USER_1_AUTH_STRING + "'", "'"
|
||||
+ USER_2_AUTH_STRING + "'")));
|
||||
|
||||
assertThat((List<MongoCredential>) editor.getValue(), contains(USER_1_CREDENTIALS, USER_2_CREDENTIALS));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1257
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void shouldReturnCredentialsValueCorrectlyWhenGivenSingleQuotedUserNamePasswordStringWithDatabaseAndNoOptions() {
|
||||
|
||||
editor.setAsText("'" + USER_1_AUTH_STRING + "'");
|
||||
|
||||
assertThat((List<MongoCredential>) editor.getValue(), contains(USER_1_CREDENTIALS));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1257
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void shouldReturnX509CredentialsCorrectly() {
|
||||
|
||||
editor.setAsText(USER_3_AUTH_STRING_WITH_X509_AUTH_MECHANISM);
|
||||
|
||||
assertThat((List<MongoCredential>) editor.getValue(), contains(USER_3_CREDENTIALS_X509_AUTH));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1257
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void shouldReturnX509CredentialsCorrectlyWhenNoDbSpecified() {
|
||||
|
||||
editor.setAsText("tyrion?uri.authMechanism=MONGODB-X509");
|
||||
|
||||
assertThat((List<MongoCredential>) editor.getValue(), contains(MongoCredential.createMongoX509Credential("tyrion")));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1257
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void shouldThrowExceptionWhenNoDbSpecifiedForMongodbCR() {
|
||||
|
||||
editor.setAsText("tyrion?uri.authMechanism=MONGODB-CR");
|
||||
|
||||
editor.getValue();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1257
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void shouldThrowExceptionWhenDbIsEmptyForMongodbCR() {
|
||||
|
||||
editor.setAsText("tyrion@?uri.authMechanism=MONGODB-CR");
|
||||
|
||||
editor.getValue();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,83 @@
|
||||
/*
|
||||
* Copyright 2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate.CloseableIterableCursorAdapter;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate.DbObjectCallback;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
|
||||
import com.mongodb.Cursor;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link CloseableIterableCursorAdapter}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @see DATAMONGO-1276
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class CloseableIterableCursorAdapterUnitTests {
|
||||
|
||||
@Mock PersistenceExceptionTranslator exceptionTranslator;
|
||||
@Mock DbObjectCallback<Object> callback;
|
||||
|
||||
Cursor cursor;
|
||||
CloseableIterator<Object> adapter;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
this.cursor = doThrow(IllegalArgumentException.class).when(mock(Cursor.class));
|
||||
this.adapter = new CloseableIterableCursorAdapter<Object>(cursor, exceptionTranslator, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1276
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void propagatesOriginalExceptionFromAdapterDotNext() {
|
||||
|
||||
cursor.next();
|
||||
adapter.next();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1276
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void propagatesOriginalExceptionFromAdapterDotHasNext() {
|
||||
|
||||
cursor.hasNext();
|
||||
adapter.hasNext();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1276
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void propagatesOriginalExceptionFromAdapterDotClose() {
|
||||
|
||||
cursor.close();
|
||||
adapter.close();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,65 @@
|
||||
/*
|
||||
* Copyright 2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link GeoCommandStatistics}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @soundtrack Fruitcake - Jeff Coffin (The Inside of the Outside)
|
||||
*/
|
||||
public class GeoCommandStatisticsUnitTests {
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1361
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void rejectsNullCommandResult() {
|
||||
GeoCommandStatistics.from(null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1361
|
||||
*/
|
||||
@Test
|
||||
public void fallsBackToNanIfNoAverageDistanceIsAvailable() {
|
||||
|
||||
GeoCommandStatistics statistics = GeoCommandStatistics.from(new BasicDBObject("stats", null));
|
||||
assertThat(statistics.getAverageDistance(), is(Double.NaN));
|
||||
|
||||
statistics = GeoCommandStatistics.from(new BasicDBObject("stats", new BasicDBObject()));
|
||||
assertThat(statistics.getAverageDistance(), is(Double.NaN));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1361
|
||||
*/
|
||||
@Test
|
||||
public void returnsAverageDistanceIfPresent() {
|
||||
|
||||
GeoCommandStatistics statistics = GeoCommandStatistics
|
||||
.from(new BasicDBObject("stats", new BasicDBObject("avgDistance", 1.5)));
|
||||
|
||||
assertThat(statistics.getAverageDistance(), is(1.5));
|
||||
}
|
||||
}
|
||||
@@ -36,7 +36,7 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
@ContextConfiguration("classpath:infrastructure.xml")
|
||||
public class MongoAdminIntegrationTests {
|
||||
|
||||
private static Log logger = LogFactory.getLog(MongoAdminIntegrationTests.class);
|
||||
private static final Log logger = LogFactory.getLog(MongoAdminIntegrationTests.class);
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
private DB testAdminDb;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012-2013 the original author or authors.
|
||||
* Copyright 2012-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.junit.Assume.*;
|
||||
import static org.mockito.Matchers.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@@ -27,35 +28,37 @@ import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.invocation.InvocationOnMock;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.mockito.stubbing.Answer;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.data.mongodb.CannotGetMongoDbConnectionException;
|
||||
import org.springframework.data.mongodb.util.MongoClientVersion;
|
||||
import org.springframework.transaction.support.TransactionSynchronization;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationUtils;
|
||||
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link MongoDbUtils}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Randy Watler
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class MongoDbUtilsUnitTests {
|
||||
|
||||
@Mock Mongo mongo;
|
||||
@Mock MongoClient mongoClientMock;
|
||||
@Mock DB dbMock;
|
||||
|
||||
@Before
|
||||
public void setUp() throws Exception {
|
||||
|
||||
when(mongo.getDB(anyString())).then(new Answer<DB>() {
|
||||
public DB answer(InvocationOnMock invocation) throws Throwable {
|
||||
return mock(DB.class);
|
||||
}
|
||||
});
|
||||
when(mongo.getDB(anyString())).thenReturn(dbMock).thenReturn(mock(DB.class));
|
||||
when(mongoClientMock.getDB(anyString())).thenReturn(dbMock);
|
||||
|
||||
TransactionSynchronizationManager.initSynchronization();
|
||||
}
|
||||
@@ -151,6 +154,38 @@ public class MongoDbUtilsUnitTests {
|
||||
assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty(), is(true));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1218
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("deprecation")
|
||||
public void getDBDAuthenticateViaAuthDbWhenCalledWithMongoInstance() {
|
||||
|
||||
assumeThat(MongoClientVersion.isMongo3Driver(), is(false));
|
||||
|
||||
when(dbMock.getName()).thenReturn("db");
|
||||
|
||||
try {
|
||||
MongoDbUtils.getDB(mongo, "db", new UserCredentials("shallan", "davar"), "authdb");
|
||||
} catch (CannotGetMongoDbConnectionException e) {
|
||||
// need to catch that one since we cannot answer the reflective call sufficiently
|
||||
}
|
||||
|
||||
verify(mongo, times(1)).getDB("authdb");
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1218
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("deprecation")
|
||||
public void getDBDShouldSkipAuthenticationViaAuthDbWhenCalledWithMongoClientInstance() {
|
||||
|
||||
MongoDbUtils.getDB(mongoClientMock, "db", new UserCredentials("dalinar", "kholin"), "authdb");
|
||||
|
||||
verify(mongoClientMock, never()).getDB("authdb");
|
||||
}
|
||||
|
||||
/**
|
||||
* Simulate transaction rollback/commit completion protocol on managed transaction synchronizations which will unbind
|
||||
* managed transaction resources. Does not swallow exceptions for testing purposes.
|
||||
|
||||
@@ -73,8 +73,10 @@ import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
@@ -187,9 +189,14 @@ public class MongoTemplateTests {
|
||||
template.dropCollection(DocumentWithCollection.class);
|
||||
template.dropCollection(DocumentWithCollectionOfSimpleType.class);
|
||||
template.dropCollection(DocumentWithMultipleCollections.class);
|
||||
template.dropCollection(DocumentWithNestedCollection.class);
|
||||
template.dropCollection(DocumentWithEmbeddedDocumentWithCollection.class);
|
||||
template.dropCollection(DocumentWithNestedList.class);
|
||||
template.dropCollection(DocumentWithDBRefCollection.class);
|
||||
template.dropCollection(SomeContent.class);
|
||||
template.dropCollection(SomeTemplate.class);
|
||||
template.dropCollection(Address.class);
|
||||
template.dropCollection(DocumentWithCollectionOfSamples.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -2204,6 +2211,243 @@ public class MongoTemplateTests {
|
||||
assertThat(retrieved.model.value(), equalTo("value2"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1210
|
||||
*/
|
||||
@Test
|
||||
public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnDocumentWithNestedCollectionWhenWholeCollectionIsReplaced() {
|
||||
|
||||
DocumentWithNestedCollection doc = new DocumentWithNestedCollection();
|
||||
|
||||
Map<String, Model> entry = new HashMap<String, Model>();
|
||||
entry.put("key1", new ModelA("value1"));
|
||||
doc.models.add(entry);
|
||||
|
||||
template.save(doc);
|
||||
|
||||
entry.put("key2", new ModelA("value2"));
|
||||
|
||||
Query query = query(where("id").is(doc.id));
|
||||
Update update = Update.update("models", Collections.singletonList(entry));
|
||||
|
||||
assertThat(template.findOne(query, DocumentWithNestedCollection.class), notNullValue());
|
||||
|
||||
template.findAndModify(query, update, DocumentWithNestedCollection.class);
|
||||
|
||||
DocumentWithNestedCollection retrieved = template.findOne(query, DocumentWithNestedCollection.class);
|
||||
|
||||
assertThat(retrieved, is(notNullValue()));
|
||||
assertThat(retrieved.id, is(doc.id));
|
||||
|
||||
assertThat(retrieved.models.get(0).entrySet(), hasSize(2));
|
||||
|
||||
assertThat(retrieved.models.get(0).get("key1"), instanceOf(ModelA.class));
|
||||
assertThat(retrieved.models.get(0).get("key1").value(), equalTo("value1"));
|
||||
|
||||
assertThat(retrieved.models.get(0).get("key2"), instanceOf(ModelA.class));
|
||||
assertThat(retrieved.models.get(0).get("key2").value(), equalTo("value2"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1210
|
||||
*/
|
||||
@Test
|
||||
public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnDocumentWithNestedCollectionWhenFirstElementIsReplaced() {
|
||||
|
||||
DocumentWithNestedCollection doc = new DocumentWithNestedCollection();
|
||||
|
||||
Map<String, Model> entry = new HashMap<String, Model>();
|
||||
entry.put("key1", new ModelA("value1"));
|
||||
doc.models.add(entry);
|
||||
|
||||
template.save(doc);
|
||||
|
||||
entry.put("key2", new ModelA("value2"));
|
||||
|
||||
Query query = query(where("id").is(doc.id));
|
||||
Update update = Update.update("models.0", entry);
|
||||
|
||||
assertThat(template.findOne(query, DocumentWithNestedCollection.class), notNullValue());
|
||||
|
||||
template.findAndModify(query, update, DocumentWithNestedCollection.class);
|
||||
|
||||
DocumentWithNestedCollection retrieved = template.findOne(query, DocumentWithNestedCollection.class);
|
||||
|
||||
assertThat(retrieved, is(notNullValue()));
|
||||
assertThat(retrieved.id, is(doc.id));
|
||||
|
||||
assertThat(retrieved.models.get(0).entrySet(), hasSize(2));
|
||||
|
||||
assertThat(retrieved.models.get(0).get("key1"), instanceOf(ModelA.class));
|
||||
assertThat(retrieved.models.get(0).get("key1").value(), equalTo("value1"));
|
||||
|
||||
assertThat(retrieved.models.get(0).get("key2"), instanceOf(ModelA.class));
|
||||
assertThat(retrieved.models.get(0).get("key2").value(), equalTo("value2"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1210
|
||||
*/
|
||||
@Test
|
||||
public void findAndModifyShouldAddTypeInformationOnDocumentWithNestedCollectionObjectInsertedAtSecondIndex() {
|
||||
|
||||
DocumentWithNestedCollection doc = new DocumentWithNestedCollection();
|
||||
|
||||
Map<String, Model> entry = new HashMap<String, Model>();
|
||||
entry.put("key1", new ModelA("value1"));
|
||||
doc.models.add(entry);
|
||||
|
||||
template.save(doc);
|
||||
|
||||
Query query = query(where("id").is(doc.id));
|
||||
Update update = Update.update("models.1", Collections.singletonMap("key2", new ModelA("value2")));
|
||||
|
||||
assertThat(template.findOne(query, DocumentWithNestedCollection.class), notNullValue());
|
||||
|
||||
template.findAndModify(query, update, DocumentWithNestedCollection.class);
|
||||
|
||||
DocumentWithNestedCollection retrieved = template.findOne(query, DocumentWithNestedCollection.class);
|
||||
|
||||
assertThat(retrieved, is(notNullValue()));
|
||||
assertThat(retrieved.id, is(doc.id));
|
||||
|
||||
assertThat(retrieved.models.get(0).entrySet(), hasSize(1));
|
||||
assertThat(retrieved.models.get(1).entrySet(), hasSize(1));
|
||||
|
||||
assertThat(retrieved.models.get(0).get("key1"), instanceOf(ModelA.class));
|
||||
assertThat(retrieved.models.get(0).get("key1").value(), equalTo("value1"));
|
||||
|
||||
assertThat(retrieved.models.get(1).get("key2"), instanceOf(ModelA.class));
|
||||
assertThat(retrieved.models.get(1).get("key2").value(), equalTo("value2"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1210
|
||||
*/
|
||||
@Test
|
||||
public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnEmbeddedDocumentWithCollectionWhenUpdatingPositionedElement()
|
||||
throws Exception {
|
||||
|
||||
List<Model> models = new ArrayList<Model>();
|
||||
models.add(new ModelA("value1"));
|
||||
|
||||
DocumentWithEmbeddedDocumentWithCollection doc = new DocumentWithEmbeddedDocumentWithCollection(
|
||||
new DocumentWithCollection(models));
|
||||
|
||||
template.save(doc);
|
||||
|
||||
Query query = query(where("id").is(doc.id));
|
||||
Update update = Update.update("embeddedDocument.models.0", new ModelA("value2"));
|
||||
|
||||
assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class), notNullValue());
|
||||
|
||||
template.findAndModify(query, update, DocumentWithEmbeddedDocumentWithCollection.class);
|
||||
|
||||
DocumentWithEmbeddedDocumentWithCollection retrieved = template.findOne(query,
|
||||
DocumentWithEmbeddedDocumentWithCollection.class);
|
||||
|
||||
assertThat(retrieved, notNullValue());
|
||||
assertThat(retrieved.embeddedDocument.models, hasSize(1));
|
||||
assertThat(retrieved.embeddedDocument.models.get(0).value(), is("value2"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1210
|
||||
*/
|
||||
@Test
|
||||
public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnEmbeddedDocumentWithCollectionWhenUpdatingSecondElement()
|
||||
throws Exception {
|
||||
|
||||
List<Model> models = new ArrayList<Model>();
|
||||
models.add(new ModelA("value1"));
|
||||
|
||||
DocumentWithEmbeddedDocumentWithCollection doc = new DocumentWithEmbeddedDocumentWithCollection(
|
||||
new DocumentWithCollection(models));
|
||||
|
||||
template.save(doc);
|
||||
|
||||
Query query = query(where("id").is(doc.id));
|
||||
Update update = Update.update("embeddedDocument.models.1", new ModelA("value2"));
|
||||
|
||||
assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class), notNullValue());
|
||||
|
||||
template.findAndModify(query, update, DocumentWithEmbeddedDocumentWithCollection.class);
|
||||
|
||||
DocumentWithEmbeddedDocumentWithCollection retrieved = template.findOne(query,
|
||||
DocumentWithEmbeddedDocumentWithCollection.class);
|
||||
|
||||
assertThat(retrieved, notNullValue());
|
||||
assertThat(retrieved.embeddedDocument.models, hasSize(2));
|
||||
assertThat(retrieved.embeddedDocument.models.get(0).value(), is("value1"));
|
||||
assertThat(retrieved.embeddedDocument.models.get(1).value(), is("value2"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1210
|
||||
*/
|
||||
@Test
|
||||
public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnEmbeddedDocumentWithCollectionWhenRewriting()
|
||||
throws Exception {
|
||||
|
||||
List<Model> models = Arrays.<Model> asList(new ModelA("value1"));
|
||||
|
||||
DocumentWithEmbeddedDocumentWithCollection doc = new DocumentWithEmbeddedDocumentWithCollection(
|
||||
new DocumentWithCollection(models));
|
||||
|
||||
template.save(doc);
|
||||
|
||||
Query query = query(where("id").is(doc.id));
|
||||
Update update = Update.update("embeddedDocument",
|
||||
new DocumentWithCollection(Arrays.<Model> asList(new ModelA("value2"))));
|
||||
|
||||
assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class), notNullValue());
|
||||
|
||||
template.findAndModify(query, update, DocumentWithEmbeddedDocumentWithCollection.class);
|
||||
|
||||
DocumentWithEmbeddedDocumentWithCollection retrieved = template.findOne(query,
|
||||
DocumentWithEmbeddedDocumentWithCollection.class);
|
||||
|
||||
assertThat(retrieved, notNullValue());
|
||||
assertThat(retrieved.embeddedDocument.models, hasSize(1));
|
||||
assertThat(retrieved.embeddedDocument.models.get(0).value(), is("value2"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1210
|
||||
*/
|
||||
@Test
|
||||
public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnDocumentWithNestedLists() {
|
||||
|
||||
DocumentWithNestedList doc = new DocumentWithNestedList();
|
||||
|
||||
List<Model> entry = new ArrayList<Model>();
|
||||
entry.add(new ModelA("value1"));
|
||||
doc.models.add(entry);
|
||||
|
||||
template.save(doc);
|
||||
|
||||
Query query = query(where("id").is(doc.id));
|
||||
|
||||
assertThat(template.findOne(query, DocumentWithNestedList.class), notNullValue());
|
||||
|
||||
Update update = Update.update("models.0.1", new ModelA("value2"));
|
||||
|
||||
template.findAndModify(query, update, DocumentWithNestedList.class);
|
||||
|
||||
DocumentWithNestedList retrieved = template.findOne(query, DocumentWithNestedList.class);
|
||||
|
||||
assertThat(retrieved, is(notNullValue()));
|
||||
assertThat(retrieved.id, is(doc.id));
|
||||
|
||||
assertThat(retrieved.models.get(0), hasSize(2));
|
||||
|
||||
assertThat(retrieved.models.get(0).get(0), instanceOf(ModelA.class));
|
||||
assertThat(retrieved.models.get(0).get(0).value(), equalTo("value1"));
|
||||
|
||||
assertThat(retrieved.models.get(0).get(1), instanceOf(ModelA.class));
|
||||
assertThat(retrieved.models.get(0).get(1).value(), equalTo("value2"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-407
|
||||
*/
|
||||
@@ -2609,6 +2853,33 @@ public class MongoTemplateTests {
|
||||
assertThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values, hasSize(3));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1210
|
||||
*/
|
||||
@Test
|
||||
public void findAndModifyAddToSetWithEachShouldNotAddDuplicatesNorTypeHintForSimpleDocuments() {
|
||||
|
||||
DocumentWithCollectionOfSamples doc = new DocumentWithCollectionOfSamples();
|
||||
doc.samples = Arrays.asList(new Sample(null, "sample1"));
|
||||
|
||||
template.save(doc);
|
||||
|
||||
Query query = query(where("id").is(doc.id));
|
||||
|
||||
assertThat(template.findOne(query, DocumentWithCollectionOfSamples.class), notNullValue());
|
||||
|
||||
Update update = new Update().addToSet("samples").each(new Sample(null, "sample2"), new Sample(null, "sample1"));
|
||||
|
||||
template.findAndModify(query, update, DocumentWithCollectionOfSamples.class);
|
||||
|
||||
DocumentWithCollectionOfSamples retrieved = template.findOne(query, DocumentWithCollectionOfSamples.class);
|
||||
|
||||
assertThat(retrieved, notNullValue());
|
||||
assertThat(retrieved.samples, hasSize(2));
|
||||
assertThat(retrieved.samples.get(0).field, is("sample1"));
|
||||
assertThat(retrieved.samples.get(1).field, is("sample2"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-888
|
||||
*/
|
||||
@@ -2723,6 +2994,155 @@ public class MongoTemplateTests {
|
||||
assertThat(template.findAll(DBObject.class, "collection"), hasSize(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1207
|
||||
*/
|
||||
@Test
|
||||
public void ignoresNullElementsForInsertAll() {
|
||||
|
||||
Address newYork = new Address("NY", "New York");
|
||||
Address washington = new Address("DC", "Washington");
|
||||
|
||||
template.insertAll(Arrays.asList(newYork, null, washington));
|
||||
|
||||
List<Address> result = template.findAll(Address.class);
|
||||
|
||||
assertThat(result, hasSize(2));
|
||||
assertThat(result, hasItems(newYork, washington));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1208
|
||||
*/
|
||||
@Test
|
||||
public void takesSortIntoAccountWhenStreaming() {
|
||||
|
||||
Person youngestPerson = new Person("John", 20);
|
||||
Person oldestPerson = new Person("Jane", 42);
|
||||
|
||||
template.insertAll(Arrays.asList(oldestPerson, youngestPerson));
|
||||
|
||||
Query q = new Query();
|
||||
q.with(new Sort(Direction.ASC, "age"));
|
||||
CloseableIterator<Person> stream = template.stream(q, Person.class);
|
||||
|
||||
assertThat(stream.next().getAge(), is(youngestPerson.getAge()));
|
||||
assertThat(stream.next().getAge(), is(oldestPerson.getAge()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1208
|
||||
*/
|
||||
@Test
|
||||
public void takesLimitIntoAccountWhenStreaming() {
|
||||
|
||||
Person youngestPerson = new Person("John", 20);
|
||||
Person oldestPerson = new Person("Jane", 42);
|
||||
|
||||
template.insertAll(Arrays.asList(oldestPerson, youngestPerson));
|
||||
|
||||
Query q = new Query();
|
||||
q.with(new PageRequest(0, 1, new Sort(Direction.ASC, "age")));
|
||||
CloseableIterator<Person> stream = template.stream(q, Person.class);
|
||||
|
||||
assertThat(stream.next().getAge(), is(youngestPerson.getAge()));
|
||||
assertThat(stream.hasNext(), is(false));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1204
|
||||
*/
|
||||
@Test
|
||||
public void resolvesCyclicDBRefCorrectly() {
|
||||
|
||||
SomeMessage message = new SomeMessage();
|
||||
SomeContent content = new SomeContent();
|
||||
|
||||
template.save(message);
|
||||
template.save(content);
|
||||
|
||||
message.dbrefContent = content;
|
||||
content.dbrefMessage = message;
|
||||
|
||||
template.save(message);
|
||||
template.save(content);
|
||||
|
||||
SomeMessage messageLoaded = template.findOne(query(where("id").is(message.id)), SomeMessage.class);
|
||||
SomeContent contentLoaded = template.findOne(query(where("id").is(content.id)), SomeContent.class);
|
||||
|
||||
assertThat(messageLoaded.dbrefContent.id, is(contentLoaded.id));
|
||||
assertThat(contentLoaded.dbrefMessage.id, is(messageLoaded.id));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1287
|
||||
*/
|
||||
@Test
|
||||
public void shouldReuseAlreadyResolvedLazyLoadedDBRefWhenUsedAsPersistenceConstrcutorArgument() {
|
||||
|
||||
Document docInCtor = new Document();
|
||||
docInCtor.id = "doc-in-ctor";
|
||||
template.save(docInCtor);
|
||||
|
||||
DocumentWithLazyDBrefUsedInPresistenceConstructor source = new DocumentWithLazyDBrefUsedInPresistenceConstructor(
|
||||
docInCtor);
|
||||
|
||||
template.save(source);
|
||||
|
||||
DocumentWithLazyDBrefUsedInPresistenceConstructor loaded = template.findOne(query(where("id").is(source.id)),
|
||||
DocumentWithLazyDBrefUsedInPresistenceConstructor.class);
|
||||
assertThat(loaded.refToDocUsedInCtor, not(instanceOf(LazyLoadingProxy.class)));
|
||||
assertThat(loaded.refToDocNotUsedInCtor, nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1287
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotReuseLazyLoadedDBRefWhenTypeUsedInPersistenceConstrcutorButValueRefersToAnotherProperty() {
|
||||
|
||||
Document docNotUsedInCtor = new Document();
|
||||
docNotUsedInCtor.id = "doc-but-not-used-in-ctor";
|
||||
template.save(docNotUsedInCtor);
|
||||
|
||||
DocumentWithLazyDBrefUsedInPresistenceConstructor source = new DocumentWithLazyDBrefUsedInPresistenceConstructor(
|
||||
null);
|
||||
source.refToDocNotUsedInCtor = docNotUsedInCtor;
|
||||
|
||||
template.save(source);
|
||||
|
||||
DocumentWithLazyDBrefUsedInPresistenceConstructor loaded = template.findOne(query(where("id").is(source.id)),
|
||||
DocumentWithLazyDBrefUsedInPresistenceConstructor.class);
|
||||
assertThat(loaded.refToDocNotUsedInCtor, instanceOf(LazyLoadingProxy.class));
|
||||
assertThat(loaded.refToDocUsedInCtor, nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1287
|
||||
*/
|
||||
@Test
|
||||
public void shouldRespectParamterValueWhenAttemptingToReuseLazyLoadedDBRefUsedInPersistenceConstrcutor() {
|
||||
|
||||
Document docInCtor = new Document();
|
||||
docInCtor.id = "doc-in-ctor";
|
||||
template.save(docInCtor);
|
||||
|
||||
Document docNotUsedInCtor = new Document();
|
||||
docNotUsedInCtor.id = "doc-but-not-used-in-ctor";
|
||||
template.save(docNotUsedInCtor);
|
||||
|
||||
DocumentWithLazyDBrefUsedInPresistenceConstructor source = new DocumentWithLazyDBrefUsedInPresistenceConstructor(
|
||||
docInCtor);
|
||||
source.refToDocNotUsedInCtor = docNotUsedInCtor;
|
||||
|
||||
template.save(source);
|
||||
|
||||
DocumentWithLazyDBrefUsedInPresistenceConstructor loaded = template.findOne(query(where("id").is(source.id)),
|
||||
DocumentWithLazyDBrefUsedInPresistenceConstructor.class);
|
||||
assertThat(loaded.refToDocUsedInCtor, not(instanceOf(LazyLoadingProxy.class)));
|
||||
assertThat(loaded.refToDocNotUsedInCtor, instanceOf(LazyLoadingProxy.class));
|
||||
}
|
||||
|
||||
static class DoucmentWithNamedIdField {
|
||||
|
||||
@Id String someIdKey;
|
||||
@@ -2798,12 +3218,36 @@ public class MongoTemplateTests {
|
||||
List<String> values;
|
||||
}
|
||||
|
||||
static class DocumentWithCollectionOfSamples {
|
||||
@Id String id;
|
||||
List<Sample> samples;
|
||||
}
|
||||
|
||||
static class DocumentWithMultipleCollections {
|
||||
@Id String id;
|
||||
List<String> string1;
|
||||
List<String> string2;
|
||||
}
|
||||
|
||||
static class DocumentWithNestedCollection {
|
||||
@Id String id;
|
||||
List<Map<String, Model>> models = new ArrayList<Map<String, Model>>();
|
||||
}
|
||||
|
||||
static class DocumentWithNestedList {
|
||||
@Id String id;
|
||||
List<List<Model>> models = new ArrayList<List<Model>>();
|
||||
}
|
||||
|
||||
static class DocumentWithEmbeddedDocumentWithCollection {
|
||||
@Id String id;
|
||||
DocumentWithCollection embeddedDocument;
|
||||
|
||||
DocumentWithEmbeddedDocumentWithCollection(DocumentWithCollection embeddedDocument) {
|
||||
this.embeddedDocument = embeddedDocument;
|
||||
}
|
||||
}
|
||||
|
||||
static interface Model {
|
||||
String value();
|
||||
|
||||
@@ -2909,6 +3353,41 @@ public class MongoTemplateTests {
|
||||
|
||||
String state;
|
||||
String city;
|
||||
|
||||
Address() {}
|
||||
|
||||
Address(String state, String city) {
|
||||
this.state = state;
|
||||
this.city = city;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (obj == this) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(obj instanceof Address)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Address that = (Address) obj;
|
||||
|
||||
return ObjectUtils.nullSafeEquals(this.city, that.city) && //
|
||||
ObjectUtils.nullSafeEquals(this.state, that.state);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
int result = 17;
|
||||
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(this.city);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(this.state);
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
static class VersionedPerson {
|
||||
@@ -2966,6 +3445,7 @@ public class MongoTemplateTests {
|
||||
String id;
|
||||
String text;
|
||||
String name;
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef SomeMessage dbrefMessage;
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
@@ -2990,4 +3470,18 @@ public class MongoTemplateTests {
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef SomeContent dbrefContent;
|
||||
SomeContent normalContent;
|
||||
}
|
||||
|
||||
static class DocumentWithLazyDBrefUsedInPresistenceConstructor {
|
||||
|
||||
@Id String id;
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) Document refToDocUsedInCtor;
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) Document refToDocNotUsedInCtor;
|
||||
|
||||
@PersistenceConstructor
|
||||
public DocumentWithLazyDBrefUsedInPresistenceConstructor(Document refToDocUsedInCtor) {
|
||||
this.refToDocUsedInCtor = refToDocUsedInCtor;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -43,27 +43,35 @@ import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.annotation.Version;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.convert.CustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.CommandResult;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBCursor;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.MapReduceCommand;
|
||||
import com.mongodb.MapReduceOutput;
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.ReadPreference;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link MongoTemplate}.
|
||||
@@ -353,6 +361,176 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
|
||||
assertThat(captor.getValue(), equalTo(new BasicDBObjectBuilder().add("foo", 1).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void aggregateShouldHonorReadPreferenceWhenSet() {
|
||||
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(
|
||||
mock(CommandResult.class));
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
|
||||
template.setReadPreference(ReadPreference.secondary());
|
||||
|
||||
template.aggregate(Aggregation.newAggregation(Aggregation.unwind("foo")), "collection-1", Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class), eq(ReadPreference.secondary()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void aggregateShouldIgnoreReadPreferenceWhenNotSet() {
|
||||
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(
|
||||
mock(CommandResult.class));
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
|
||||
|
||||
template.aggregate(Aggregation.newAggregation(Aggregation.unwind("foo")), "collection-1", Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void geoNearShouldHonorReadPreferenceWhenSet() {
|
||||
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(
|
||||
mock(CommandResult.class));
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
|
||||
template.setReadPreference(ReadPreference.secondary());
|
||||
|
||||
NearQuery query = NearQuery.near(new Point(1, 1));
|
||||
template.geoNear(query, Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class), eq(ReadPreference.secondary()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void geoNearShouldIgnoreReadPreferenceWhenNotSet() {
|
||||
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(
|
||||
mock(CommandResult.class));
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
|
||||
|
||||
NearQuery query = NearQuery.near(new Point(1, 1));
|
||||
template.geoNear(query, Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1334
|
||||
*/
|
||||
@Test
|
||||
public void mapReduceShouldUseZeroAsDefaultLimit() {
|
||||
|
||||
ArgumentCaptor<MapReduceCommand> captor = ArgumentCaptor.forClass(MapReduceCommand.class);
|
||||
|
||||
MapReduceOutput output = mock(MapReduceOutput.class);
|
||||
when(output.results()).thenReturn(Collections.<DBObject> emptySet());
|
||||
when(collection.mapReduce(Mockito.any(MapReduceCommand.class))).thenReturn(output);
|
||||
|
||||
Query query = new BasicQuery("{'foo':'bar'}");
|
||||
|
||||
template.mapReduce(query, "collection", "function(){}", "function(key,values){}", Wrapper.class);
|
||||
|
||||
verify(collection).mapReduce(captor.capture());
|
||||
|
||||
assertThat(captor.getValue().getLimit(), is(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1334
|
||||
*/
|
||||
@Test
|
||||
public void mapReduceShouldPickUpLimitFromQuery() {
|
||||
|
||||
ArgumentCaptor<MapReduceCommand> captor = ArgumentCaptor.forClass(MapReduceCommand.class);
|
||||
|
||||
MapReduceOutput output = mock(MapReduceOutput.class);
|
||||
when(output.results()).thenReturn(Collections.<DBObject> emptySet());
|
||||
when(collection.mapReduce(Mockito.any(MapReduceCommand.class))).thenReturn(output);
|
||||
|
||||
Query query = new BasicQuery("{'foo':'bar'}");
|
||||
query.limit(100);
|
||||
|
||||
template.mapReduce(query, "collection", "function(){}", "function(key,values){}", Wrapper.class);
|
||||
|
||||
verify(collection).mapReduce(captor.capture());
|
||||
|
||||
assertThat(captor.getValue().getLimit(), is(100));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1334
|
||||
*/
|
||||
@Test
|
||||
public void mapReduceShouldPickUpLimitFromOptions() {
|
||||
|
||||
ArgumentCaptor<MapReduceCommand> captor = ArgumentCaptor.forClass(MapReduceCommand.class);
|
||||
|
||||
MapReduceOutput output = mock(MapReduceOutput.class);
|
||||
when(output.results()).thenReturn(Collections.<DBObject> emptySet());
|
||||
when(collection.mapReduce(Mockito.any(MapReduceCommand.class))).thenReturn(output);
|
||||
|
||||
Query query = new BasicQuery("{'foo':'bar'}");
|
||||
|
||||
template.mapReduce(query, "collection", "function(){}", "function(key,values){}",
|
||||
new MapReduceOptions().limit(1000), Wrapper.class);
|
||||
|
||||
verify(collection).mapReduce(captor.capture());
|
||||
assertThat(captor.getValue().getLimit(), is(1000));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1334
|
||||
*/
|
||||
@Test
|
||||
public void mapReduceShouldPickUpLimitFromOptionsWhenQueryIsNotPresent() {
|
||||
|
||||
ArgumentCaptor<MapReduceCommand> captor = ArgumentCaptor.forClass(MapReduceCommand.class);
|
||||
|
||||
MapReduceOutput output = mock(MapReduceOutput.class);
|
||||
when(output.results()).thenReturn(Collections.<DBObject> emptySet());
|
||||
when(collection.mapReduce(Mockito.any(MapReduceCommand.class))).thenReturn(output);
|
||||
|
||||
template.mapReduce("collection", "function(){}", "function(key,values){}", new MapReduceOptions().limit(1000),
|
||||
Wrapper.class);
|
||||
|
||||
verify(collection).mapReduce(captor.capture());
|
||||
assertThat(captor.getValue().getLimit(), is(1000));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1334
|
||||
*/
|
||||
@Test
|
||||
public void mapReduceShouldPickUpLimitFromOptionsEvenWhenQueryDefinesItDifferently() {
|
||||
|
||||
ArgumentCaptor<MapReduceCommand> captor = ArgumentCaptor.forClass(MapReduceCommand.class);
|
||||
|
||||
MapReduceOutput output = mock(MapReduceOutput.class);
|
||||
when(output.results()).thenReturn(Collections.<DBObject> emptySet());
|
||||
when(collection.mapReduce(Mockito.any(MapReduceCommand.class))).thenReturn(output);
|
||||
|
||||
Query query = new BasicQuery("{'foo':'bar'}");
|
||||
query.limit(100);
|
||||
|
||||
template.mapReduce(query, "collection", "function(){}", "function(key,values){}",
|
||||
new MapReduceOptions().limit(1000), Wrapper.class);
|
||||
|
||||
verify(collection).mapReduce(captor.capture());
|
||||
|
||||
assertThat(captor.getValue().getLimit(), is(1000));
|
||||
}
|
||||
|
||||
class AutogenerateableId {
|
||||
|
||||
@Id BigInteger id;
|
||||
|
||||
@@ -22,10 +22,13 @@ import static org.springframework.test.util.ReflectionTestUtils.*;
|
||||
|
||||
import java.net.UnknownHostException;
|
||||
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
|
||||
@@ -43,6 +46,7 @@ import com.mongodb.MongoURI;
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class SimpleMongoDbFactoryUnitTests {
|
||||
|
||||
public @Rule ExpectedException expectedException = ExpectedException.none();
|
||||
@Mock Mongo mongo;
|
||||
|
||||
/**
|
||||
@@ -115,6 +119,46 @@ public class SimpleMongoDbFactoryUnitTests {
|
||||
assertThat(getField(factory, "authenticationDatabaseName").toString(), is("FooBar"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1260
|
||||
*/
|
||||
@Test
|
||||
public void rejectsMongoClientWithUserCredentials() {
|
||||
|
||||
expectedException.expect(InvalidDataAccessApiUsageException.class);
|
||||
expectedException.expectMessage("use 'MongoCredential' for 'MongoClient'");
|
||||
|
||||
new SimpleMongoDbFactory(mock(MongoClient.class), "cairhienin", new UserCredentials("moiraine", "sedai"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1260
|
||||
*/
|
||||
@Test
|
||||
public void rejectsMongoClientWithUserCredentialsAndAuthDb() {
|
||||
|
||||
expectedException.expect(InvalidDataAccessApiUsageException.class);
|
||||
expectedException.expectMessage("use 'MongoCredential' for 'MongoClient'");
|
||||
|
||||
new SimpleMongoDbFactory(mock(MongoClient.class), "malkieri", new UserCredentials("lan", "mandragoran"), "authdb");
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1260
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotRejectMongoClientWithNoCredentials() {
|
||||
new SimpleMongoDbFactory(mock(MongoClient.class), "andoran", UserCredentials.NO_CREDENTIALS);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1260
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotRejectMongoClientWithEmptyUserCredentials() {
|
||||
new SimpleMongoDbFactory(mock(MongoClient.class), "shangtai", new UserCredentials("", ""));
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
private void rejectsDatabaseName(String databaseName) {
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.joda.time.LocalDate;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.annotation.PersistenceConstructor;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
@@ -24,10 +25,10 @@ import org.springframework.data.mongodb.core.mapping.Document;
|
||||
@Document(collection = "newyork")
|
||||
public class Venue {
|
||||
|
||||
@Id
|
||||
private String id;
|
||||
@Id private String id;
|
||||
private String name;
|
||||
private double[] location;
|
||||
private LocalDate openingDate;
|
||||
|
||||
@PersistenceConstructor
|
||||
Venue(String name, double[] location) {
|
||||
@@ -50,6 +51,14 @@ public class Venue {
|
||||
return location;
|
||||
}
|
||||
|
||||
public LocalDate getOpeningDate() {
|
||||
return openingDate;
|
||||
}
|
||||
|
||||
public void setOpeningDate(LocalDate openingDate) {
|
||||
this.openingDate = openingDate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Venue [id=" + id + ", name=" + name + ", location=" + Arrays.toString(location) + "]";
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -30,6 +30,7 @@ import org.junit.rules.ExpectedException;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
@@ -37,6 +38,7 @@ import com.mongodb.DBObject;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class AggregationUnitTests {
|
||||
|
||||
@@ -283,6 +285,40 @@ public class AggregationUnitTests {
|
||||
is((DBObject) new BasicDBObject("_id", "$someKey").append("doc", new BasicDBObject("$first", "$$ROOT"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1254
|
||||
*/
|
||||
@Test
|
||||
public void shouldExposeAliasedFieldnameForProjectionsIncludingOperationsDownThePipeline() {
|
||||
|
||||
DBObject agg = Aggregation.newAggregation(//
|
||||
project("date") //
|
||||
.and("tags").minus(10).as("tags_count")//
|
||||
, group("date")//
|
||||
.sum("tags_count").as("count")//
|
||||
).toDbObject("foo", Aggregation.DEFAULT_CONTEXT);
|
||||
|
||||
DBObject group = extractPipelineElement(agg, 1, "$group");
|
||||
assertThat(getAsDBObject(group, "count"), is(new BasicDBObjectBuilder().add("$sum", "$tags_count").get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1254
|
||||
*/
|
||||
@Test
|
||||
public void shouldUseAliasedFieldnameForProjectionsIncludingOperationsDownThePipelineWhenUsingSpEL() {
|
||||
|
||||
DBObject agg = Aggregation.newAggregation(//
|
||||
project("date") //
|
||||
.andExpression("tags-10")//
|
||||
, group("date")//
|
||||
.sum("tags_count").as("count")//
|
||||
).toDbObject("foo", Aggregation.DEFAULT_CONTEXT);
|
||||
|
||||
DBObject group = extractPipelineElement(agg, 1, "$group");
|
||||
assertThat(getAsDBObject(group, "count"), is(new BasicDBObjectBuilder().add("$sum", "$tags_count").get()));
|
||||
}
|
||||
|
||||
private DBObject extractPipelineElement(DBObject agg, int index, String operation) {
|
||||
|
||||
List<DBObject> pipeline = (List<DBObject>) agg.get("pipeline");
|
||||
|
||||
@@ -0,0 +1,90 @@
|
||||
/*
|
||||
* Copyright 2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.core.convert.support.GenericConversionService;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.ObjectIdToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToObjectIdConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link AbstractMongoConverter}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class AbstractMongoConverterUnitTests {
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1324
|
||||
*/
|
||||
@Test
|
||||
public void registersObjectIdConvertersExplicitly() {
|
||||
|
||||
DefaultConversionService conversionService = spy(new DefaultConversionService());
|
||||
|
||||
new SampleMongoConverter(conversionService).afterPropertiesSet();
|
||||
|
||||
verify(conversionService).addConverter(StringToObjectIdConverter.INSTANCE);
|
||||
verify(conversionService).addConverter(ObjectIdToStringConverter.INSTANCE);
|
||||
}
|
||||
|
||||
static class SampleMongoConverter extends AbstractMongoConverter {
|
||||
|
||||
public SampleMongoConverter(GenericConversionService conversionService) {
|
||||
super(conversionService);
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoTypeMapper getTypeMapper() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> getMappingContext() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R> R read(Class<R> type, DBObject source) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(Object source, DBObject sink) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object convertToMongoType(Object obj, TypeInformation<?> typeInformation) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public DBRef toDBRef(Object object, MongoPersistentProperty referingProperty) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -21,7 +21,9 @@ import static org.junit.Assert.*;
|
||||
import java.net.URL;
|
||||
import java.text.DateFormat;
|
||||
import java.text.Format;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.Locale;
|
||||
import java.util.UUID;
|
||||
@@ -32,8 +34,10 @@ import org.joda.time.DateTime;
|
||||
import org.junit.Test;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.convert.converter.ConverterFactory;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.core.convert.support.GenericConversionService;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigIntegerConverter;
|
||||
import org.threeten.bp.LocalDateTime;
|
||||
|
||||
@@ -43,12 +47,11 @@ import com.mongodb.DBRef;
|
||||
* Unit tests for {@link CustomConversions}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @auhtor Christoph Strobl
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class CustomConversionsUnitTests {
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void findsBasicReadAndWriteConversions() {
|
||||
|
||||
CustomConversions conversions = new CustomConversions(Arrays.asList(FormatToStringConverter.INSTANCE,
|
||||
@@ -62,7 +65,6 @@ public class CustomConversionsUnitTests {
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void considersSubtypesCorrectly() {
|
||||
|
||||
CustomConversions conversions = new CustomConversions(Arrays.asList(NumberToStringConverter.INSTANCE,
|
||||
@@ -132,6 +134,7 @@ public class CustomConversionsUnitTests {
|
||||
*/
|
||||
@Test
|
||||
public void doesNotConsiderTypeSimpleIfOnlyReadConverterIsRegistered() {
|
||||
|
||||
CustomConversions conversions = new CustomConversions(Arrays.asList(StringToFormatConverter.INSTANCE));
|
||||
assertThat(conversions.isSimpleType(Format.class), is(false));
|
||||
}
|
||||
@@ -257,6 +260,17 @@ public class CustomConversionsUnitTests {
|
||||
assertThat(customConversions.hasCustomWriteTarget(LocalDateTime.class), is(true));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1302
|
||||
*/
|
||||
@Test
|
||||
public void registersConverterFactoryCorrectly() {
|
||||
|
||||
CustomConversions customConversions = new CustomConversions(Collections.singletonList(new FormatConverterFactory()));
|
||||
|
||||
assertThat(customConversions.getCustomWriteTarget(String.class, SimpleDateFormat.class), notNullValue());
|
||||
}
|
||||
|
||||
private static Class<?> createProxyTypeFor(Class<?> type) {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
@@ -331,4 +345,37 @@ public class CustomConversionsUnitTests {
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@WritingConverter
|
||||
static class FormatConverterFactory implements ConverterFactory<String, Format> {
|
||||
|
||||
@Override
|
||||
public <T extends Format> Converter<String, T> getConverter(Class<T> targetType) {
|
||||
return new StringToFormat<T>(targetType);
|
||||
}
|
||||
|
||||
private static final class StringToFormat<T extends Format> implements Converter<String, T> {
|
||||
|
||||
private final Class<T> targetType;
|
||||
|
||||
public StringToFormat(Class<T> targetType) {
|
||||
this.targetType = targetType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public T convert(String source) {
|
||||
|
||||
if (source.length() == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
return targetType.newInstance();
|
||||
} catch (Exception e) {
|
||||
throw new IllegalArgumentException(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -79,6 +79,28 @@ public class DBObjectAccessorUnitTests {
|
||||
new DBObjectAccessor(null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1335
|
||||
*/
|
||||
@Test
|
||||
public void writesAllNestingsCorrectly() {
|
||||
|
||||
MongoPersistentEntity<?> entity = context.getPersistentEntity(TypeWithTwoNestings.class);
|
||||
|
||||
BasicDBObject target = new BasicDBObject();
|
||||
|
||||
DBObjectAccessor accessor = new DBObjectAccessor(target);
|
||||
accessor.put(entity.getPersistentProperty("id"), "id");
|
||||
accessor.put(entity.getPersistentProperty("b"), "b");
|
||||
accessor.put(entity.getPersistentProperty("c"), "c");
|
||||
|
||||
DBObject nestedA = DBObjectTestUtils.getAsDBObject(target, "a");
|
||||
|
||||
assertThat(nestedA, is(notNullValue()));
|
||||
assertThat(nestedA.get("b"), is((Object) "b"));
|
||||
assertThat(nestedA.get("c"), is((Object) "c"));
|
||||
}
|
||||
|
||||
static class ProjectingType {
|
||||
|
||||
String name;
|
||||
@@ -91,4 +113,10 @@ public class DBObjectAccessorUnitTests {
|
||||
String c;
|
||||
}
|
||||
|
||||
static class TypeWithTwoNestings {
|
||||
|
||||
String id;
|
||||
@Field("a.b") String b;
|
||||
@Field("a.c") String c;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2402,10 +2402,10 @@ public class MappingMongoConverterUnitTests {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (source.equals("foo-enum-value")) {
|
||||
if ("foo-enum-value".equals(source)) {
|
||||
return FooBarEnum.FOO;
|
||||
}
|
||||
if (source.equals("bar-enum-value")) {
|
||||
if ("bar-enum-value".equals(source)) {
|
||||
return FooBarEnum.BAR;
|
||||
}
|
||||
|
||||
|
||||
@@ -775,6 +775,34 @@ public class QueryMapperUnitTests {
|
||||
assertThat(dbo, isBsonObject().containing("geoJsonPoint.$geoWithin.$geometry.type", "Polygon"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1269
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldRetainNumericMapKey() {
|
||||
|
||||
Query query = query(where("map.1.stringProperty").is("ba'alzamon"));
|
||||
|
||||
DBObject dbo = mapper.getMappedObject(query.getQueryObject(),
|
||||
context.getPersistentEntity(EntityWithComplexValueTypeMap.class));
|
||||
|
||||
assertThat(dbo.containsField("map.1.stringProperty"), is(true));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1269
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldRetainNumericPositionInList() {
|
||||
|
||||
Query query = query(where("list.1.stringProperty").is("ba'alzamon"));
|
||||
|
||||
DBObject dbo = mapper.getMappedObject(query.getQueryObject(),
|
||||
context.getPersistentEntity(EntityWithComplexValueTypeList.class));
|
||||
|
||||
assertThat(dbo.containsField("list.1.stringProperty"), is(true));
|
||||
}
|
||||
|
||||
@Document
|
||||
public class Foo {
|
||||
@Id private ObjectId id;
|
||||
@@ -875,4 +903,18 @@ public class QueryMapperUnitTests {
|
||||
GeoJsonPoint geoJsonPoint;
|
||||
@Field("geoJsonPointWithNameViaFieldAnnotation") GeoJsonPoint namedGeoJsonPoint;
|
||||
}
|
||||
|
||||
static class SimpeEntityWithoutId {
|
||||
|
||||
String stringProperty;
|
||||
Integer integerProperty;
|
||||
}
|
||||
|
||||
static class EntityWithComplexValueTypeMap {
|
||||
Map<Integer, SimpeEntityWithoutId> map;
|
||||
}
|
||||
|
||||
static class EntityWithComplexValueTypeList {
|
||||
List<SimpeEntityWithoutId> list;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,97 @@
|
||||
/*
|
||||
* Copyright 2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.hamcrest.core.Is.*;
|
||||
import static org.hamcrest.core.IsNull.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.junit.Assume.*;
|
||||
import static org.mockito.Matchers.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.springframework.data.mongodb.util.MongoClientVersion.*;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link ReflectiveDBRefResolver}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class ReflectiveDBRefResolverUnitTests {
|
||||
|
||||
@Mock MongoDbFactory dbFactoryMock;
|
||||
@Mock DBRef dbRefMock;
|
||||
@Mock DB dbMock;
|
||||
@Mock DBCollection collectionMock;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
when(dbRefMock.getCollectionName()).thenReturn("collection-1");
|
||||
when(dbRefMock.getId()).thenReturn("id-1");
|
||||
when(dbFactoryMock.getDb()).thenReturn(dbMock);
|
||||
when(dbMock.getCollection(eq("collection-1"))).thenReturn(collectionMock);
|
||||
when(collectionMock.findOne(eq("id-1"))).thenReturn(new BasicDBObject("_id", "id-1"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1193
|
||||
*/
|
||||
@Test
|
||||
public void fetchShouldNotLookUpDbWhenUsingDriverVersion2() {
|
||||
|
||||
assumeThat(isMongo3Driver(), is(false));
|
||||
|
||||
ReflectiveDBRefResolver.fetch(dbFactoryMock, dbRefMock);
|
||||
|
||||
verify(dbFactoryMock, never()).getDb();
|
||||
verify(dbFactoryMock, never()).getDb(anyString());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1193
|
||||
*/
|
||||
@Test
|
||||
public void fetchShouldUseDbToResolveDbRefWhenUsingDriverVersion3() {
|
||||
|
||||
assumeThat(isMongo3Driver(), is(true));
|
||||
|
||||
assertThat(ReflectiveDBRefResolver.fetch(dbFactoryMock, dbRefMock), notNullValue());
|
||||
verify(dbFactoryMock, times(1)).getDb();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1193
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void fetchShouldThrowExceptionWhenDbFactoryIsNullUsingDriverVersion3() {
|
||||
|
||||
assumeThat(isMongo3Driver(), is(true));
|
||||
|
||||
ReflectiveDBRefResolver.fetch(null, dbRefMock);
|
||||
}
|
||||
}
|
||||
@@ -20,9 +20,13 @@ import static org.hamcrest.collection.IsMapContaining.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.springframework.data.mongodb.core.DBObjectTestUtils.*;
|
||||
import static org.springframework.data.mongodb.test.util.IsBsonObject.*;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.hamcrest.Matcher;
|
||||
import org.hamcrest.collection.IsIterableContainingInOrder;
|
||||
@@ -39,6 +43,9 @@ import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.DBObjectTestUtils;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapperUnitTests.ClassWithEnum.Allocation;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapperUnitTests.ClassWithEnum.AllocationToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapperUnitTests.ClassWithEnum.StringToAllocationConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
@@ -428,8 +435,8 @@ public class UpdateMapperUnitTests {
|
||||
public void rendersNestedDbRefCorrectly() {
|
||||
|
||||
Update update = new Update().pull("nested.dbRefAnnotatedList.id", "2");
|
||||
DBObject mappedObject = mapper
|
||||
.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Wrapper.class));
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(Wrapper.class));
|
||||
|
||||
DBObject pullClause = getAsDBObject(mappedObject, "$pull");
|
||||
assertThat(pullClause.containsField("mapped.dbRefAnnotatedList"), is(true));
|
||||
@@ -524,7 +531,6 @@ public class UpdateMapperUnitTests {
|
||||
assertThat(((DBObject) updateValue).get("_class").toString(),
|
||||
equalTo("org.springframework.data.mongodb.core.convert.UpdateMapperUnitTests$ModelImpl"));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -595,6 +601,274 @@ public class UpdateMapperUnitTests {
|
||||
assertThat($unset, equalTo(new BasicDBObjectBuilder().add("dbRefAnnotatedList.$", 1).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1210
|
||||
*/
|
||||
@Test
|
||||
public void mappingEachOperatorShouldNotAddTypeInfoForNonInterfaceNonAbstractTypes() {
|
||||
|
||||
Update update = new Update().addToSet("nestedDocs").each(new NestedDocument("nested-1"),
|
||||
new NestedDocument("nested-2"));
|
||||
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(DocumentWithNestedCollection.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().notContaining("$addToSet.nestedDocs.$each.[0]._class"));
|
||||
assertThat(mappedUpdate, isBsonObject().notContaining("$addToSet.nestedDocs.$each.[1]._class"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1210
|
||||
*/
|
||||
@Test
|
||||
public void mappingEachOperatorShouldAddTypeHintForInterfaceTypes() {
|
||||
|
||||
Update update = new Update().addToSet("models").each(new ModelImpl(1), new ModelImpl(2));
|
||||
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ListModelWrapper.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$addToSet.models.$each.[0]._class", ModelImpl.class.getName()));
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$addToSet.models.$each.[1]._class", ModelImpl.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1210
|
||||
*/
|
||||
@Test
|
||||
public void mappingEachOperatorShouldAddTypeHintForAbstractTypes() {
|
||||
|
||||
Update update = new Update().addToSet("list").each(new ConcreteChildClass("foo", "one"),
|
||||
new ConcreteChildClass("bar", "two"));
|
||||
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ParentClass.class));
|
||||
|
||||
assertThat(mappedUpdate,
|
||||
isBsonObject().containing("$addToSet.aliased.$each.[0]._class", ConcreteChildClass.class.getName()));
|
||||
assertThat(mappedUpdate,
|
||||
isBsonObject().containing("$addToSet.aliased.$each.[1]._class", ConcreteChildClass.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1210
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldOnlyRemoveTypeHintFromTopLevelTypeInCaseOfNestedDocument() {
|
||||
|
||||
WrapperAroundInterfaceType wait = new WrapperAroundInterfaceType();
|
||||
wait.interfaceType = new ModelImpl(1);
|
||||
|
||||
Update update = new Update().addToSet("listHoldingConcretyTypeWithInterfaceTypeAttribute").each(wait);
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(DomainTypeWithListOfConcreteTypesHavingSingleInterfaceTypeAttribute.class));
|
||||
|
||||
assertThat(mappedUpdate,
|
||||
isBsonObject().notContaining("$addToSet.listHoldingConcretyTypeWithInterfaceTypeAttribute.$each.[0]._class"));
|
||||
assertThat(mappedUpdate,
|
||||
isBsonObject().containing(
|
||||
"$addToSet.listHoldingConcretyTypeWithInterfaceTypeAttribute.$each.[0].interfaceType._class",
|
||||
ModelImpl.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1210
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldRetainTypeInformationOfNestedListWhenUpdatingConcreteyParentType() {
|
||||
|
||||
ListModelWrapper lmw = new ListModelWrapper();
|
||||
lmw.models = Collections.<Model> singletonList(new ModelImpl(1));
|
||||
|
||||
Update update = new Update().set("concreteTypeWithListAttributeOfInterfaceType", lmw);
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().notContaining("$set.concreteTypeWithListAttributeOfInterfaceType._class"));
|
||||
assertThat(
|
||||
mappedUpdate,
|
||||
isBsonObject().containing("$set.concreteTypeWithListAttributeOfInterfaceType.models.[0]._class",
|
||||
ModelImpl.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1236
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldRetainTypeInformationForObjectValues() {
|
||||
|
||||
Update update = new Update().set("value", new NestedDocument("kaladin"));
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithObject.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.value.name", "kaladin"));
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.value._class", NestedDocument.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1236
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldNotRetainTypeInformationForConcreteValues() {
|
||||
|
||||
Update update = new Update().set("concreteValue", new NestedDocument("shallan"));
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithObject.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.concreteValue.name", "shallan"));
|
||||
assertThat(mappedUpdate, isBsonObject().notContaining("$set.concreteValue._class"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1236
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldRetainTypeInformationForObjectValuesWithAlias() {
|
||||
|
||||
Update update = new Update().set("value", new NestedDocument("adolin"));
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithAliasedObject.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.renamed-value.name", "adolin"));
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.renamed-value._class", NestedDocument.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1236
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldRetrainTypeInformationWhenValueTypeOfMapDoesNotMatchItsDeclaration() {
|
||||
|
||||
Map<Object, Object> map = Collections.<Object, Object> singletonMap("szeth", new NestedDocument("son-son-vallano"));
|
||||
|
||||
Update update = new Update().set("map", map);
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithObjectMap.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.map.szeth.name", "son-son-vallano"));
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.map.szeth._class", NestedDocument.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1236
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldNotContainTypeInformationWhenValueTypeOfMapMatchesDeclaration() {
|
||||
|
||||
Map<Object, NestedDocument> map = Collections.<Object, NestedDocument> singletonMap("jasnah", new NestedDocument(
|
||||
"kholin"));
|
||||
|
||||
Update update = new Update().set("concreteMap", map);
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithObjectMap.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.concreteMap.jasnah.name", "kholin"));
|
||||
assertThat(mappedUpdate, isBsonObject().notContaining("$set.concreteMap.jasnah._class"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1250
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void mapsUpdateWithBothReadingAndWritingConverterRegistered() {
|
||||
|
||||
CustomConversions conversions = new CustomConversions(
|
||||
Arrays.asList(AllocationToStringConverter.INSTANCE, StringToAllocationConverter.INSTANCE));
|
||||
|
||||
MongoMappingContext mappingContext = new MongoMappingContext();
|
||||
mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder());
|
||||
mappingContext.afterPropertiesSet();
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(mock(DbRefResolver.class), mappingContext);
|
||||
converter.setCustomConversions(conversions);
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
UpdateMapper mapper = new UpdateMapper(converter);
|
||||
|
||||
Update update = new Update().set("allocation", Allocation.AVAILABLE);
|
||||
DBObject result = mapper.getMappedObject(update.getUpdateObject(),
|
||||
mappingContext.getPersistentEntity(ClassWithEnum.class));
|
||||
|
||||
assertThat(result, isBsonObject().containing("$set.allocation", Allocation.AVAILABLE.code));
|
||||
}
|
||||
|
||||
/**
|
||||
* see DATAMONGO-1251
|
||||
*/
|
||||
@Test
|
||||
public void mapsNullValueCorrectlyForSimpleTypes() {
|
||||
|
||||
Update update = new Update().set("value", null);
|
||||
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ConcreteChildClass.class));
|
||||
|
||||
DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set");
|
||||
assertThat($set.containsField("value"), is(true));
|
||||
assertThat($set.get("value"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* see DATAMONGO-1251
|
||||
*/
|
||||
@Test
|
||||
public void mapsNullValueCorrectlyForJava8Date() {
|
||||
|
||||
Update update = new Update().set("date", null);
|
||||
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ClassWithJava8Date.class));
|
||||
|
||||
DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set");
|
||||
assertThat($set.containsField("date"), is(true));
|
||||
assertThat($set.get("value"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* see DATAMONGO-1251
|
||||
*/
|
||||
@Test
|
||||
public void mapsNullValueCorrectlyForCollectionTypes() {
|
||||
|
||||
Update update = new Update().set("values", null);
|
||||
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ListModel.class));
|
||||
|
||||
DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set");
|
||||
assertThat($set.containsField("values"), is(true));
|
||||
assertThat($set.get("value"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* see DATAMONGO-1251
|
||||
*/
|
||||
@Test
|
||||
public void mapsNullValueCorrectlyForPropertyOfNestedDocument() {
|
||||
|
||||
Update update = new Update().set("concreteValue.name", null);
|
||||
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithObject.class));
|
||||
|
||||
DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set");
|
||||
assertThat($set.containsField("concreteValue.name"), is(true));
|
||||
assertThat($set.get("concreteValue.name"), nullValue());
|
||||
}
|
||||
|
||||
static class DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes {
|
||||
ListModelWrapper concreteTypeWithListAttributeOfInterfaceType;
|
||||
}
|
||||
|
||||
static class DomainTypeWithListOfConcreteTypesHavingSingleInterfaceTypeAttribute {
|
||||
List<WrapperAroundInterfaceType> listHoldingConcretyTypeWithInterfaceTypeAttribute;
|
||||
}
|
||||
|
||||
static class WrapperAroundInterfaceType {
|
||||
Model interfaceType;
|
||||
}
|
||||
|
||||
@org.springframework.data.mongodb.core.mapping.Document(collection = "DocumentWithReferenceToInterface")
|
||||
static interface DocumentWithReferenceToInterface {
|
||||
|
||||
@@ -631,7 +905,7 @@ public class UpdateMapperUnitTests {
|
||||
|
||||
private @Id String id;
|
||||
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef//
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef //
|
||||
private InterfaceDocumentDefinitionWithoutId referencedDocument;
|
||||
|
||||
public String getId() {
|
||||
@@ -692,10 +966,10 @@ public class UpdateMapperUnitTests {
|
||||
|
||||
String id;
|
||||
|
||||
@Field("aliased")//
|
||||
@Field("aliased") //
|
||||
List<? extends AbstractChildClass> list;
|
||||
|
||||
@Field//
|
||||
@Field //
|
||||
List<Model> listOfInterface;
|
||||
|
||||
public ParentClass(String id, List<? extends AbstractChildClass> list) {
|
||||
@@ -728,6 +1002,10 @@ public class UpdateMapperUnitTests {
|
||||
|
||||
static class DomainEntity {
|
||||
List<NestedEntity> collectionOfNestedEntities;
|
||||
|
||||
public List<NestedEntity> getCollectionOfNestedEntities() {
|
||||
return collectionOfNestedEntities;
|
||||
}
|
||||
}
|
||||
|
||||
static class NestedEntity {
|
||||
@@ -753,10 +1031,10 @@ public class UpdateMapperUnitTests {
|
||||
|
||||
@Id public String id;
|
||||
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef//
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef //
|
||||
public List<Entity> dbRefAnnotatedList;
|
||||
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef//
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef //
|
||||
public Entity dbRefProperty;
|
||||
}
|
||||
|
||||
@@ -770,4 +1048,87 @@ public class UpdateMapperUnitTests {
|
||||
|
||||
@Field("mapped") DocumentWithDBRefCollection nested;
|
||||
}
|
||||
|
||||
static class DocumentWithNestedCollection {
|
||||
List<NestedDocument> nestedDocs;
|
||||
}
|
||||
|
||||
static class NestedDocument {
|
||||
|
||||
String name;
|
||||
|
||||
public NestedDocument(String name) {
|
||||
super();
|
||||
this.name = name;
|
||||
}
|
||||
}
|
||||
|
||||
static class EntityWithObject {
|
||||
|
||||
Object value;
|
||||
NestedDocument concreteValue;
|
||||
}
|
||||
|
||||
static class EntityWithAliasedObject {
|
||||
|
||||
@Field("renamed-value") Object value;
|
||||
}
|
||||
|
||||
static class EntityWithObjectMap {
|
||||
|
||||
Map<Object, Object> map;
|
||||
Map<Object, NestedDocument> concreteMap;
|
||||
}
|
||||
|
||||
static class ClassWithEnum {
|
||||
|
||||
Allocation allocation;
|
||||
|
||||
static enum Allocation {
|
||||
|
||||
AVAILABLE("V"), ALLOCATED("A");
|
||||
|
||||
String code;
|
||||
|
||||
private Allocation(String code) {
|
||||
this.code = code;
|
||||
}
|
||||
|
||||
public static Allocation of(String code) {
|
||||
|
||||
for (Allocation value : values()) {
|
||||
if (value.code.equals(code)) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
}
|
||||
|
||||
static enum AllocationToStringConverter implements Converter<Allocation, String> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public String convert(Allocation source) {
|
||||
return source.code;
|
||||
}
|
||||
}
|
||||
|
||||
static enum StringToAllocationConverter implements Converter<String, Allocation> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public Allocation convert(String source) {
|
||||
return Allocation.of(source);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static class ClassWithJava8Date {
|
||||
|
||||
LocalDate date;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2015 the original author or authors.
|
||||
* Copyright 2015-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,6 +22,7 @@ import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.joda.time.LocalDate;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
@@ -49,6 +50,7 @@ import com.mongodb.WriteConcern;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration
|
||||
@@ -173,4 +175,13 @@ public abstract class AbstractGeoSpatialTests {
|
||||
assertThat(venues.size(), is(11));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1360
|
||||
*/
|
||||
@Test
|
||||
public void mapsQueryContainedInNearQuery() {
|
||||
|
||||
Query query = query(where("openingDate").lt(LocalDate.now()));
|
||||
template.geoNear(NearQuery.near(1.5, 1.7).query(query), Venue.class);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012-2014 the original author or authors.
|
||||
* Copyright 2012-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,16 +18,20 @@ package org.springframework.data.mongodb.core.index;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.hamcrest.Matchers;
|
||||
import org.junit.After;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.RuleChain;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.test.util.CleanMongoDB;
|
||||
import org.springframework.data.mongodb.test.util.MongoVersionRule;
|
||||
import org.springframework.data.util.Version;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
@@ -38,23 +42,22 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration
|
||||
public class MongoPersistentEntityIndexCreatorIntegrationTests {
|
||||
|
||||
public static @ClassRule MongoVersionRule version = MongoVersionRule.atLeast(new Version(2, 6));
|
||||
static final String SAMPLE_TYPE_COLLECTION_NAME = "sampleEntity";
|
||||
static final String RECURSIVE_TYPE_COLLECTION_NAME = "recursiveGenericTypes";
|
||||
|
||||
public static @ClassRule RuleChain rules = RuleChain.outerRule(MongoVersionRule.atLeast(new Version(2, 6))).around(
|
||||
CleanMongoDB.indexes(Arrays.asList(SAMPLE_TYPE_COLLECTION_NAME, RECURSIVE_TYPE_COLLECTION_NAME)));
|
||||
|
||||
@Autowired @Qualifier("mongo1") MongoOperations templateOne;
|
||||
|
||||
@Autowired @Qualifier("mongo2") MongoOperations templateTwo;
|
||||
|
||||
@After
|
||||
public void cleanUp() {
|
||||
templateOne.dropCollection(SampleEntity.class);
|
||||
templateTwo.dropCollection(SampleEntity.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void createsIndexForConfiguredMappingContextOnly() {
|
||||
|
||||
@@ -62,7 +65,42 @@ public class MongoPersistentEntityIndexCreatorIntegrationTests {
|
||||
assertThat(indexInfo, hasSize(greaterThan(0)));
|
||||
assertThat(indexInfo, Matchers.<IndexInfo> hasItem(hasProperty("name", is("prop"))));
|
||||
|
||||
indexInfo = templateTwo.indexOps("sampleEntity").getIndexInfo();
|
||||
indexInfo = templateTwo.indexOps(SAMPLE_TYPE_COLLECTION_NAME).getIndexInfo();
|
||||
assertThat(indexInfo, hasSize(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1202
|
||||
*/
|
||||
@Test
|
||||
public void shouldHonorIndexedPropertiesWithRecursiveMappings() {
|
||||
|
||||
List<IndexInfo> indexInfo = templateOne.indexOps(RecursiveConcreteType.class).getIndexInfo();
|
||||
|
||||
assertThat(indexInfo, hasSize(greaterThan(0)));
|
||||
assertThat(indexInfo, Matchers.<IndexInfo> hasItem(hasProperty("name", is("firstName"))));
|
||||
}
|
||||
|
||||
@Document(collection = RECURSIVE_TYPE_COLLECTION_NAME)
|
||||
static abstract class RecursiveGenericType<RGT extends RecursiveGenericType<RGT>> {
|
||||
|
||||
@Id Long id;
|
||||
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef RGT referrer;
|
||||
|
||||
@Indexed String firstName;
|
||||
|
||||
public RecursiveGenericType(Long id, String firstName, RGT referrer) {
|
||||
this.firstName = firstName;
|
||||
this.id = id;
|
||||
this.referrer = referrer;
|
||||
}
|
||||
}
|
||||
|
||||
static class RecursiveConcreteType extends RecursiveGenericType<RecursiveConcreteType> {
|
||||
|
||||
public RecursiveConcreteType(Long id, String firstName, RecursiveConcreteType referrer) {
|
||||
super(id, firstName, referrer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,6 +26,7 @@ import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Suite;
|
||||
import org.junit.runners.Suite.SuiteClasses;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.core.DBObjectTestUtils;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
|
||||
@@ -148,6 +149,34 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
assertThat(indexDefinitions.get(0).getCollection(), equalTo("CollectionOverride"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1297
|
||||
*/
|
||||
@Test
|
||||
public void resolvesIndexOnDbrefWhenDefined() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(WithDbRef.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertThat(indexDefinitions.get(0).getCollection(), equalTo("withDbRef"));
|
||||
assertThat(indexDefinitions.get(0).getIndexKeys(), equalTo(new BasicDBObjectBuilder().add("indexedDbRef", 1)
|
||||
.get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1297
|
||||
*/
|
||||
@Test
|
||||
public void resolvesIndexOnDbrefWhenDefinedOnNestedElement() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(WrapperOfWithDbRef.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertThat(indexDefinitions.get(0).getCollection(), equalTo("wrapperOfWithDbRef"));
|
||||
assertThat(indexDefinitions.get(0).getIndexKeys(),
|
||||
equalTo(new BasicDBObjectBuilder().add("nested.indexedDbRef", 1).get()));
|
||||
}
|
||||
|
||||
@Document(collection = "Zero")
|
||||
static class IndexOnLevelZero {
|
||||
@Indexed String indexedProperty;
|
||||
@@ -182,6 +211,24 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
@Indexed @Field("customFieldName") String namedProperty;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class WrapperOfWithDbRef {
|
||||
WithDbRef nested;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class WithDbRef {
|
||||
|
||||
@Indexed//
|
||||
@DBRef//
|
||||
NoIndex indexedDbRef;
|
||||
}
|
||||
|
||||
@Document(collection = "no-index")
|
||||
static class NoIndex {
|
||||
@Id String id;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -853,6 +900,19 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1263
|
||||
*/
|
||||
@Test
|
||||
public void shouldConsiderGenericTypeArgumentsOfCollectionElements() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(EntityWithGenericTypeWrapperAsElement.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"),
|
||||
equalTo("listWithGeneircTypeElement.entity.property_index"));
|
||||
}
|
||||
|
||||
@Document
|
||||
static class MixedIndexRoot {
|
||||
|
||||
@@ -1028,6 +1088,15 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
NoCycleButIndenticallNamedPropertiesDeeplyNested propertyWithIndexedStructure;
|
||||
}
|
||||
|
||||
static class GenericEntityWrapper<T> {
|
||||
T entity;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class EntityWithGenericTypeWrapperAsElement {
|
||||
List<GenericEntityWrapper<DocumentWithNamedIndex>> listWithGeneircTypeElement;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static List<IndexDefinitionHolder> prepareMappingContextAndResolveIndexForType(Class<?> type) {
|
||||
|
||||
@@ -143,6 +143,89 @@ public class BasicMongoPersistentEntityUnitTests {
|
||||
verify(propertyMock, never()).getActualType();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1157
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
@Test(expected = MappingException.class)
|
||||
public void verifyShouldThrowErrorForLazyDBRefOnFinalClass() {
|
||||
|
||||
BasicMongoPersistentEntity<AnyDocument> entity = new BasicMongoPersistentEntity<AnyDocument>(
|
||||
ClassTypeInformation.from(AnyDocument.class));
|
||||
org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock(
|
||||
org.springframework.data.mongodb.core.mapping.DBRef.class);
|
||||
when(propertyMock.isDbReference()).thenReturn(true);
|
||||
when(propertyMock.getDBRef()).thenReturn(dbRefMock);
|
||||
when(dbRefMock.lazy()).thenReturn(true);
|
||||
when(propertyMock.getActualType()).thenReturn((Class) Class.class);
|
||||
entity.addPersistentProperty(propertyMock);
|
||||
|
||||
entity.verify();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1157
|
||||
*/
|
||||
@Test(expected = MappingException.class)
|
||||
public void verifyShouldThrowErrorForLazyDBRefArray() {
|
||||
|
||||
BasicMongoPersistentEntity<AnyDocument> entity = new BasicMongoPersistentEntity<AnyDocument>(
|
||||
ClassTypeInformation.from(AnyDocument.class));
|
||||
org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock(
|
||||
org.springframework.data.mongodb.core.mapping.DBRef.class);
|
||||
when(propertyMock.isDbReference()).thenReturn(true);
|
||||
when(propertyMock.getDBRef()).thenReturn(dbRefMock);
|
||||
when(dbRefMock.lazy()).thenReturn(true);
|
||||
when(propertyMock.isArray()).thenReturn(true);
|
||||
entity.addPersistentProperty(propertyMock);
|
||||
|
||||
entity.verify();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1157
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public void verifyShouldPassForLazyDBRefOnNonArrayNonFinalClass() {
|
||||
|
||||
BasicMongoPersistentEntity<AnyDocument> entity = new BasicMongoPersistentEntity<AnyDocument>(
|
||||
ClassTypeInformation.from(AnyDocument.class));
|
||||
org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock(
|
||||
org.springframework.data.mongodb.core.mapping.DBRef.class);
|
||||
when(propertyMock.isDbReference()).thenReturn(true);
|
||||
when(propertyMock.getDBRef()).thenReturn(dbRefMock);
|
||||
when(dbRefMock.lazy()).thenReturn(true);
|
||||
when(propertyMock.getActualType()).thenReturn((Class) Object.class);
|
||||
entity.addPersistentProperty(propertyMock);
|
||||
|
||||
entity.verify();
|
||||
|
||||
verify(propertyMock, times(1)).isDbReference();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1157
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public void verifyShouldPassForNonLazyDBRefOnFinalClass() {
|
||||
|
||||
BasicMongoPersistentEntity<AnyDocument> entity = new BasicMongoPersistentEntity<AnyDocument>(
|
||||
ClassTypeInformation.from(AnyDocument.class));
|
||||
org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock(
|
||||
org.springframework.data.mongodb.core.mapping.DBRef.class);
|
||||
when(propertyMock.isDbReference()).thenReturn(true);
|
||||
when(propertyMock.getDBRef()).thenReturn(dbRefMock);
|
||||
when(dbRefMock.lazy()).thenReturn(false);
|
||||
when(propertyMock.getActualType()).thenReturn((Class) Class.class);
|
||||
entity.addPersistentProperty(propertyMock);
|
||||
|
||||
entity.verify();
|
||||
|
||||
verify(dbRefMock, times(1)).lazy();
|
||||
}
|
||||
|
||||
@Document(collection = "contacts")
|
||||
class Contact {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,17 +16,23 @@
|
||||
package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.context.ApplicationEvent;
|
||||
import org.springframework.context.ApplicationListener;
|
||||
import org.springframework.data.mongodb.core.mapping.PersonPojoStringId;
|
||||
|
||||
public class PersonBeforeSaveListener implements ApplicationListener<BeforeSaveEvent<PersonPojoStringId>> {
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
public final ArrayList<ApplicationEvent> seenEvents = new ArrayList<ApplicationEvent>();
|
||||
public class PersonBeforeSaveListener extends AbstractMongoEventListener<PersonPojoStringId> {
|
||||
|
||||
public void onApplicationEvent(BeforeSaveEvent<PersonPojoStringId> event) {
|
||||
this.seenEvents.add(event);
|
||||
public final List<ApplicationEvent> seenEvents = new ArrayList<ApplicationEvent>();
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeSave(java.lang.Object, com.mongodb.DBObject)
|
||||
*/
|
||||
@Override
|
||||
public void onBeforeSave(PersonPojoStringId source, DBObject dbo) {
|
||||
seenEvents.add(new BeforeSaveEvent<PersonPojoStringId>(source, dbo));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,12 +15,40 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapreduce;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.test.util.IsBsonObject.*;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MapReduceOptionsTests {
|
||||
|
||||
@Test
|
||||
public void testFinalize() {
|
||||
new MapReduceOptions().finalizeFunction("code");
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1334
|
||||
*/
|
||||
@Test
|
||||
public void limitShouldBeIncludedCorrectly() {
|
||||
|
||||
MapReduceOptions options = new MapReduceOptions();
|
||||
options.limit(10);
|
||||
|
||||
assertThat(options.getOptionsObject(), isBsonObject().containing("limit", 10));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1334
|
||||
*/
|
||||
@Test
|
||||
public void limitShouldNotBePresentInDboWhenNotSet() {
|
||||
assertThat(new MapReduceOptions().getOptionsObject(), isBsonObject().notContaining("limit"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -118,13 +118,13 @@ public class MapReduceTests {
|
||||
|
||||
int size = 0;
|
||||
for (ContentAndVersion cv : results) {
|
||||
if (cv.getId().equals("Resume")) {
|
||||
if ("Resume".equals(cv.getId())) {
|
||||
assertEquals(6, cv.getValue().longValue());
|
||||
}
|
||||
if (cv.getId().equals("Schema")) {
|
||||
if ("Schema".equals(cv.getId())) {
|
||||
assertEquals(2, cv.getValue().longValue());
|
||||
}
|
||||
if (cv.getId().equals("mongoDB How-To")) {
|
||||
if ("mongoDB How-To".equals(cv.getId())) {
|
||||
assertEquals(2, cv.getValue().longValue());
|
||||
}
|
||||
size++;
|
||||
@@ -141,13 +141,13 @@ public class MapReduceTests {
|
||||
new MapReduceOptions().outputCollection("jmr2_out"), NumberAndVersion.class);
|
||||
int size = 0;
|
||||
for (NumberAndVersion nv : results) {
|
||||
if (nv.getId().equals("1")) {
|
||||
if ("1".equals(nv.getId())) {
|
||||
assertEquals(2, nv.getValue().longValue());
|
||||
}
|
||||
if (nv.getId().equals("2")) {
|
||||
if ("2".equals(nv.getId())) {
|
||||
assertEquals(6, nv.getValue().longValue());
|
||||
}
|
||||
if (nv.getId().equals("3")) {
|
||||
if ("3".equals(nv.getId())) {
|
||||
assertEquals(2, nv.getValue().longValue());
|
||||
}
|
||||
size++;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,8 +18,7 @@ package org.springframework.data.mongodb.core.query;
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import nl.jqno.equalsverifier.EqualsVerifier;
|
||||
import nl.jqno.equalsverifier.Warning;
|
||||
import static org.springframework.data.mongodb.test.util.IsBsonObject.*;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
@@ -27,11 +26,15 @@ import org.springframework.data.domain.Sort.Direction;
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
import nl.jqno.equalsverifier.EqualsVerifier;
|
||||
import nl.jqno.equalsverifier.Warning;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link BasicQuery}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author John Willemin
|
||||
*/
|
||||
public class BasicQueryUnitTests {
|
||||
|
||||
@@ -137,4 +140,48 @@ public class BasicQueryUnitTests {
|
||||
assertThat(query1, is(not(equalTo(query2))));
|
||||
assertThat(query1.hashCode(), is(not(query2.hashCode())));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1387
|
||||
*/
|
||||
@Test
|
||||
public void returnsFieldsCorrectly() {
|
||||
|
||||
String qry = "{ \"name\" : \"Thomas\"}";
|
||||
String fields = "{\"name\":1, \"age\":1}";
|
||||
|
||||
BasicQuery query1 = new BasicQuery(qry, fields);
|
||||
|
||||
assertThat(query1.getFieldsObject(), isBsonObject().containing("name").containing("age"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1387
|
||||
*/
|
||||
@Test
|
||||
public void handlesFieldsIncludeCorrectly() {
|
||||
|
||||
String qry = "{ \"name\" : \"Thomas\"}";
|
||||
|
||||
BasicQuery query1 = new BasicQuery(qry);
|
||||
query1.fields().include("name");
|
||||
|
||||
assertThat(query1.getFieldsObject(), isBsonObject().containing("name"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1387
|
||||
*/
|
||||
@Test
|
||||
public void combinesFieldsIncludeCorrectly() {
|
||||
|
||||
String qry = "{ \"name\" : \"Thomas\"}";
|
||||
String fields = "{\"name\":1, \"age\":1}";
|
||||
|
||||
BasicQuery query1 = new BasicQuery(qry, fields);
|
||||
query1.fields().include("gender");
|
||||
|
||||
assertThat(query1.getFieldsObject(), isBsonObject().containing("name").containing("age").containing("gender"));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -23,9 +23,11 @@ import java.util.Map;
|
||||
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.mongodb.core.DBObjectTestUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Test cases for {@link Update}.
|
||||
@@ -484,4 +486,22 @@ public class UpdateTests {
|
||||
public void pushShouldThrowExceptionWhenGivenNegativePosition() {
|
||||
new Update().push("foo").atPosition(-1).each("booh");
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1346
|
||||
*/
|
||||
@Test
|
||||
public void registersMultiplePullAllClauses() {
|
||||
|
||||
Update update = new Update();
|
||||
update.pullAll("field1", new String[] { "foo" });
|
||||
update.pullAll("field2", new String[] { "bar" });
|
||||
|
||||
DBObject updateObject = update.getUpdateObject();
|
||||
|
||||
DBObject pullAll = DBObjectTestUtils.getAsDBObject(updateObject, "$pullAll");
|
||||
|
||||
assertThat(pullAll.get("field1"), is(notNullValue()));
|
||||
assertThat(pullAll.get("field2"), is(notNullValue()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,7 +20,7 @@ import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
|
||||
/**
|
||||
* Sample contactt domain class.
|
||||
* Sample contact domain class.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
|
||||
@@ -553,6 +553,128 @@ public class MongoQueryCreatorUnitTests {
|
||||
assertThat(query, is(query(where("address.geo").near(point).minDistance(10D).maxDistance(20D))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1229
|
||||
*/
|
||||
@Test
|
||||
public void appliesIgnoreCaseToLeafProperty() {
|
||||
|
||||
PartTree tree = new PartTree("findByAddressStreetIgnoreCase", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "Street");
|
||||
|
||||
assertThat(new MongoQueryCreator(tree, accessor, context).createQuery(), is(notNullValue()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1232
|
||||
*/
|
||||
@Test
|
||||
public void ignoreCaseShouldEscapeSource() {
|
||||
|
||||
PartTree tree = new PartTree("findByUsernameIgnoreCase", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "con.flux+");
|
||||
|
||||
Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
|
||||
|
||||
assertThat(query, is(query(where("username").regex("^\\Qcon.flux+\\E$", "i"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1232
|
||||
*/
|
||||
@Test
|
||||
public void ignoreCaseShouldEscapeSourceWhenUsedForStartingWith() {
|
||||
|
||||
PartTree tree = new PartTree("findByUsernameStartingWithIgnoreCase", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "dawns.light+");
|
||||
|
||||
Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
|
||||
|
||||
assertThat(query, is(query(where("username").regex("^\\Qdawns.light+\\E", "i"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1232
|
||||
*/
|
||||
@Test
|
||||
public void ignoreCaseShouldEscapeSourceWhenUsedForEndingWith() {
|
||||
|
||||
PartTree tree = new PartTree("findByUsernameEndingWithIgnoreCase", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "new.ton+");
|
||||
|
||||
Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
|
||||
|
||||
assertThat(query, is(query(where("username").regex("\\Qnew.ton+\\E$", "i"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1232
|
||||
*/
|
||||
@Test
|
||||
public void likeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() {
|
||||
|
||||
PartTree tree = new PartTree("findByUsernameLike", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "*fire.fight+*");
|
||||
|
||||
Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
|
||||
|
||||
assertThat(query, is(query(where("username").regex(".*\\Qfire.fight+\\E.*"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1232
|
||||
*/
|
||||
@Test
|
||||
public void likeShouldEscapeSourceWhenUsedWithLeadingWildcard() {
|
||||
|
||||
PartTree tree = new PartTree("findByUsernameLike", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "*steel.heart+");
|
||||
|
||||
Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
|
||||
|
||||
assertThat(query, is(query(where("username").regex(".*\\Qsteel.heart+\\E"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1232
|
||||
*/
|
||||
@Test
|
||||
public void likeShouldEscapeSourceWhenUsedWithTrailingWildcard() {
|
||||
|
||||
PartTree tree = new PartTree("findByUsernameLike", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "cala.mity+*");
|
||||
|
||||
Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
|
||||
assertThat(query, is(query(where("username").regex("\\Qcala.mity+\\E.*"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1232
|
||||
*/
|
||||
@Test
|
||||
public void likeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() {
|
||||
|
||||
PartTree tree = new PartTree("findByUsernameLike", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "*");
|
||||
|
||||
Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
|
||||
assertThat(query, is(query(where("username").regex(".*"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1342
|
||||
*/
|
||||
@Test
|
||||
public void bindsNullValueToContainsClause() {
|
||||
|
||||
PartTree partTree = new PartTree("emailAddressesContains", User.class);
|
||||
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, new Object[] { null });
|
||||
Query query = new MongoQueryCreator(partTree, accessor, context).createQuery();
|
||||
|
||||
assertThat(query, is(query(where("emailAddresses").in((Object) null))));
|
||||
}
|
||||
|
||||
interface PersonRepository extends Repository<Person, Long> {
|
||||
|
||||
List<Person> findByLocationNearAndFirstname(Point location, Distance maxDistance, String firstname);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -177,6 +177,17 @@ public class MongoQueryMethodUnitTests {
|
||||
assertThat(method.getQueryMetaAttributes().getSnapshot(), is(true));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1266
|
||||
*/
|
||||
@Test
|
||||
public void fallsBackToRepositoryDomainTypeIfMethodDoesNotReturnADomainType() throws Exception {
|
||||
|
||||
MongoQueryMethod method = queryMethod("deleteByUserName", String.class);
|
||||
|
||||
assertThat(method.getEntityInformation().getJavaType(), is(typeCompatibleWith(User.class)));
|
||||
}
|
||||
|
||||
private MongoQueryMethod queryMethod(String name, Class<?>... parameters) throws Exception {
|
||||
Method method = PersonRepository.class.getMethod(name, parameters);
|
||||
return new MongoQueryMethod(method, new DefaultRepositoryMetadata(PersonRepository.class), context);
|
||||
@@ -210,6 +221,10 @@ public class MongoQueryMethodUnitTests {
|
||||
@Meta(snapshot = true)
|
||||
List<User> metaWithSnapshotUsage();
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1266
|
||||
*/
|
||||
void deleteByUserName(String userName);
|
||||
}
|
||||
|
||||
interface SampleRepository extends Repository<Contact, Long> {
|
||||
|
||||
@@ -24,6 +24,9 @@ import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import javax.xml.bind.DatatypeConverter;
|
||||
|
||||
import org.bson.BSON;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
@@ -289,6 +292,23 @@ public class StringBasedMongoQueryUnitTests {
|
||||
assertThat(query.getQueryObject(), is(new BasicDBObjectBuilder().add("key", "value").get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1290
|
||||
*/
|
||||
@Test
|
||||
public void shouldSupportNonQuotedBinaryDataReplacement() throws Exception {
|
||||
|
||||
byte[] binaryData = "Matthews".getBytes("UTF-8");
|
||||
ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, binaryData);
|
||||
StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAsBinary", byte[].class);
|
||||
|
||||
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor);
|
||||
org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : { '$binary' : '"
|
||||
+ DatatypeConverter.printBase64Binary(binaryData) + "', '$type' : " + BSON.B_GENERAL + "}}");
|
||||
|
||||
assertThat(query.getQueryObject(), is(reference.getQueryObject()));
|
||||
}
|
||||
|
||||
private StringBasedMongoQuery createQueryForMethod(String name, Class<?>... parameters) throws Exception {
|
||||
|
||||
Method method = SampleRepository.class.getMethod(name, parameters);
|
||||
@@ -301,6 +321,9 @@ public class StringBasedMongoQueryUnitTests {
|
||||
@Query("{ 'lastname' : ?0 }")
|
||||
Person findByLastname(String lastname);
|
||||
|
||||
@Query("{ 'lastname' : ?0 }")
|
||||
Person findByLastnameAsBinary(byte[] lastname);
|
||||
|
||||
@Query("{ 'lastname' : '?0' }")
|
||||
Person findByLastnameQuoted(String lastname);
|
||||
|
||||
|
||||
@@ -90,6 +90,10 @@ public class IsBsonObject<T extends BSONObject> extends TypeSafeMatcher<T> {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (o != null && expectation.not) {
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -16,6 +16,7 @@ Import-Template:
|
||||
javax.tools.*;version="0",
|
||||
javax.net.*;version="0",
|
||||
javax.validation.*;version="${validation:[=.=.=.=,+1.0.0)}";resolution:=optional,
|
||||
javax.xml.bind.*;version=0,
|
||||
org.aopalliance.*;version="[1.0.0, 2.0.0)";resolution:=optional,
|
||||
org.bson.*;version="0",
|
||||
org.objenesis.*;version="${objenesis:[=.=.=, +1.0.0)}";resolution:=optional,
|
||||
|
||||
@@ -16,6 +16,7 @@ include::preface.adoc[]
|
||||
|
||||
:leveloffset: +1
|
||||
include::new-features.adoc[]
|
||||
include::{spring-data-commons-docs}/dependencies.adoc[]
|
||||
include::{spring-data-commons-docs}/repositories.adoc[]
|
||||
:leveloffset: -1
|
||||
|
||||
|
||||
@@ -211,6 +211,7 @@ The MappingMongoConverter can use metadata to drive the mapping of objects to do
|
||||
* `@PersistenceConstructor` - marks a given constructor - even a package protected one - to use when instantiating the object from the database. Constructor arguments are mapped by name to the key values in the retrieved DBObject.
|
||||
* `@Value` - this annotation is part of the Spring Framework . Within the mapping framework it can be applied to constructor arguments. This lets you use a Spring Expression Language statement to transform a key's value retrieved in the database before it is used to construct a domain object. In order to reference a property of a given document one has to use expressions like: `@Value("#root.myProperty")` where `root` refers to the root of the given document.
|
||||
* `@Field` - applied at the field level and described the name of the field as it will be represented in the MongoDB BSON document thus allowing the name to be different than the fieldname of the class.
|
||||
* `@Version` - applied at field level is used for optimistic locking and checked for modification on save operations. The initial value is `zero` which is bumped automatically on every update.
|
||||
|
||||
The mapping metadata infrastructure is defined in a seperate spring-data-commons project that is technology agnostic. Specific subclasses are using in the MongoDB support to support annotation based metadata. Other strategies are also possible to put in place if there is demand.
|
||||
|
||||
|
||||
@@ -381,7 +381,7 @@ MongoDB repository support integrates with the http://www.querydsl.com/[QueryDSL
|
||||
* Adopts better to refactoring changes in domain types
|
||||
* Incremental query definition is easier
|
||||
|
||||
Please refer to the QueryDSL documentation which describes how to bootstrap your environment for APT based code generation http://source.mysema.com/static/querydsl/2.1.2/reference/html/ch02.html#d0e112[using Maven] or http://source.mysema.com/static/querydsl/2.1.2/reference/html/ch02.html#d0e131[using Ant].
|
||||
Please refer to the http://www.querydsl.com/static/querydsl/latest/reference/html/[QueryDSL documentation] which describes how to bootstrap your environment for APT based code generation using Maven or Ant.
|
||||
|
||||
Using QueryDSL you will be able to write queries as shown below
|
||||
|
||||
|
||||
@@ -64,7 +64,7 @@ You will also need to add the location of the Spring Milestone repository for ma
|
||||
</repositories>
|
||||
----
|
||||
|
||||
The repository is also http://shrub.appspot.com/maven.springframework.org/milestone/org/springframework/data/[browseable here].
|
||||
The repository is also http://repo.spring.io/milestone/org/springframework/data/[browseable here].
|
||||
|
||||
You may also want to set the logging level to `DEBUG` to see some additional information, edit the log4j.properties file to have
|
||||
|
||||
@@ -195,11 +195,11 @@ public class AppConfig {
|
||||
|
||||
This approach allows you to use the standard `com.mongodb.Mongo` API that you may already be used to using but also pollutes the code with the UnknownHostException checked exception. The use of the checked exception is not desirable as Java based bean metadata uses methods as a means to set object dependencies, making the calling code cluttered.
|
||||
|
||||
An alternative is to register an instance of `com.mongodb.Mongo` instance with the container using Spring's` MongoFactoryBean`. As compared to instantiating a `com.mongodb.Mongo` instance directly, the FactoryBean approach does not throw a checked exception and has the added advantage of also providing the container with an ExceptionTranslator implementation that translates MongoDB exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annoated with the `@Repository` annotation. This hierarchy and use of `@Repository` is described in http://docs.spring.io/spring/docs/current/spring-framework-reference/html/dao.html[Spring's DAO support features].
|
||||
An alternative is to register an instance of `com.mongodb.Mongo` instance with the container using Spring's `MongoClientFactoryBean`. As compared to instantiating a `com.mongodb.Mongo` instance directly, the FactoryBean approach does not throw a checked exception and has the added advantage of also providing the container with an ExceptionTranslator implementation that translates MongoDB exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation. This hierarchy and use of `@Repository` is described in http://docs.spring.io/spring/docs/current/spring-framework-reference/html/dao.html[Spring's DAO support features].
|
||||
|
||||
An example of a Java based bean metadata that supports exception translation on `@Repository` annotated classes is shown below:
|
||||
|
||||
.Registering a com.mongodb.Mongo object using Spring's MongoFactoryBean and enabling Spring's exception translation support
|
||||
.Registering a com.mongodb.Mongo object using Spring's MongoClientFactoryBean and enabling Spring's exception translation support
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@@ -209,8 +209,8 @@ public class AppConfig {
|
||||
/*
|
||||
* Factory bean that creates the com.mongodb.Mongo instance
|
||||
*/
|
||||
public @Bean MongoFactoryBean mongo() {
|
||||
MongoFactoryBean mongo = new MongoFactoryBean();
|
||||
public @Bean MongoClientFactoryBean mongo() {
|
||||
MongoClientFactoryBean mongo = new MongoClientFactoryBean();
|
||||
mongo.setHost("localhost");
|
||||
return mongo;
|
||||
}
|
||||
@@ -218,7 +218,7 @@ public class AppConfig {
|
||||
----
|
||||
====
|
||||
|
||||
To access the `com.mongodb.Mongo` object created by the `MongoFactoryBean` in other `@Configuration` or your own classes, use a "`private @Autowired Mongo mongo;`" field.
|
||||
To access the `com.mongodb.Mongo` object created by the `MongoClientFactoryBean` in other `@Configuration` or your own classes, use a "`private @Autowired Mongo mongo;`" field.
|
||||
|
||||
[[mongo.mongo-xml-config]]
|
||||
=== Registering a Mongo instance using XML based metadata
|
||||
@@ -938,6 +938,40 @@ You can use several overloaded methods to remove an object from the database.
|
||||
|
||||
* *remove* Remove the given document based on one of the following: a specific object instance, a query document criteria combined with a class or a query document criteria combined with a specific collection name.
|
||||
|
||||
[[mongo-template.optimistic-locking]]
|
||||
=== Optimistic locking
|
||||
|
||||
The `@Version` annotation provides a JPA similar semantic in the context of MongoDB and makes sure updates are only applied to documents with matching version. Therefore the actual value of the version property is added to the update query in a way that the update won't have any effect if another operation altered the document in between. In that case an `OptimisticLockingFailureException` is thrown.
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Document
|
||||
class Person {
|
||||
|
||||
@Id String id;
|
||||
String firstname;
|
||||
String lastname;
|
||||
@Version Long version;
|
||||
}
|
||||
|
||||
Person daenerys = template.insert(new Person("Daenerys")); <1>
|
||||
|
||||
Person tmp = template.findOne(query(where("id").is(daenerys.getId())), Person.class); <2>
|
||||
|
||||
daenerys.setLastname("Targaryen");
|
||||
template.save(daenerys); <3>
|
||||
|
||||
template.save(tmp); // throws OptimisticLockingFailureException <4>
|
||||
----
|
||||
<1> Initially insert document. `version` is set to `0`.
|
||||
<2> Load the just inserted document; `version` is still `0`.
|
||||
<3> Update document with `version = 0`. Set the `lastname` and bump `version` to `1`.
|
||||
<4> Trying to update the previously loaded document still having `version = 0` fails with `OptimisticLockingFailureException` as the current `version` is `1`.
|
||||
====
|
||||
|
||||
IMPORTANT: Using MongoDB driver version 3 requires setting the `WriteConcern` to `ACKNOWLEDGED`. Otherwise `OptimisticLockingFailureException` can be silently swallowed.
|
||||
|
||||
[[mongo.query]]
|
||||
== Querying Documents
|
||||
|
||||
@@ -1025,7 +1059,7 @@ The `Query` class has some additional methods used to provide options for the qu
|
||||
* `Field` *fields* `()` used to define fields to be included in the query results
|
||||
* `Query` *limit* `(int limit)` used to limit the size of the returned results to the provided limit (used for paging)
|
||||
* `Query` *skip* `(int skip)` used to skip the provided number of documents in the results (used for paging)
|
||||
* `Sort` *sort* `()` used to provide sort definition for the results
|
||||
* `Query` *with* `(Sort sort)` used to provide sort definition for the results
|
||||
|
||||
[[mongo-template.querying]]
|
||||
=== Methods for querying for documents
|
||||
@@ -1159,13 +1193,13 @@ The geo near operations return a `GeoResults` wrapper object that encapsulates `
|
||||
[[mongo.geo-json]]
|
||||
=== GeoJSON Support
|
||||
|
||||
MongoDB supports http://geojeson.org/[GeoJSON] and simple (legacy) coordinate pairs for geospatial data. Those formats can both be used for storing as well as querying data.
|
||||
MongoDB supports http://geojson.org/[GeoJSON] and simple (legacy) coordinate pairs for geospatial data. Those formats can both be used for storing as well as querying data.
|
||||
|
||||
NOTE: Please refer to the http://docs.mongodb.org/manual/core/2dsphere/#geospatial-indexes-store-geojson/[MongoDB manual on GeoJSON support] to learn about requirements and restrictions.
|
||||
|
||||
==== GeoJSON types in domain classes
|
||||
|
||||
Usage of http://geojeson.org/[GeoJSON] types in domain classes is straight forward. The `org.springframework.data.mongodb.core.geo` package contains types like `GeoJsonPoint`, `GeoJsonPolygon` and others. Those are extensions to the existing `org.springframework.data.geo` types.
|
||||
Usage of http://geojson.org/[GeoJSON] types in domain classes is straight forward. The `org.springframework.data.mongodb.core.geo` package contains types like `GeoJsonPoint`, `GeoJsonPolygon` and others. Those are extensions to the existing `org.springframework.data.geo` types.
|
||||
|
||||
====
|
||||
[source,java]
|
||||
@@ -2013,7 +2047,7 @@ class MyConverter implements Converter<Person, String> { … }
|
||||
class MyConverter implements Converter<String, Person> { … }
|
||||
----
|
||||
|
||||
In case you write a `Converter` whose source and target type are native Mongo types there's no way for us to determine whether we should consider it as reading or writing converter. Registering the converter instance as both might lead to unwanted results then. E.g. a `Converter<String, Long>` is ambiguous although it probably does not make sense to try to convert all `String`s into `Long`s when writing. To be generally able to force the infrastructure to register a converter for one way only we provide `@ReadingConverter` as well as `@WritingConverter` to be used at the converter implementation.
|
||||
In case you write a `Converter` whose source and target type are native Mongo types there's no way for us to determine whether we should consider it as reading or writing converter. Registering the converter instance as both might lead to unwanted results then. E.g. a `Converter<String, Long>` is ambiguous although it probably does not make sense to try to convert all `String` instances into `Long` instances when writing. To be generally able to force the infrastructure to register a converter for one way only we provide `@ReadingConverter` as well as `@WritingConverter` to be used at the converter implementation.
|
||||
|
||||
[[mongo-template.index-and-collections]]
|
||||
== Index and Collection management
|
||||
|
||||
@@ -1,6 +1,229 @@
|
||||
Spring Data MongoDB Changelog
|
||||
=============================
|
||||
|
||||
Changes in version 1.9.0.RELEASE (2016-04-06)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1407 - Add pull request template.
|
||||
* DATAMONGO-1405 - Release 1.9 GA (Hopper).
|
||||
* DATAMONGO-1401 - GeoJsonPoint error on update.
|
||||
* DATAMONGO-1398 - Update documentation for Spring Data MongoDB 1.9.
|
||||
* DATAMONGO-1396 - Exception when creating geo within Criteria using Aggregation.
|
||||
|
||||
|
||||
Changes in version 1.9.0.RC1 (2016-03-18)
|
||||
-----------------------------------------
|
||||
* DATAMONGO-1400 - Adapt to rename of Spring Data Commons' Tuple to Pair.
|
||||
* DATAMONGO-1397 - MongoTemplate.geoNear() do not log the Query.
|
||||
* DATAMONGO-1392 - Release 1.9 RC1 (Hopper).
|
||||
* DATAMONGO-1389 - Adapt test case to changes made for improved type prediction infrastructure.
|
||||
* DATAMONGO-1387 - BasicQuery.fields().include() doesn't stick, even though Query.fields().include() does.
|
||||
* DATAMONGO-1373 - Problem with custom annotations with AliasFor annotated attributes.
|
||||
* DATAMONGO-1326 - Add support for $lookup to aggregation.
|
||||
* DATAMONGO-1245 - Add support for Query-By-Example.
|
||||
|
||||
|
||||
Changes in version 1.8.4.RELEASE (2016-02-23)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1381 - Release 1.8.4 (Gosling SR4).
|
||||
* DATAMONGO-1380 - Improve logging in MongoChangeSetPersister.
|
||||
* DATAMONGO-1378 - Update reference documentation: Change Query.sort() to Query.with(Sort sort).
|
||||
* DATAMONGO-1377 - Update JavaDoc: Use @EnableMongoRepositories instead of @EnableJpaRepositories.
|
||||
* DATAMONGO-1376 - Move away from SimpleTypeInformationMapper.INSTANCE.
|
||||
* DATAMONGO-1375 - Fix typo in MongoOperations JavaDoc.
|
||||
* DATAMONGO-1361 - geoNear() queries fail when the accompanying query returns no results.
|
||||
* DATAMONGO-1360 - Cannot query with JSR310.
|
||||
* DATAMONGO-1270 - Update documentation to reflect deprecation of MongoFactoryBean.
|
||||
|
||||
|
||||
Changes in version 1.9.0.M1 (2016-02-12)
|
||||
----------------------------------------
|
||||
* DATAMONGO-1380 - Improve logging in MongoChangeSetPersister.
|
||||
* DATAMONGO-1378 - Update reference documentation: Change Query.sort() to Query.with(Sort sort).
|
||||
* DATAMONGO-1377 - Update JavaDoc: Use @EnableMongoRepositories instead of @EnableJpaRepositories.
|
||||
* DATAMONGO-1376 - Move away from SimpleTypeInformationMapper.INSTANCE.
|
||||
* DATAMONGO-1375 - Fix typo in MongoOperations JavaDoc.
|
||||
* DATAMONGO-1372 - Add converter for Currency.
|
||||
* DATAMONGO-1371 - Add code of conduct.
|
||||
* DATAMONGO-1366 - Release 1.9 M1 (Hopper).
|
||||
* DATAMONGO-1361 - geoNear() queries fail when the accompanying query returns no results.
|
||||
* DATAMONGO-1360 - Cannot query with JSR310.
|
||||
* DATAMONGO-1349 - Upgrade to mongo-java-driver 2.14.0.
|
||||
* DATAMONGO-1346 - Cannot add two pullAll to an Update.
|
||||
* DATAMONGO-1345 - Add support for projections on repository query methods.
|
||||
* DATAMONGO-1342 - Potential NullPointerException in MongoQueryCreator.nextAsArray(…).
|
||||
* DATAMONGO-1341 - Remove package cycle between core and core.index.
|
||||
* DATAMONGO-1337 - General code quality improvements.
|
||||
* DATAMONGO-1335 - DBObjectAccessor doesn't write properties correctly if multiple ones are nested.
|
||||
* DATAMONGO-1334 - MapResultOptions limit not implemented.
|
||||
* DATAMONGO-1324 - StringToObjectIdConverter not properly registered causing drop in performance on identifier conversion.
|
||||
* DATAMONGO-1317 - Assert compatibility with MongoDB Java driver 3.2.
|
||||
* DATAMONGO-1314 - Fix typo in Exception message.
|
||||
* DATAMONGO-1312 - Cannot convert generic sub-document fields.
|
||||
* DATAMONGO-1303 - Add build profile for MongoDB 3.1 driver.
|
||||
* DATAMONGO-1302 - CustomConversions should allow registration of ConverterFactory.
|
||||
* DATAMONGO-1297 - Unique Index on DBRef.
|
||||
* DATAMONGO-1293 - MongoDbFactoryParser should allow id attribute in addition to client-uri.
|
||||
* DATAMONGO-1291 - Allow @Document to be used as meta-annotation.
|
||||
* DATAMONGO-1290 - @Query annotation with byte[] parameter does not work.
|
||||
* DATAMONGO-1289 - NullPointerException when saving an object with no "id" field or @Id annotation.
|
||||
* DATAMONGO-1288 - Update.inc(String, Number) method fails to work with AtomicInteger.
|
||||
* DATAMONGO-1287 - MappingMongoConverter eagerly fetches and converts lazy DbRef to change them afterwards by proxies.
|
||||
* DATAMONGO-1276 - MongoTemplate.CloseableIterableCursorAdapter does not null check return values from PersistenceExceptionTranslator.
|
||||
* DATAMONGO-1270 - Update documentation to reflect deprecation of MongoFactoryBean.
|
||||
* DATAMONGO-1238 - Support for Querydsl 4.
|
||||
* DATAMONGO-1204 - ObjectPath equality check breaks due to changes MongoDB V3.
|
||||
* DATAMONGO-1163 - Allow @Indexed to be used as meta-annotation.
|
||||
* DATAMONGO-934 - Add support for the bulk operations introduced in MongoDB 2.6.
|
||||
|
||||
|
||||
Changes in version 1.8.2.RELEASE (2015-12-18)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1355 - Release 1.8.2 (Gosling).
|
||||
* DATAMONGO-1346 - Cannot add two pullAll to an Update.
|
||||
* DATAMONGO-1342 - Potential NullPointerException in MongoQueryCreator.nextAsArray(…).
|
||||
* DATAMONGO-1337 - General code quality improvements.
|
||||
* DATAMONGO-1335 - DBObjectAccessor doesn't write properties correctly if multiple ones are nested.
|
||||
* DATAMONGO-1334 - MapResultOptions limit not implemented.
|
||||
* DATAMONGO-1324 - StringToObjectIdConverter not properly registered causing drop in performance on identifier conversion.
|
||||
* DATAMONGO-1317 - Assert compatibility with MongoDB Java driver 3.2.
|
||||
* DATAMONGO-1290 - @Query annotation with byte[] parameter does not work.
|
||||
* DATAMONGO-1289 - NullPointerException when saving an object with no "id" field or @Id annotation.
|
||||
* DATAMONGO-1287 - MappingMongoConverter eagerly fetches and converts lazy DbRef to change them afterwards by proxies.
|
||||
* DATAMONGO-1204 - ObjectPath equality check breaks due to changes MongoDB V3.
|
||||
|
||||
|
||||
Changes in version 1.8.1.RELEASE (2015-11-15)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1316 - Release 1.8.1 (Gosling).
|
||||
* DATAMONGO-1312 - Cannot convert generic sub-document fields.
|
||||
* DATAMONGO-1302 - CustomConversions should allow registration of ConverterFactory.
|
||||
* DATAMONGO-1297 - Unique Index on DBRef.
|
||||
* DATAMONGO-1293 - MongoDbFactoryParser should allow id attribute in addition to client-uri.
|
||||
* DATAMONGO-1276 - MongoTemplate.CloseableIterableCursorAdapter does not null check return values from PersistenceExceptionTranslator.
|
||||
|
||||
|
||||
Changes in version 1.6.4.RELEASE (2015-10-14)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1304 - Release 1.6.4 (Evans).
|
||||
|
||||
|
||||
Changes in version 1.8.0.RELEASE (2015-09-01)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1282 - Release 1.8 GA (Gosling).
|
||||
* DATAMONGO-1280 - Add what's new section to reference documentation.
|
||||
* DATAMONGO-1275 - Reference documentation should mention support for optimistic locking.
|
||||
* DATAMONGO-1269 - QueryMapper drops numeric keys in Maps.
|
||||
* DATAMONGO-1256 - Provide a collectionName in MongoMappingEvents.
|
||||
|
||||
|
||||
Changes in version 1.8.0.RC1 (2015-08-04)
|
||||
-----------------------------------------
|
||||
* DATAMONGO-1268 - Release 1.8 RC1 (Gosling).
|
||||
* DATAMONGO-1266 - Repository query methods returning a primitive do not detect domain type correctly.
|
||||
* DATAMONGO-1260 - Prevent accidental authentication misconfiguration on SimpleMongoDbFactory.
|
||||
* DATAMONGO-1257 - mongo:mongo-client namespace does not support usernames with a comma.
|
||||
* DATAMONGO-1254 - Group after Project in aggregation uses incorrect field name.
|
||||
* DATAMONGO-1251 - update / findAndModify throws NullPointerException.
|
||||
* DATAMONGO-1250 - Custom converter implementation not used in updates.
|
||||
* DATAMONGO-1244 - StringBasedMongoQuery handles complex expression parameters incorrectly.
|
||||
* DATAMONGO-1242 - Update mongo-java-driver to 3.0.2 in mongo3 profile.
|
||||
* DATAMONGO-1236 - MongoOperations findAndModify and updateFirst do not include the _class in Map values.
|
||||
* DATAMONGO-1234 - Fix typos in JavaDoc.
|
||||
* DATAMONGO-1232 - IgnoreCase should escape queries.
|
||||
* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path.
|
||||
* DATAMONGO-1166 - ReadPreference not used for Aggregations.
|
||||
* DATAMONGO-1157 - Throw meaningful exception when @DbRef is used with unsupported types.
|
||||
* DATAMONGO-1125 - Specify collection that triggers CommandFailureException.
|
||||
|
||||
|
||||
Changes in version 1.7.2.RELEASE (2015-07-28)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1261 - Release 1.7.2 (Fowler).
|
||||
* DATAMONGO-1260 - Prevent accidental authentication misconfiguration on SimpleMongoDbFactory.
|
||||
* DATAMONGO-1257 - mongo:mongo-client namespace does not support usernames with a comma.
|
||||
* DATAMONGO-1254 - Group after Project in aggregation uses incorrect field name.
|
||||
* DATAMONGO-1251 - update / findAndModify throws NullPointerException.
|
||||
* DATAMONGO-1250 - Custom converter implementation not used in updates.
|
||||
|
||||
|
||||
Changes in version 1.5.6.RELEASE (2015-07-01)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1246 - Release 1.5.6 (Dijkstra).
|
||||
* DATAMONGO-1234 - Fix typos in JavaDoc.
|
||||
* DATAMONGO-1232 - IgnoreCase should escape queries.
|
||||
* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path.
|
||||
* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility.
|
||||
* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up.
|
||||
* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item.
|
||||
* DATAMONGO-1180 - Incorrect exception message creation in PartTreeMongoQuery.
|
||||
* DATAMONGO-1166 - ReadPreference not used for Aggregations.
|
||||
* DATAMONGO-1155 - Upgrade mongo-next build profiles to Java driver version 2.13.0.
|
||||
|
||||
|
||||
Changes in version 1.6.3.RELEASE (2015-07-01)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1247 - Release 1.6.3 (Evans).
|
||||
* DATAMONGO-1242 - Update mongo-java-driver to 3.0.2 in mongo3 profile.
|
||||
* DATAMONGO-1234 - Fix typos in JavaDoc.
|
||||
* DATAMONGO-1232 - IgnoreCase should escape queries.
|
||||
* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path.
|
||||
* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility.
|
||||
* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up.
|
||||
* DATAMONGO-1213 - Include new section on Spring Data and Spring Framework dependencies in reference documentation.
|
||||
* DATAMONGO-1210 - Inconsistent property order of _class type hint breaks document equality.
|
||||
* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item.
|
||||
* DATAMONGO-1196 - Upgrade build profiles after MongoDB 3.0 Java driver release.
|
||||
* DATAMONGO-1180 - Incorrect exception message creation in PartTreeMongoQuery.
|
||||
* DATAMONGO-1166 - ReadPreference not used for Aggregations.
|
||||
* DATAMONGO-1157 - Throw meaningful exception when @DbRef is used with unsupported types.
|
||||
* DATAMONGO-1155 - Upgrade mongo-next build profiles to Java driver version 2.13.0.
|
||||
* DATAMONGO-1153 - Fix documentation build.
|
||||
* DATAMONGO-1133 - Field aliasing is not honored in Aggregation operations.
|
||||
* DATAMONGO-1124 - Switch log level for cyclic reference index warnings from WARN to INFO.
|
||||
* DATAMONGO-1081 - Improve documentation on field mapping semantics.
|
||||
|
||||
|
||||
Changes in version 1.7.1.RELEASE (2015-06-30)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1248 - Release 1.7.1 (Fowler).
|
||||
* DATAMONGO-1242 - Update mongo-java-driver to 3.0.2 in mongo3 profile.
|
||||
* DATAMONGO-1234 - Fix typos in JavaDoc.
|
||||
* DATAMONGO-1232 - IgnoreCase should escape queries.
|
||||
* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path.
|
||||
* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility.
|
||||
* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up.
|
||||
* DATAMONGO-1216 - Authentication mechanism PLAIN changes to SCRAM-SHA-1.
|
||||
* DATAMONGO-1213 - Include new section on Spring Data and Spring Framework dependencies in reference documentation.
|
||||
* DATAMONGO-1210 - Inconsistent property order of _class type hint breaks document equality.
|
||||
* DATAMONGO-1208 - MongoTemplate.stream(…) does not consider limit, order, sort etc.
|
||||
* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item.
|
||||
* DATAMONGO-1202 - Indexed annotation problems under generics.
|
||||
* DATAMONGO-1196 - Upgrade build profiles after MongoDB 3.0 Java driver release.
|
||||
* DATAMONGO-1193 - Prevent unnecessary database lookups when resolving DBRefs on 2.x driver.
|
||||
* DATAMONGO-1166 - ReadPreference not used for Aggregations.
|
||||
* DATAMONGO-1157 - Throw meaningful exception when @DbRef is used with unsupported types.
|
||||
|
||||
|
||||
Changes in version 1.8.0.M1 (2015-06-02)
|
||||
----------------------------------------
|
||||
* DATAMONGO-1228 - Release 1.8 M1 (Gosling).
|
||||
* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility.
|
||||
* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up.
|
||||
* DATAMONGO-1218 - Deprecate non-MongoClient related configuration options in XML namespace.
|
||||
* DATAMONGO-1216 - Authentication mechanism PLAIN changes to SCRAM-SHA-1.
|
||||
* DATAMONGO-1213 - Include new section on Spring Data and Spring Framework dependencies in reference documentation.
|
||||
* DATAMONGO-1211 - Adapt API changes in Spring Data Commons to simplify custom repository base class registration.
|
||||
* DATAMONGO-1210 - Inconsistent property order of _class type hint breaks document equality.
|
||||
* DATAMONGO-1208 - MongoTemplate.stream(…) does not consider limit, order, sort etc.
|
||||
* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item.
|
||||
* DATAMONGO-1202 - Indexed annotation problems under generics.
|
||||
* DATAMONGO-1196 - Upgrade build profiles after MongoDB 3.0 Java driver release.
|
||||
* DATAMONGO-1193 - Prevent unnecessary database lookups when resolving DBRefs on 2.x driver.
|
||||
* DATAMONGO-1192 - Switch back to Spring 4.1's CollectionFactory.
|
||||
* DATAMONGO-1134 - Add support for $geoIntersects.
|
||||
* DATAMONGO-990 - Add support for SpEL expressions in @Query.
|
||||
|
||||
|
||||
Changes in version 1.7.0.RELEASE (2015-03-23)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1189 - Release 1.7 GA.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
Spring Data MongoDB 1.7 GA
|
||||
Spring Data MongoDB 1.7.2
|
||||
Copyright (c) [2010-2015] Pivotal Software, Inc.
|
||||
|
||||
This product is licensed to you under the Apache License, Version 2.0 (the "License").
|
||||
|
||||
Reference in New Issue
Block a user