Compare commits
42 Commits
1.8.0.M1
...
1.8.0.RELEASE
| Author | SHA1 | Date |
|---|---|---|
|  | 77dce53c7a |  |
|  | 73f268e7c4 |  |
|  | 075d7d8131 |  |
|  | 206337044a |  |
|  | 55b44ff7aa |  |
|  | ae48639ae9 |  |
|  | 6b5e78f810 |  |
|  | 3e485e0a88 |  |
|  | 335c78f908 |  |
|  | b103e4eaf6 |  |
|  | c4a6c63d23 |  |
|  | 4a4f10f97b |  |
|  | a5712daab7 |  |
|  | 28cb1ef106 |  |
|  | 0d99a3e527 |  |
|  | 9da43263ce |  |
|  | 784e199068 |  |
|  | 1ffee802c0 |  |
|  | 6f0ac7f0c2 |  |
|  | 941d4d8985 |  |
|  | 44c76d8ffb |  |
|  | df9a9f5fb6 |  |
|  | bebd0fa0e6 |  |
|  | 594e90789d |  |
|  | f2ab42cb80 |  |
|  | 3224fa8ce7 |  |
|  | ce156c1344 |  |
|  | 434e553022 |  |
|  | de5b5ee4b0 |  |
|  | 60636bf56d |  |
|  | 1ca71f93e9 |  |
|  | 63ff39bed6 |  |
|  | cb0b9604d4 |  |
|  | 1dbe3b62d7 |  |
|  | 5c0707d221 |  |
|  | c4ffc37dd5 |  |
|  | aaf93b0f6f |  |
|  | 23eab1e84f |  |
|  | 218f32e552 |  |
|  | 62fbe4d08c |  |
|  | 41ffd00619 |  |
|  | 98b9a604cf |  |
pom.xml (12 changed lines)

@@ -5,7 +5,7 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.8.0.M1</version>
<version>1.8.0.RELEASE</version>
<packaging>pom</packaging>
<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>1.7.0.M1</version>
<version>1.7.0.RELEASE</version>
</parent>
<modules>
@@ -28,7 +28,7 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>1.11.0.M1</springdata.commons>
<springdata.commons>1.11.0.RELEASE</springdata.commons>
<mongo>2.13.0</mongo>
<mongo.osgi>2.13.0</mongo.osgi>
</properties>
@@ -123,7 +123,7 @@
<id>mongo3</id>
<properties>
<mongo>3.0.0</mongo>
<mongo>3.0.2</mongo>
</properties>
</profile>
@@ -156,8 +156,8 @@
<repositories>
<repository>
<id>spring-libs-milestone</id>
<url>https://repo.spring.io/libs-milestone</url>
<id>spring-libs-release</id>
<url>https://repo.spring.io/libs-release</url>
</repository>
</repositories>
@@ -6,7 +6,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.8.0.M1</version>
<version>1.8.0.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -48,7 +48,7 @@
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.8.0.M1</version>
<version>1.8.0.RELEASE</version>
</dependency>
<dependency>
@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.8.0.M1</version>
<version>1.8.0.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -5,7 +5,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.8.0.M1</version>
<version>1.8.0.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -11,7 +11,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.8.0.M1</version>
<version>1.8.0.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -17,8 +17,11 @@ package org.springframework.data.mongodb.config;
import java.beans.PropertyEditorSupport;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.springframework.util.StringUtils;
@@ -28,10 +31,13 @@ import com.mongodb.MongoCredential;
* Parse a {@link String} to a Collection of {@link MongoCredential}.
*
* @author Christoph Strobl
* @author Oliver Gierke
* @since 1.7
*/
public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
private static final Pattern GROUP_PATTERN = Pattern.compile("(\\\\?')(.*?)\\1");
private static final String AUTH_MECHANISM_KEY = "uri.authMechanism";
private static final String USERNAME_PASSWORD_DELIMINATOR = ":";
private static final String DATABASE_DELIMINATOR = "@";
@@ -51,11 +57,7 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
List<MongoCredential> credentials = new ArrayList<MongoCredential>();
for (String credentialString : text.split(",")) {
if (!text.contains(USERNAME_PASSWORD_DELIMINATOR) || !text.contains(DATABASE_DELIMINATOR)) {
throw new IllegalArgumentException("Credentials need to be in format 'username:password@database'!");
}
for (String credentialString : extractCredentialsString(text)) {
String[] userNameAndPassword = extractUserNameAndPassword(credentialString);
String database = extractDB(credentialString);
@@ -68,43 +70,83 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
String authMechanism = options.getProperty(AUTH_MECHANISM_KEY);
if (MongoCredential.GSSAPI_MECHANISM.equals(authMechanism)) {
verifyUserNamePresent(userNameAndPassword);
credentials.add(MongoCredential.createGSSAPICredential(userNameAndPassword[0]));
} else if (MongoCredential.MONGODB_CR_MECHANISM.equals(authMechanism)) {
verifyUsernameAndPasswordPresent(userNameAndPassword);
verifyDatabasePresent(database);
credentials.add(MongoCredential.createMongoCRCredential(userNameAndPassword[0], database,
userNameAndPassword[1].toCharArray()));
} else if (MongoCredential.MONGODB_X509_MECHANISM.equals(authMechanism)) {
verifyUserNamePresent(userNameAndPassword);
credentials.add(MongoCredential.createMongoX509Credential(userNameAndPassword[0]));
} else if (MongoCredential.PLAIN_MECHANISM.equals(authMechanism)) {
verifyUsernameAndPasswordPresent(userNameAndPassword);
verifyDatabasePresent(database);
credentials.add(MongoCredential.createPlainCredential(userNameAndPassword[0], database,
userNameAndPassword[1].toCharArray()));
} else if (MongoCredential.SCRAM_SHA_1_MECHANISM.equals(authMechanism)) {
verifyUsernameAndPasswordPresent(userNameAndPassword);
verifyDatabasePresent(database);
credentials.add(MongoCredential.createScramSha1Credential(userNameAndPassword[0], database,
userNameAndPassword[1].toCharArray()));
} else {
throw new IllegalArgumentException(String.format(
"Cannot create MongoCredentials for unknown auth mechanism '%s'!", authMechanism));
throw new IllegalArgumentException(
String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'!", authMechanism));
}
}
} else {
credentials.add(MongoCredential.createCredential(userNameAndPassword[0], database,
userNameAndPassword[1].toCharArray()));
verifyUsernameAndPasswordPresent(userNameAndPassword);
verifyDatabasePresent(database);
credentials.add(
MongoCredential.createCredential(userNameAndPassword[0], database, userNameAndPassword[1].toCharArray()));
}
}
setValue(credentials);
}
private List<String> extractCredentialsString(String source) {
Matcher matcher = GROUP_PATTERN.matcher(source);
List<String> list = new ArrayList<String>();
while (matcher.find()) {
String value = StringUtils.trimLeadingCharacter(matcher.group(), '\'');
list.add(StringUtils.trimTrailingCharacter(value, '\''));
}
if (!list.isEmpty()) {
return list;
}
return Arrays.asList(source.split(","));
}
private static String[] extractUserNameAndPassword(String text) {
int dbSeperationIndex = text.lastIndexOf(DATABASE_DELIMINATOR);
String userNameAndPassword = text.substring(0, dbSeperationIndex);
return userNameAndPassword.split(USERNAME_PASSWORD_DELIMINATOR);
int index = text.lastIndexOf(DATABASE_DELIMINATOR);
index = index != -1 ? index : text.lastIndexOf(OPTIONS_DELIMINATOR);
return index == -1 ? new String[] {} : text.substring(0, index).split(USERNAME_PASSWORD_DELIMINATOR);
}
private static String extractDB(String text) {
int dbSeperationIndex = text.lastIndexOf(DATABASE_DELIMINATOR);
if (dbSeperationIndex == -1) {
return "";
}
String tmp = text.substring(dbSeperationIndex + 1);
int optionsSeperationIndex = tmp.lastIndexOf(OPTIONS_DELIMINATOR);
@@ -129,4 +171,28 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
return properties;
}
private static void verifyUsernameAndPasswordPresent(String[] source) {
verifyUserNamePresent(source);
if (source.length != 2) {
throw new IllegalArgumentException(
"Credentials need to specify username and password like in 'username:password@database'!");
}
}
private static void verifyDatabasePresent(String source) {
if (!StringUtils.hasText(source)) {
throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'!");
}
}
private static void verifyUserNamePresent(String[] source) {
if (source.length == 0 || !StringUtils.hasText(source[0])) {
throw new IllegalArgumentException("Credentials need to specify username!");
}
}
}
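As an aside, here is a minimal sketch of feeding the credential format shown in this hunk ('username:password@database', optionally suffixed with ?uri.authMechanism=... and comma-separated for multiple entries) through the editor. The sample values and the standalone class are made up for illustration and are not part of the diff.

```java
import java.util.List;

import org.springframework.data.mongodb.config.MongoCredentialPropertyEditor;

import com.mongodb.MongoCredential;

public class CredentialParsingExample {

	public static void main(String[] args) {

		MongoCredentialPropertyEditor editor = new MongoCredentialPropertyEditor();

		// username:password@database, as required by the verify* methods above;
		// sample values are invented
		editor.setAsText("jon:warg@snow");

		@SuppressWarnings("unchecked")
		List<MongoCredential> credentials = (List<MongoCredential>) editor.getValue();

		System.out.println(credentials);
	}
}
```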
@@ -25,7 +25,7 @@ import com.mongodb.DBCursor;
interface CursorPreparer {
/**
* Prepare the given cursor (apply limits, skips and so on). Returns th eprepared cursor.
* Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
*
* @param cursor
*/
@@ -49,7 +49,7 @@ public class MongoAction {
* @param collectionName the collection name, must not be {@literal null} or empty.
* @param entityType the POJO that is being operated against
* @param document the converted DBObject from the POJO or Spring Update object
* @param query the converted DBOjbect from the Spring Query object
* @param query the converted DBObject from the Spring Query object
*/
public MongoAction(WriteConcern defaultWriteConcern, MongoActionOperation mongoActionOperation,
String collectionName, Class<?> entityType, DBObject document, DBObject query) {
@@ -199,8 +199,8 @@ public abstract class MongoDbUtils {
}
/**
* Check if credentials present. In case we're using a monog-java-driver version 3 or above we do not have the need
* for authentication as the auth data has to be provied within the MongoClient
* Check if credentials present. In case we're using a mongo-java-driver version 3 or above we do not have the need
* for authentication as the auth data has to be provided within the MongoClient
*
* @param credentials
* @return
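To illustrate the note in the MongoDbUtils comment above (with driver version 3 the auth data has to be provided within the MongoClient), a hedged sketch of handing a MongoCredential to the client up front; host, database and credentials are assumptions.

```java
import java.util.Collections;

import com.mongodb.MongoClient;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;

public class MongoClientCredentialExample {

	public static void main(String[] args) throws Exception {

		// credentials are passed when the client is created rather than by
		// authenticating a DB afterwards; values are made up
		MongoCredential credential = MongoCredential.createCredential("user", "database", "secret".toCharArray());

		MongoClient client = new MongoClient(new ServerAddress("localhost", 27017),
				Collections.singletonList(credential));

		System.out.println(client.getDB("database").getName());

		client.close();
	}
}
```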
@@ -16,6 +16,7 @@
package org.springframework.data.mongodb.core;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
@@ -25,6 +26,7 @@ import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.dao.InvalidDataAccessResourceUsageException;
import org.springframework.dao.PermissionDeniedDataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
import org.springframework.util.ClassUtils;
@@ -86,12 +88,15 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
int code = ((MongoException) ex).getCode();
if (code == 11000 || code == 11001) {
if (MongoDbErrorCodes.isDuplicateKeyCode(code)) {
throw new DuplicateKeyException(ex.getMessage(), ex);
} else if (code == 12000 || code == 13440) {
} else if (MongoDbErrorCodes.isDataAccessResourceFailureCode(code)) {
throw new DataAccessResourceFailureException(ex.getMessage(), ex);
} else if (code == 10003 || code == 12001 || code == 12010 || code == 12011 || code == 12012) {
} else if (MongoDbErrorCodes.isInvalidDataAccessApiUsageCode(code) || code == 10003 || code == 12001
|| code == 12010 || code == 12011 || code == 12012) {
throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex);
} else if (MongoDbErrorCodes.isPermissionDeniedCode(code)) {
throw new PermissionDeniedDataAccessException(ex.getMessage(), ex);
}
return new UncategorizedMongoDbException(ex.getMessage(), ex);
}
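For orientation, a hedged sketch of what the error-code based branches above mean in practice. The error message is invented, and since this hunk shows the translated exception being thrown rather than returned, the sketch handles both outcomes instead of assuming one.

```java
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;

import com.mongodb.MongoException;

public class ErrorCodeTranslationExample {

	public static void main(String[] args) {

		MongoExceptionTranslator translator = new MongoExceptionTranslator();

		DataAccessException translated;
		try {
			// 11000 is one of the duplicate key codes registered in MongoDbErrorCodes
			// (added in the following hunk), so the isDuplicateKeyCode(code) branch applies
			translated = translator.translateExceptionIfPossible(new MongoException(11000, "E11000 duplicate key error"));
		} catch (DataAccessException e) {
			translated = e;
		}

		System.out.println(translated);
	}
}
```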
@@ -101,4 +106,126 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
|
||||
// that translation should not occur.
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link MongoDbErrorCodes} holds MongoDB specific error codes outlined in {@literal mongo/base/error_codes.err}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.8
|
||||
*/
|
||||
public static final class MongoDbErrorCodes {
|
||||
|
||||
static HashMap<Integer, String> dataAccessResourceFailureCodes;
|
||||
static HashMap<Integer, String> dataIntegrityViolationCodes;
|
||||
static HashMap<Integer, String> duplicateKeyCodes;
|
||||
static HashMap<Integer, String> invalidDataAccessApiUsageExeption;
|
||||
static HashMap<Integer, String> permissionDeniedCodes;
|
||||
|
||||
static HashMap<Integer, String> errorCodes;
|
||||
|
||||
static {
|
||||
|
||||
dataAccessResourceFailureCodes = new HashMap<Integer, String>(10);
|
||||
dataAccessResourceFailureCodes.put(6, "HostUnreachable");
|
||||
dataAccessResourceFailureCodes.put(7, "HostNotFound");
|
||||
dataAccessResourceFailureCodes.put(89, "NetworkTimeout");
|
||||
dataAccessResourceFailureCodes.put(91, "ShutdownInProgress");
|
||||
dataAccessResourceFailureCodes.put(12000, "SlaveDelayDifferential");
|
||||
dataAccessResourceFailureCodes.put(10084, "CannotFindMapFile64Bit");
|
||||
dataAccessResourceFailureCodes.put(10085, "CannotFindMapFile");
|
||||
dataAccessResourceFailureCodes.put(10357, "ShutdownInProgress");
|
||||
dataAccessResourceFailureCodes.put(10359, "Header==0");
|
||||
dataAccessResourceFailureCodes.put(13440, "BadOffsetInFile");
|
||||
dataAccessResourceFailureCodes.put(13441, "BadOffsetInFile");
|
||||
dataAccessResourceFailureCodes.put(13640, "DataFileHeaderCorrupt");
|
||||
|
||||
dataIntegrityViolationCodes = new HashMap<Integer, String>(6);
|
||||
dataIntegrityViolationCodes.put(67, "CannotCreateIndex");
|
||||
dataIntegrityViolationCodes.put(68, "IndexAlreadyExists");
|
||||
dataIntegrityViolationCodes.put(85, "IndexOptionsConflict");
|
||||
dataIntegrityViolationCodes.put(86, "IndexKeySpecsConflict");
|
||||
dataIntegrityViolationCodes.put(112, "WriteConflict");
|
||||
dataIntegrityViolationCodes.put(117, "ConflictingOperationInProgress");
|
||||
|
||||
duplicateKeyCodes = new HashMap<Integer, String>(3);
|
||||
duplicateKeyCodes.put(3, "OBSOLETE_DuplicateKey");
|
||||
duplicateKeyCodes.put(84, "DuplicateKeyValue");
|
||||
duplicateKeyCodes.put(11000, "DuplicateKey");
|
||||
duplicateKeyCodes.put(11001, "DuplicateKey");
|
||||
|
||||
invalidDataAccessApiUsageExeption = new HashMap<Integer, String>();
|
||||
invalidDataAccessApiUsageExeption.put(5, "GraphContainsCycle");
|
||||
invalidDataAccessApiUsageExeption.put(9, "FailedToParse");
|
||||
invalidDataAccessApiUsageExeption.put(14, "TypeMismatch");
|
||||
invalidDataAccessApiUsageExeption.put(15, "Overflow");
|
||||
invalidDataAccessApiUsageExeption.put(16, "InvalidLength");
|
||||
invalidDataAccessApiUsageExeption.put(20, "IllegalOperation");
|
||||
invalidDataAccessApiUsageExeption.put(21, "EmptyArrayOperation");
|
||||
invalidDataAccessApiUsageExeption.put(22, "InvalidBSON");
|
||||
invalidDataAccessApiUsageExeption.put(23, "AlreadyInitialized");
|
||||
invalidDataAccessApiUsageExeption.put(29, "NonExistentPath");
|
||||
invalidDataAccessApiUsageExeption.put(30, "InvalidPath");
|
||||
invalidDataAccessApiUsageExeption.put(40, "ConflictingUpdateOperators");
|
||||
invalidDataAccessApiUsageExeption.put(45, "UserDataInconsistent");
|
||||
invalidDataAccessApiUsageExeption.put(30, "DollarPrefixedFieldName");
|
||||
invalidDataAccessApiUsageExeption.put(52, "InvalidPath");
|
||||
invalidDataAccessApiUsageExeption.put(53, "InvalidIdField");
|
||||
invalidDataAccessApiUsageExeption.put(54, "NotSingleValueField");
|
||||
invalidDataAccessApiUsageExeption.put(55, "InvalidDBRef");
|
||||
invalidDataAccessApiUsageExeption.put(56, "EmptyFieldName");
|
||||
invalidDataAccessApiUsageExeption.put(57, "DottedFieldName");
|
||||
invalidDataAccessApiUsageExeption.put(59, "CommandNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(60, "DatabaseNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(61, "ShardKeyNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(62, "OplogOperationUnsupported");
|
||||
invalidDataAccessApiUsageExeption.put(66, "ImmutableField");
|
||||
invalidDataAccessApiUsageExeption.put(72, "InvalidOptions");
|
||||
invalidDataAccessApiUsageExeption.put(115, "CommandNotSupported");
|
||||
invalidDataAccessApiUsageExeption.put(116, "DocTooLargeForCapped");
|
||||
invalidDataAccessApiUsageExeption.put(130, "SymbolNotFound");
|
||||
invalidDataAccessApiUsageExeption.put(17280, "KeyTooLong");
|
||||
invalidDataAccessApiUsageExeption.put(13334, "ShardKeyTooBig");
|
||||
|
||||
permissionDeniedCodes = new HashMap<Integer, String>();
|
||||
permissionDeniedCodes.put(11, "UserNotFound");
|
||||
permissionDeniedCodes.put(18, "AuthenticationFailed");
|
||||
permissionDeniedCodes.put(31, "RoleNotFound");
|
||||
permissionDeniedCodes.put(32, "RolesNotRelated");
|
||||
permissionDeniedCodes.put(33, "PrvilegeNotFound");
|
||||
permissionDeniedCodes.put(15847, "CannotAuthenticate");
|
||||
permissionDeniedCodes.put(16704, "CannotAuthenticateToAdminDB");
|
||||
permissionDeniedCodes.put(16705, "CannotAuthenticateToAdminDB");
|
||||
|
||||
errorCodes = new HashMap<Integer, String>();
|
||||
errorCodes.putAll(dataAccessResourceFailureCodes);
|
||||
errorCodes.putAll(dataIntegrityViolationCodes);
|
||||
errorCodes.putAll(duplicateKeyCodes);
|
||||
errorCodes.putAll(invalidDataAccessApiUsageExeption);
|
||||
errorCodes.putAll(permissionDeniedCodes);
|
||||
}
|
||||
|
||||
public static boolean isDataIntegrityViolationCode(Integer errorCode) {
|
||||
return errorCode == null ? false : dataIntegrityViolationCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isDataAccessResourceFailureCode(Integer errorCode) {
|
||||
return errorCode == null ? false : dataAccessResourceFailureCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isDuplicateKeyCode(Integer errorCode) {
|
||||
return errorCode == null ? false : duplicateKeyCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isPermissionDeniedCode(Integer errorCode) {
|
||||
return errorCode == null ? false : permissionDeniedCodes.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static boolean isInvalidDataAccessApiUsageCode(Integer errorCode) {
|
||||
return errorCode == null ? false : invalidDataAccessApiUsageExeption.containsKey(errorCode);
|
||||
}
|
||||
|
||||
public static String getErrorDescription(Integer errorCode) {
|
||||
return errorCode == null ? null : errorCodes.get(errorCode);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -338,7 +338,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DBCursor cursor = collection.find(mappedQuery, mappedFields);
|
||||
QueryCursorPreparer cursorPreparer = new QueryCursorPreparer(query, entityType);
|
||||
|
||||
ReadDbObjectCallback<T> readCallback = new ReadDbObjectCallback<T>(mongoConverter, entityType);
|
||||
ReadDbObjectCallback<T> readCallback = new ReadDbObjectCallback<T>(mongoConverter, entityType, collection
|
||||
.getName());
|
||||
|
||||
return new CloseableIterableCusorAdapter<T>(cursorPreparer.prepare(cursor), exceptionTranslator, readCallback);
|
||||
}
|
||||
@@ -385,7 +386,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
CommandResult result = execute(new DbCallback<CommandResult>() {
public CommandResult doInDB(DB db) throws MongoException, DataAccessException {
return db.command(command, readPreference);
return readPreference != null ? db.command(command, readPreference) : db.command(command);
}
});
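A small usage sketch of running a command through MongoTemplate, which is the path the change above affects: with no ReadPreference configured the call now falls back to db.command(command). The connection settings and database name are assumptions.

```java
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;

import com.mongodb.CommandResult;
import com.mongodb.MongoClient;

public class CommandExample {

	public static void main(String[] args) throws Exception {

		// database name and default localhost connection are made up for illustration
		MongoTemplate template = new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "database"));

		// executed via the DbCallback shown above; without a read preference on the
		// template this ends up in db.command(command)
		CommandResult result = template.executeCommand("{ buildInfo: 1 }");

		System.out.println(result.get("version"));
	}
}
```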
@@ -632,12 +633,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
BasicDBObject command = new BasicDBObject("geoNear", collection);
|
||||
command.putAll(near.toDBObject());
|
||||
|
||||
CommandResult commandResult = executeCommand(command);
|
||||
CommandResult commandResult = executeCommand(command, this.readPreference);
|
||||
List<Object> results = (List<Object>) commandResult.get("results");
|
||||
results = results == null ? Collections.emptyList() : results;
|
||||
|
||||
DbObjectCallback<GeoResult<T>> callback = new GeoNearResultDbObjectCallback<T>(new ReadDbObjectCallback<T>(
|
||||
mongoConverter, entityClass), near.getMetric());
|
||||
mongoConverter, entityClass, collectionName), near.getMetric());
|
||||
List<GeoResult<T>> result = new ArrayList<GeoResult<T>>(results.size());
|
||||
|
||||
int index = 0;
|
||||
@@ -789,15 +790,15 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
initializeVersionProperty(objectToSave);
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave));
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave, collectionName));
|
||||
|
||||
DBObject dbDoc = toDbObject(objectToSave, writer);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc));
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc, collectionName));
|
||||
Object id = insertDBObject(collectionName, dbDoc, objectToSave.getClass());
|
||||
|
||||
populateIdIfNecessary(objectToSave, id);
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbDoc));
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbDoc, collectionName));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -885,10 +886,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
initializeVersionProperty(o);
|
||||
BasicDBObject dbDoc = new BasicDBObject();
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(o));
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(o, collectionName));
|
||||
writer.write(o, dbDoc);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(o, dbDoc));
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(o, dbDoc, collectionName));
|
||||
dbObjectList.add(dbDoc);
|
||||
}
|
||||
List<ObjectId> ids = insertDBObjectList(collectionName, dbObjectList);
|
||||
@@ -896,7 +897,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
for (T obj : batchToSave) {
|
||||
if (i < ids.size()) {
|
||||
populateIdIfNecessary(obj, ids.get(i));
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(obj, dbObjectList.get(i)));
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(obj, dbObjectList.get(i), collectionName));
|
||||
}
|
||||
i++;
|
||||
}
|
||||
@@ -951,14 +952,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
BasicDBObject dbObject = new BasicDBObject();
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave));
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave, collectionName));
|
||||
this.mongoConverter.write(objectToSave, dbObject);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbObject));
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbObject, collectionName));
|
||||
Update update = Update.fromDBObject(dbObject, ID_FIELD);
|
||||
|
||||
doUpdate(collectionName, query, update, objectToSave.getClass(), false, false);
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbObject));
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbObject, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -966,15 +967,15 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
assertUpdateableIdIfNotSet(objectToSave);
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave));
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave, collectionName));
|
||||
|
||||
DBObject dbDoc = toDbObject(objectToSave, writer);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc));
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc, collectionName));
|
||||
Object id = saveDBObject(collectionName, dbDoc, objectToSave.getClass());
|
||||
|
||||
populateIdIfNecessary(objectToSave, id);
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbDoc));
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbDoc, collectionName));
|
||||
}
|
||||
|
||||
protected Object insertDBObject(final String collectionName, final DBObject dbDoc, final Class<?> entityClass) {
|
||||
@@ -1266,7 +1267,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return execute(collectionName, new CollectionCallback<WriteResult>() {
|
||||
public WriteResult doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
|
||||
maybeEmitEvent(new BeforeDeleteEvent<T>(queryObject, entityClass));
|
||||
maybeEmitEvent(new BeforeDeleteEvent<T>(queryObject, entityClass, collectionName));
|
||||
|
||||
DBObject dboq = queryMapper.getMappedObject(queryObject, entity);
|
||||
|
||||
@@ -1284,7 +1285,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
handleAnyWriteResultErrors(wr, dboq, MongoActionOperation.REMOVE);
|
||||
|
||||
maybeEmitEvent(new AfterDeleteEvent<T>(queryObject, entityClass));
|
||||
maybeEmitEvent(new AfterDeleteEvent<T>(queryObject, entityClass, collectionName));
|
||||
|
||||
return wr;
|
||||
}
|
||||
@@ -1292,13 +1293,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
public <T> List<T> findAll(Class<T> entityClass) {
|
||||
return executeFindMultiInternal(new FindCallback(null), null, new ReadDbObjectCallback<T>(mongoConverter,
|
||||
entityClass), determineCollectionName(entityClass));
|
||||
return findAll(entityClass, determineCollectionName(entityClass));
|
||||
}
|
||||
|
||||
public <T> List<T> findAll(Class<T> entityClass, String collectionName) {
|
||||
return executeFindMultiInternal(new FindCallback(null), null, new ReadDbObjectCallback<T>(mongoConverter,
|
||||
entityClass), collectionName);
|
||||
entityClass, collectionName), collectionName);
|
||||
}
|
||||
|
||||
public <T> MapReduceResults<T> mapReduce(String inputCollectionName, String mapFunction, String reduceFunction,
|
||||
@@ -1343,7 +1343,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
List<T> mappedResults = new ArrayList<T>();
|
||||
DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass);
|
||||
DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass, inputCollectionName);
|
||||
|
||||
for (DBObject dbObject : mapReduceOutput.results()) {
|
||||
mappedResults.add(callback.doWith(dbObject));
|
||||
@@ -1404,7 +1404,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
@SuppressWarnings("unchecked")
|
||||
Iterable<DBObject> resultSet = (Iterable<DBObject>) commandResult.get("retval");
|
||||
List<T> mappedResults = new ArrayList<T>();
|
||||
DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass);
|
||||
DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass, inputCollectionName);
|
||||
|
||||
for (DBObject dbObject : resultSet) {
|
||||
mappedResults.add(callback.doWith(dbObject));
|
||||
@@ -1503,10 +1503,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command));
|
||||
}
|
||||
|
||||
CommandResult commandResult = executeCommand(command);
|
||||
CommandResult commandResult = executeCommand(command, this.readPreference);
|
||||
handleCommandError(commandResult, command);
|
||||
|
||||
return new AggregationResults<O>(returnPotentiallyMappedResults(outputType, commandResult), commandResult);
|
||||
return new AggregationResults<O>(returnPotentiallyMappedResults(outputType, commandResult, collectionName),
|
||||
commandResult);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1516,7 +1517,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
* @param commandResult
|
||||
* @return
|
||||
*/
|
||||
private <O> List<O> returnPotentiallyMappedResults(Class<O> outputType, CommandResult commandResult) {
|
||||
private <O> List<O> returnPotentiallyMappedResults(Class<O> outputType, CommandResult commandResult,
|
||||
String collectionName) {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
Iterable<DBObject> resultSet = (Iterable<DBObject>) commandResult.get("result");
|
||||
@@ -1524,7 +1526,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
DbObjectCallback<O> callback = new UnwrapAndReadDbObjectCallback<O>(mongoConverter, outputType);
|
||||
DbObjectCallback<O> callback = new UnwrapAndReadDbObjectCallback<O>(mongoConverter, outputType, collectionName);
|
||||
|
||||
List<O> mappedResults = new ArrayList<O>();
|
||||
for (DBObject dbObject : resultSet) {
|
||||
@@ -1652,7 +1654,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields), new ReadDbObjectCallback<T>(
|
||||
this.mongoConverter, entityClass), collectionName);
|
||||
this.mongoConverter, entityClass, collectionName), collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1667,7 +1669,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
*/
|
||||
protected <T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<T> entityClass) {
|
||||
return doFind(collectionName, query, fields, entityClass, null, new ReadDbObjectCallback<T>(this.mongoConverter,
|
||||
entityClass));
|
||||
entityClass, collectionName));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1686,7 +1688,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
protected <T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<T> entityClass,
|
||||
CursorPreparer preparer) {
|
||||
return doFind(collectionName, query, fields, entityClass, preparer, new ReadDbObjectCallback<T>(mongoConverter,
|
||||
entityClass));
|
||||
entityClass, collectionName));
|
||||
}
|
||||
|
||||
protected <S, T> List<T> doFind(String collectionName, DBObject query, DBObject fields, Class<S> entityClass,
|
||||
@@ -1742,7 +1744,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
return executeFindOneInternal(new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort),
|
||||
new ReadDbObjectCallback<T>(readerToUse, entityClass), collectionName);
|
||||
new ReadDbObjectCallback<T>(readerToUse, entityClass, collectionName), collectionName);
|
||||
}
|
||||
|
||||
protected <T> T doFindAndModify(String collectionName, DBObject query, DBObject fields, DBObject sort,
|
||||
@@ -1768,7 +1770,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
}
|
||||
|
||||
return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options),
|
||||
new ReadDbObjectCallback<T>(readerToUse, entityClass), collectionName);
|
||||
new ReadDbObjectCallback<T>(readerToUse, entityClass, collectionName), collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -2180,26 +2182,30 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
* {@link MongoReader}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private class ReadDbObjectCallback<T> implements DbObjectCallback<T> {
|
||||
|
||||
private final EntityReader<? super T, DBObject> reader;
|
||||
private final Class<T> type;
|
||||
private final String collectionName;
|
||||
|
||||
public ReadDbObjectCallback(EntityReader<? super T, DBObject> reader, Class<T> type, String collectionName) {
|
||||
|
||||
public ReadDbObjectCallback(EntityReader<? super T, DBObject> reader, Class<T> type) {
|
||||
Assert.notNull(reader);
|
||||
Assert.notNull(type);
|
||||
this.reader = reader;
|
||||
this.type = type;
|
||||
this.collectionName = collectionName;
|
||||
}
|
||||
|
||||
public T doWith(DBObject object) {
|
||||
if (null != object) {
|
||||
maybeEmitEvent(new AfterLoadEvent<T>(object, type));
|
||||
maybeEmitEvent(new AfterLoadEvent<T>(object, type, collectionName));
|
||||
}
|
||||
T source = reader.read(type, object);
|
||||
if (null != source) {
|
||||
maybeEmitEvent(new AfterConvertEvent<T>(object, source));
|
||||
maybeEmitEvent(new AfterConvertEvent<T>(object, source, collectionName));
|
||||
}
|
||||
return source;
|
||||
}
|
||||
@@ -2207,8 +2213,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
class UnwrapAndReadDbObjectCallback<T> extends ReadDbObjectCallback<T> {
|
||||
|
||||
public UnwrapAndReadDbObjectCallback(EntityReader<? super T, DBObject> reader, Class<T> type) {
|
||||
super(reader, type);
|
||||
public UnwrapAndReadDbObjectCallback(EntityReader<? super T, DBObject> reader, Class<T> type, String collectionName) {
|
||||
super(reader, type, collectionName);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -19,6 +19,7 @@ import java.net.UnknownHostException;
|
||||
|
||||
import org.springframework.beans.factory.DisposableBean;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
@@ -103,8 +104,8 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
*/
|
||||
@Deprecated
|
||||
public SimpleMongoDbFactory(MongoURI uri) throws MongoException, UnknownHostException {
|
||||
this(new Mongo(uri), uri.getDatabase(), new UserCredentials(uri.getUsername(), parseChars(uri.getPassword())),
|
||||
true, uri.getDatabase());
|
||||
this(new Mongo(uri), uri.getDatabase(), new UserCredentials(uri.getUsername(), parseChars(uri.getPassword())), true,
|
||||
uri.getDatabase());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -132,6 +133,11 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
private SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials credentials,
|
||||
boolean mongoInstanceCreated, String authenticationDatabaseName) {
|
||||
|
||||
if (mongo instanceof MongoClient && (credentials != null && !UserCredentials.NO_CREDENTIALS.equals(credentials))) {
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
"Usage of 'UserCredentials' with 'MongoClient' is no longer supported. Please use 'MongoCredential' for 'MongoClient' or just 'Mongo'.");
|
||||
}
|
||||
|
||||
Assert.notNull(mongo, "Mongo must not be null");
|
||||
Assert.hasText(databaseName, "Database name must not be empty");
|
||||
Assert.isTrue(databaseName.matches("[\\w-]+"),
|
||||
|
||||
@@ -88,7 +88,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
}
/**
* Creates a new {@link ExposedFields} instance for the given fields in either sythetic or non-synthetic way.
* Creates a new {@link ExposedFields} instance for the given fields in either synthetic or non-synthetic way.
*
* @param fields must not be {@literal null}.
* @param synthetic
@@ -107,7 +107,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
}
/**
* Creates a new {@link ExposedFields} with the given orignals and synthetics.
* Creates a new {@link ExposedFields} with the given originals and synthetics.
*
* @param originals must not be {@literal null}.
* @param synthetic must not be {@literal null}.
@@ -363,7 +363,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
}
/**
* Returns the referenve value for the given field reference. Will return 1 for a synthetic, unaliased field or the
* Returns the reference value for the given field reference. Will return 1 for a synthetic, unaliased field or the
* raw rendering of the reference otherwise.
*
* @return
@@ -21,6 +21,7 @@ import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.FieldProjection;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -40,6 +41,7 @@ import com.mongodb.DBObject;
|
||||
* @author Tobias Trelle
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @since 1.3
|
||||
*/
|
||||
public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
@@ -763,6 +765,20 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
return field;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#getExposedField()
|
||||
*/
|
||||
@Override
|
||||
public ExposedField getExposedField() {
|
||||
|
||||
if (!getField().isAliased()) {
|
||||
return super.getExposedField();
|
||||
}
|
||||
|
||||
return new ExposedField(new AggregationField(getField().getName()), true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new instance of this {@link OperationProjection} with the given alias.
|
||||
*
|
||||
|
||||
@@ -867,7 +867,7 @@ public class QueryMapper {
|
||||
* @return
|
||||
*/
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return PropertyToFieldNameConverter.INSTANCE;
|
||||
return new PositionParameterRetainingPropertyKeyConverter(name);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -881,6 +881,28 @@ public class QueryMapper {
|
||||
return new AssociationConverter(getAssociation());
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 1.8
|
||||
*/
|
||||
static class PositionParameterRetainingPropertyKeyConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final KeyMapper keyMapper;
|
||||
|
||||
public PositionParameterRetainingPropertyKeyConverter(String rawKey) {
|
||||
this.keyMapper = new KeyMapper(rawKey);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty source) {
|
||||
return keyMapper.mapPropertyName(source);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getTypeHint()
|
||||
@@ -901,6 +923,61 @@ public class QueryMapper {
|
||||
|
||||
return NESTED_DOCUMENT;
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 1.8
|
||||
*/
|
||||
static class KeyMapper {
|
||||
|
||||
private final Iterator<String> iterator;
|
||||
|
||||
public KeyMapper(String key) {
|
||||
|
||||
this.iterator = Arrays.asList(key.split("\\.")).iterator();
|
||||
this.iterator.next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps the property name while retaining potential positional operator {@literal $}.
|
||||
*
|
||||
* @param property
|
||||
* @return
|
||||
*/
|
||||
protected String mapPropertyName(MongoPersistentProperty property) {
|
||||
|
||||
String mappedName = PropertyToFieldNameConverter.INSTANCE.convert(property);
|
||||
boolean inspect = iterator.hasNext();
|
||||
|
||||
while (inspect) {
|
||||
|
||||
String partial = iterator.next();
|
||||
boolean isPositional = (isPositionalParameter(partial) && (property.isMap() || property.isCollectionLike()));
|
||||
|
||||
if (isPositional) {
|
||||
mappedName += "." + partial;
|
||||
}
|
||||
|
||||
inspect = isPositional && iterator.hasNext();
|
||||
}
|
||||
|
||||
return mappedName;
|
||||
}
|
||||
|
||||
private static boolean isPositionalParameter(String partial) {
|
||||
|
||||
if (partial.equals("$")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
Long.valueOf(partial);
|
||||
return true;
|
||||
} catch (NumberFormatException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -15,8 +15,6 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
@@ -24,13 +22,11 @@ import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update.Modifier;
|
||||
import org.springframework.data.mongodb.core.query.Update.Modifiers;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
@@ -66,8 +62,8 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
protected Object delegateConvertToMongoType(Object source, MongoPersistentEntity<?> entity) {
|
||||
return entity == null ? super.delegateConvertToMongoType(source, null)
|
||||
: converter.convertToMongoType(source, getTypeHintForEntity(entity));
|
||||
return converter.convertToMongoType(source,
|
||||
entity == null ? ClassTypeInformation.OBJECT : getTypeHintForEntity(source, entity));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -90,7 +86,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
return getMappedUpdateModifier(field, rawValue);
|
||||
}
|
||||
|
||||
return super.getMappedObjectForField(field, getMappedValue(field, rawValue));
|
||||
return super.getMappedObjectForField(field, rawValue);
|
||||
}
|
||||
|
||||
private Entry<String, Object> getMappedUpdateModifier(Field field, Object rawValue) {
|
||||
@@ -141,18 +137,20 @@ public class UpdateMapper extends QueryMapper {
|
||||
return new BasicDBObject(modifier.getKey(), value);
|
||||
}
|
||||
|
||||
private TypeInformation<?> getTypeHintForEntity(MongoPersistentEntity<?> entity) {
|
||||
return processTypeHintForNestedDocuments(entity.getTypeInformation());
|
||||
}
|
||||
|
||||
private TypeInformation<?> processTypeHintForNestedDocuments(TypeInformation<?> info) {
|
||||
private TypeInformation<?> getTypeHintForEntity(Object source, MongoPersistentEntity<?> entity) {
|
||||
|
||||
TypeInformation<?> info = entity.getTypeInformation();
|
||||
Class<?> type = info.getActualType().getType();
|
||||
if (type.isInterface() || java.lang.reflect.Modifier.isAbstract(type.getModifiers())) {
|
||||
|
||||
if (source == null || type.isInterface() || java.lang.reflect.Modifier.isAbstract(type.getModifiers())) {
|
||||
return info;
|
||||
}
|
||||
return NESTED_DOCUMENT;
|
||||
|
||||
if (!type.equals(source.getClass())) {
|
||||
return info;
|
||||
}
|
||||
|
||||
return NESTED_DOCUMENT;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -211,7 +209,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return new UpdatePropertyConverter(key);
|
||||
return new PositionParameterRetainingPropertyKeyConverter(key);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -223,99 +221,6 @@ public class UpdateMapper extends QueryMapper {
|
||||
return new UpdateAssociationConverter(getAssociation(), key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Special mapper handling positional parameter {@literal $} within property names.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
*/
|
||||
private static class UpdateKeyMapper {
|
||||
|
||||
private final Iterator<String> iterator;
|
||||
|
||||
protected UpdateKeyMapper(String rawKey) {
|
||||
|
||||
Assert.hasText(rawKey, "Key must not be null or empty!");
|
||||
|
||||
this.iterator = Arrays.asList(rawKey.split("\\.")).iterator();
|
||||
this.iterator.next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps the property name while retaining potential positional operator {@literal $}.
|
||||
*
|
||||
* @param property
|
||||
* @return
|
||||
*/
|
||||
protected String mapPropertyName(MongoPersistentProperty property) {
|
||||
|
||||
String mappedName = PropertyToFieldNameConverter.INSTANCE.convert(property);
|
||||
|
||||
boolean inspect = iterator.hasNext();
|
||||
while (inspect) {
|
||||
|
||||
String partial = iterator.next();
|
||||
|
||||
boolean isPositional = isPositionalParameter(partial);
|
||||
if (isPositional) {
|
||||
mappedName += "." + partial;
|
||||
}
|
||||
|
||||
inspect = isPositional && iterator.hasNext();
|
||||
}
|
||||
|
||||
return mappedName;
|
||||
}
|
||||
|
||||
boolean isPositionalParameter(String partial) {
|
||||
|
||||
if (partial.equals("$")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
Long.valueOf(partial);
|
||||
return true;
|
||||
} catch (NumberFormatException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Special {@link Converter} for {@link MongoPersistentProperty} instances that will concatenate the {@literal $}
|
||||
* contained in the source update key.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private static class UpdatePropertyConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final UpdateKeyMapper mapper;
|
||||
|
||||
/**
|
||||
* Creates a new {@link UpdatePropertyConverter} with the given update key.
|
||||
*
|
||||
* @param updateKey must not be {@literal null} or empty.
|
||||
*/
|
||||
public UpdatePropertyConverter(String updateKey) {
|
||||
|
||||
Assert.hasText(updateKey, "Update key must not be null or empty!");
|
||||
|
||||
this.mapper = new UpdateKeyMapper(updateKey);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty property) {
|
||||
return mapper.mapPropertyName(property);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Converter} retaining positional parameter {@literal $} for {@link Association}s.
|
||||
*
|
||||
@@ -323,7 +228,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
protected static class UpdateAssociationConverter extends AssociationConverter {
|
||||
|
||||
private final UpdateKeyMapper mapper;
|
||||
private final KeyMapper mapper;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AssociationConverter} for the given {@link Association}.
|
||||
@@ -333,7 +238,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
public UpdateAssociationConverter(Association<MongoPersistentProperty> association, String key) {
|
||||
|
||||
super(association);
|
||||
this.mapper = new UpdateKeyMapper(key);
|
||||
this.mapper = new KeyMapper(key);
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -21,15 +21,21 @@ import java.util.concurrent.ConcurrentHashMap;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.context.ApplicationListener;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.MappingContextEvent;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.MongoExceptionTranslator.MongoDbErrorCodes;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.MongoException;
|
||||
|
||||
/**
|
||||
* Component that inspects {@link MongoPersistentEntity} instances contained in the given {@link MongoMappingContext}
|
||||
@@ -129,9 +135,34 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
|
||||
}
|
||||
}
|
||||
|
||||
private void createIndex(IndexDefinitionHolder indexDefinition) {
|
||||
mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()).createIndex(indexDefinition.getIndexKeys(),
|
||||
indexDefinition.getIndexOptions());
|
||||
void createIndex(IndexDefinitionHolder indexDefinition) {
|
||||
|
||||
try {
|
||||
|
||||
mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()).createIndex(indexDefinition.getIndexKeys(),
|
||||
indexDefinition.getIndexOptions());
|
||||
|
||||
} catch (MongoException ex) {
|
||||
|
||||
if (MongoDbErrorCodes.isDataIntegrityViolationCode(ex.getCode())) {
|
||||
|
||||
DBObject existingIndex = fetchIndexInformation(indexDefinition);
|
||||
String message = "Cannot create index for '%s' in collection '%s' with keys '%s' and options '%s'.";
|
||||
|
||||
if (existingIndex != null) {
|
||||
message += " Index already defined as '%s'.";
|
||||
}
|
||||
|
||||
throw new DataIntegrityViolationException(
|
||||
String.format(message, indexDefinition.getPath(), indexDefinition.getCollection(),
|
||||
indexDefinition.getIndexKeys(), indexDefinition.getIndexOptions(), existingIndex),
|
||||
ex);
|
||||
}
|
||||
|
||||
RuntimeException exceptionToThrow = mongoDbFactory.getExceptionTranslator().translateExceptionIfPossible(ex);
|
||||
|
||||
throw exceptionToThrow != null ? exceptionToThrow : ex;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -143,4 +174,28 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
|
||||
public boolean isIndexCreatorFor(MappingContext<?, ?> context) {
|
||||
return this.mappingContext.equals(context);
|
||||
}
|
||||
|
||||
private DBObject fetchIndexInformation(IndexDefinitionHolder indexDefinition) {
|
||||
|
||||
if (indexDefinition == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
Object indexNameToLookUp = indexDefinition.getIndexOptions().get("name");
|
||||
|
||||
for (DBObject index : mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()).getIndexInfo()) {
|
||||
if (ObjectUtils.nullSafeEquals(indexNameToLookUp, index.get("name"))) {
|
||||
return index;
|
||||
}
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
LOGGER.debug(
|
||||
String.format("Failed to load index information for collection '%s'.", indexDefinition.getCollection()), e);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -108,7 +108,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
try {
|
||||
if (persistentProperty.isEntity()) {
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getActualType(),
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getTypeInformation().getActualType(),
|
||||
persistentProperty.getFieldName(), root.getCollection(), guard));
|
||||
}
|
||||
|
||||
@@ -135,7 +135,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
* @return List of {@link IndexDefinitionHolder} representing indexes for given type and its referenced property
|
||||
* types. Will never be {@code null}.
|
||||
*/
|
||||
private List<IndexDefinitionHolder> resolveIndexForClass(final Class<?> type, final String path,
|
||||
private List<IndexDefinitionHolder> resolveIndexForClass(final TypeInformation<?> type, final String path,
|
||||
final String collection, final CycleGuard guard) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(type);
|
||||
@@ -153,8 +153,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
if (persistentProperty.isEntity()) {
|
||||
try {
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getActualType(), propertyDotPath,
|
||||
collection, guard));
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getTypeInformation().getActualType(),
|
||||
propertyDotPath, collection, guard));
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage());
|
||||
}
|
||||
@@ -260,7 +260,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
LOGGER.info(e.getMessage(), e);
|
||||
} catch (InvalidDataAccessApiUsageException e) {
|
||||
LOGGER.info(
|
||||
String.format("Potentially invald index structure discovered. Breaking operation for %s.",
|
||||
String.format("Potentially invalid index structure discovered. Breaking operation for %s.",
|
||||
entity.getName()), e);
|
||||
}
|
||||
} else if (includeOptions.isForce() || indexed != null) {
|
||||
|
||||
@@ -16,6 +16,7 @@
package org.springframework.data.mongodb.core.mapping;

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
@@ -305,28 +306,44 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
*/
private static class PropertyTypeAssertionHandler implements PropertyHandler<MongoPersistentProperty> {

/*
* (non-Javadoc)
* @see org.springframework.data.mapping.PropertyHandler#doWithPersistentProperty(org.springframework.data.mapping.PersistentProperty)
*/
@Override
public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) {

potentiallyAssertTextScoreType(persistentProperty);
potentiallyAssertLanguageType(persistentProperty);
potentiallyAssertDBRefTargetType(persistentProperty);
}

private void potentiallyAssertLanguageType(MongoPersistentProperty persistentProperty) {
private static void potentiallyAssertLanguageType(MongoPersistentProperty persistentProperty) {

if (persistentProperty.isExplicitLanguageProperty()) {
assertPropertyType(persistentProperty, String.class);
}
}

private void potentiallyAssertTextScoreType(MongoPersistentProperty persistentProperty) {
private static void potentiallyAssertTextScoreType(MongoPersistentProperty persistentProperty) {

if (persistentProperty.isTextScoreProperty()) {
assertPropertyType(persistentProperty, Float.class, Double.class);
}
}

private void assertPropertyType(MongoPersistentProperty persistentProperty, Class<?>... validMatches) {
private static void potentiallyAssertDBRefTargetType(MongoPersistentProperty persistentProperty) {

if (persistentProperty.isDbReference() && persistentProperty.getDBRef().lazy()) {
if (persistentProperty.isArray() || Modifier.isFinal(persistentProperty.getActualType().getModifiers())) {
throw new MappingException(String.format(
"Invalid lazy DBRef property for %s. Found %s which must not be an array nor a final class.",
persistentProperty.getField(), persistentProperty.getActualType()));
}
}
}

private static void assertPropertyType(MongoPersistentProperty persistentProperty, Class<?>... validMatches) {

for (Class<?> potentialMatch : validMatches) {
if (ClassUtils.isAssignable(potentialMatch, persistentProperty.getActualType())) {
@@ -334,10 +351,9 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
}
}

throw new MappingException(String.format("Missmatching types for %s. Found %s expected one of %s.",
persistentProperty.getField(), persistentProperty.getActualType(),
StringUtils.arrayToCommaDelimitedString(validMatches)));
throw new MappingException(
String.format("Missmatching types for %s. Found %s expected one of %s.", persistentProperty.getField(),
persistentProperty.getActualType(), StringUtils.arrayToCommaDelimitedString(validMatches)));
}
}

}

@@ -45,7 +45,7 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
int getFieldOrder();

/**
* Returns whether the propert is a {@link com.mongodb.DBRef}. If this returns {@literal true} you can expect
* Returns whether the property is a {@link com.mongodb.DBRef}. If this returns {@literal true} you can expect
* {@link #getDBRef()} to return an non-{@literal null} value.
*
* @return

@@ -1,5 +1,5 @@
/*
* Copyright 2013 by the original author(s).
* Copyright 2013-2015 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -21,6 +21,7 @@ import com.mongodb.DBObject;
* Base class for delete events.
*
* @author Martin Baumgartner
* @author Christoph Strobl
*/
public abstract class AbstractDeleteEvent<T> extends MongoMappingEvent<DBObject> {

@@ -31,11 +32,25 @@ public abstract class AbstractDeleteEvent<T> extends MongoMappingEvent<DBObject>
* Creates a new {@link AbstractDeleteEvent} for the given {@link DBObject} and type.
*
* @param dbo must not be {@literal null}.
* @param type , possibly be {@literal null}.
* @param type can be {@literal null}.
* @deprecated since 1.8. Please use {@link #AbstractDeleteEvent(DBObject, Class, String)}.
*/
@Deprecated
public AbstractDeleteEvent(DBObject dbo, Class<T> type) {
this(dbo, type, null);
}

super(dbo, dbo);
/**
* Creates a new {@link AbstractDeleteEvent} for the given {@link DBObject} and type.
*
* @param dbo must not be {@literal null}.
* @param type can be {@literal null}.
* @param collectionName can be {@literal null}.
* @since 1.8
*/
public AbstractDeleteEvent(DBObject dbo, Class<T> type, String collectionName) {

super(dbo, dbo, collectionName);
this.type = type;
}

@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 by the original author(s).
|
||||
* Copyright 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -28,6 +28,7 @@ import com.mongodb.DBObject;
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Martin Baumgartner
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public abstract class AbstractMongoEventListener<E> implements ApplicationListener<MongoMappingEvent<?>> {
|
||||
|
||||
@@ -46,14 +47,14 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent)
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
public void onApplicationEvent(MongoMappingEvent<?> event) {
|
||||
|
||||
if (event instanceof AfterLoadEvent) {
|
||||
AfterLoadEvent<?> afterLoadEvent = (AfterLoadEvent<?>) event;
|
||||
|
||||
if (domainClass.isAssignableFrom(afterLoadEvent.getType())) {
|
||||
onAfterLoad(event.getDBObject());
|
||||
onAfterLoad((AfterLoadEvent<E>) event);
|
||||
}
|
||||
|
||||
return;
|
||||
@@ -65,18 +66,18 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
|
||||
if (eventDomainType != null && domainClass.isAssignableFrom(eventDomainType)) {
|
||||
if (event instanceof BeforeDeleteEvent) {
|
||||
onBeforeDelete(event.getDBObject());
|
||||
onBeforeDelete((BeforeDeleteEvent<E>) event);
|
||||
}
|
||||
if (event instanceof AfterDeleteEvent) {
|
||||
onAfterDelete(event.getDBObject());
|
||||
onAfterDelete((AfterDeleteEvent<E>) event);
|
||||
}
|
||||
}
|
||||
|
||||
return;
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
E source = (E) event.getSource();
|
||||
Object source = event.getSource();
|
||||
|
||||
// Check for matching domain type and invoke callbacks
|
||||
if (source != null && !domainClass.isAssignableFrom(source.getClass())) {
|
||||
@@ -84,55 +85,185 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
}
|
||||
|
||||
if (event instanceof BeforeConvertEvent) {
|
||||
onBeforeConvert(source);
|
||||
onBeforeConvert((BeforeConvertEvent<E>) event);
|
||||
} else if (event instanceof BeforeSaveEvent) {
|
||||
onBeforeSave(source, event.getDBObject());
|
||||
onBeforeSave((BeforeSaveEvent<E>) event);
|
||||
} else if (event instanceof AfterSaveEvent) {
|
||||
onAfterSave(source, event.getDBObject());
|
||||
onAfterSave((AfterSaveEvent<E>) event);
|
||||
} else if (event instanceof AfterConvertEvent) {
|
||||
onAfterConvert(event.getDBObject(), source);
|
||||
onAfterConvert((AfterConvertEvent<E>) event);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures source element before conversion.
|
||||
*
|
||||
* @param source will never be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #onBeforeConvert(BeforeConvertEvent)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void onBeforeConvert(E source) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onBeforeConvert({})", source);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures {@link BeforeConvertEvent}.
|
||||
*
|
||||
* @param event never {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public void onBeforeConvert(BeforeConvertEvent<E> event) {
|
||||
onBeforeConvert(event.getSource());
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures source element and {@link com.mongodb.DBObject} representation before save.
|
||||
*
|
||||
* @param source will never be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #onBeforeSave(BeforeSaveEvent)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void onBeforeSave(E source, DBObject dbo) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onBeforeSave({}, {})", source, dbo);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures {@link BeforeSaveEvent}.
|
||||
*
|
||||
* @param event will never be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public void onBeforeSave(BeforeSaveEvent<E> event) {
|
||||
onBeforeSave(event.getSource(), event.getDBObject());
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures source element and {@link com.mongodb.DBObject} representation after save.
|
||||
*
|
||||
* @param source will never be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #onAfterSave(AfterSaveEvent)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void onAfterSave(E source, DBObject dbo) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterSave({}, {})", source, dbo);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures {@link AfterSaveEvent}.
|
||||
*
|
||||
* @param event will never be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public void onAfterSave(AfterSaveEvent<E> event) {
|
||||
onAfterSave(event.getSource(), event.getDBObject());
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures raw {@link com.mongodb.DBObject} when read from MongoDB.
|
||||
*
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #onAfterLoad(AfterLoadEvent)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void onAfterLoad(DBObject dbo) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterLoad({})", dbo);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures {@link AfterLoadEvent}.
|
||||
*
|
||||
* @param event will never be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public void onAfterLoad(AfterLoadEvent<E> event) {
|
||||
onAfterLoad(event.getDBObject());
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures raw {@link com.mongodb.DBObject} and converted domain type after conversion.
|
||||
*
|
||||
* @param dbo can be {@literal null}.
|
||||
* @param source will never be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #onAfterConvert(AfterConvertEvent)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void onAfterConvert(DBObject dbo, E source) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterConvert({}, {})", dbo, source);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures {@link AfterConvertEvent}.
|
||||
*
|
||||
* @param event will never be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public void onAfterConvert(AfterConvertEvent<E> event) {
|
||||
onAfterConvert(event.getDBObject(), event.getSource());
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures {@link com.mongodb.DBObject} after delete.
|
||||
*
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #onAfterDelete(AfterDeleteEvent)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void onAfterDelete(DBObject dbo) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterDelete({})", dbo);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures {@link AfterDeleteEvent}.
|
||||
*
|
||||
* @param event will never be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public void onAfterDelete(AfterDeleteEvent<E> event) {
|
||||
onAfterDelete(event.getDBObject());
|
||||
}
|
||||
|
||||
/**
* Capture {@link com.mongodb.DBObject} before delete.
*
* @param dbo can be {@literal null}.
* @deprecated since 1.8. Please use {@link #onBeforeDelete(BeforeDeleteEvent)}.
*/
@Deprecated
public void onBeforeDelete(DBObject dbo) {

if (LOG.isDebugEnabled()) {
LOG.debug("onBeforeDelete({})", dbo);
}
}

/**
* Capture {@link BeforeDeleteEvent}.
*
* @param event will never be {@literal null}.
* @since 1.8
*/
public void onBeforeDelete(BeforeDeleteEvent<E> event) {
onBeforeDelete(event.getDBObject());
}
}

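The listener now dispatches typed event objects and keeps the deprecated DBObject/source callbacks only as delegation targets for existing subclasses. A minimal sketch of a listener written against the new 1.8 callbacks; the Person domain type and the audit logic are assumptions for illustration, not part of this change set:

public class PersonEventListener extends AbstractMongoEventListener<Person> {

	@Override
	public void onBeforeSave(BeforeSaveEvent<Person> event) {
		// the event exposes the domain object, the mapped DBObject and, since 1.8, the collection name
		event.getDBObject().put("_lastModified", System.currentTimeMillis());
	}

	@Override
	public void onAfterDelete(AfterDeleteEvent<Person> event) {
		// getCollectionName() may be null when the publisher did not provide it
		System.out.println("removed from " + event.getCollectionName() + ": " + event.getDBObject());
	}
}
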
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -13,20 +13,42 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* {@link MongoMappingEvent} thrown after convert of a document.
|
||||
*
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class AfterConvertEvent<E> extends MongoMappingEvent<E> {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/**
|
||||
* Creates new {@link AfterConvertEvent}.
|
||||
*
|
||||
* @param dbo can be {@literal null}.
|
||||
* @param source must not be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #AfterConvertEvent(DBObject, Object, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public AfterConvertEvent(DBObject dbo, E source) {
|
||||
super(source, dbo);
|
||||
this(dbo, source, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AfterConvertEvent}.
|
||||
*
|
||||
* @param dbo can be {@literal null}.
|
||||
* @param source must not be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public AfterConvertEvent(DBObject dbo, E source, String collectionName) {
|
||||
super(source, dbo, collectionName);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 by the original author(s).
|
||||
* Copyright 2013-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,6 +22,7 @@ import com.mongodb.DBObject;
|
||||
* will be the query document <em>after</am> it has been mapped onto the domain type handled.
|
||||
*
|
||||
* @author Martin Baumgartner
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class AfterDeleteEvent<T> extends AbstractDeleteEvent<T> {
|
||||
|
||||
@@ -32,8 +33,22 @@ public class AfterDeleteEvent<T> extends AbstractDeleteEvent<T> {
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #AfterDeleteEvent(DBObject, Class, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public AfterDeleteEvent(DBObject dbo, Class<T> type) {
|
||||
super(dbo, type);
|
||||
this(dbo, type, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link AfterDeleteEvent} for the given {@link DBObject}, type and collectionName.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public AfterDeleteEvent(DBObject dbo, Class<T> type, String collectionName) {
|
||||
super(dbo, type, collectionName);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -26,6 +26,7 @@ import com.mongodb.DBObject;
|
||||
* @author Oliver Gierke
|
||||
* @author Jon Brisbin
|
||||
* @author Christoph Leiter
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class AfterLoadEvent<T> extends MongoMappingEvent<DBObject> {
|
||||
|
||||
@@ -36,11 +37,25 @@ public class AfterLoadEvent<T> extends MongoMappingEvent<DBObject> {
|
||||
* Creates a new {@link AfterLoadEvent} for the given {@link DBObject} and type.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #AfterLoadEvent(DBObject, Class, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public AfterLoadEvent(DBObject dbo, Class<T> type) {
|
||||
this(dbo, type, null);
|
||||
}
|
||||
|
||||
super(dbo, dbo);
|
||||
/**
|
||||
* Creates a new {@link AfterLoadEvent} for the given {@link DBObject}, type and collectionName.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type must not be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public AfterLoadEvent(DBObject dbo, Class<T> type, String collectionName) {
|
||||
|
||||
super(dbo, dbo, collectionName);
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
this.type = type;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -19,14 +19,37 @@ package org.springframework.data.mongodb.core.mapping.event;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* {@link MongoMappingEvent} triggered after save of a document.
|
||||
*
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class AfterSaveEvent<E> extends MongoMappingEvent<E> {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/**
|
||||
* Creates new {@link AfterSaveEvent}
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #AfterSaveEvent(Object, DBObject, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public AfterSaveEvent(E source, DBObject dbo) {
|
||||
super(source, dbo);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AfterSaveEvent}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public AfterSaveEvent(E source, DBObject dbo, String collectionName) {
|
||||
super(source, dbo, collectionName);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,12 +20,31 @@ package org.springframework.data.mongodb.core.mapping.event;
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class BeforeConvertEvent<T> extends MongoMappingEvent<T> {
|
||||
|
||||
private static final long serialVersionUID = 252614269008845243L;
|
||||
|
||||
/**
|
||||
* Creates new {@link BeforeConvertEvent}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #BeforeConvertEvent(Object, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public BeforeConvertEvent(T source) {
|
||||
super(source, null);
|
||||
this(source, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link BeforeConvertEvent}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public BeforeConvertEvent(T source, String collectionName) {
|
||||
super(source, null, collectionName);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 by the original author(s).
|
||||
* Copyright 2013-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,6 +22,7 @@ import com.mongodb.DBObject;
|
||||
* document <em>before</em> being mapped based on the domain class handled.
|
||||
*
|
||||
* @author Martin Baumgartner
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class BeforeDeleteEvent<T> extends AbstractDeleteEvent<T> {
|
||||
|
||||
@@ -32,8 +33,22 @@ public class BeforeDeleteEvent<T> extends AbstractDeleteEvent<T> {
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #BeforeDeleteEvent(DBObject, Class, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public BeforeDeleteEvent(DBObject dbo, Class<T> type) {
|
||||
super(dbo, type);
|
||||
this(dbo, type, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link BeforeDeleteEvent} for the given {@link DBObject}, type and collectionName.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public BeforeDeleteEvent(DBObject dbo, Class<T> type, String collectionName) {
|
||||
super(dbo, type, collectionName);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -19,14 +19,37 @@ package org.springframework.data.mongodb.core.mapping.event;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* {@link MongoMappingEvent} triggered before save of a document.
|
||||
*
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class BeforeSaveEvent<E> extends MongoMappingEvent<E> {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/**
|
||||
* Creates new {@link BeforeSaveEvent}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #BeforeSaveEvent(Object, DBObject, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public BeforeSaveEvent(E source, DBObject dbo) {
|
||||
super(source, dbo);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link BeforeSaveEvent}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public BeforeSaveEvent(E source, DBObject dbo, String collectionName) {
|
||||
super(source, dbo, collectionName);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,26 +16,69 @@
|
||||
|
||||
package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
import org.springframework.context.ApplicationEvent;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Base {@link ApplicationEvent} triggered by Spring Data MongoDB.
|
||||
*
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MongoMappingEvent<T> extends ApplicationEvent {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
private final DBObject dbo;
|
||||
private final String collectionName;
|
||||
|
||||
/**
|
||||
* Creates new {@link MongoMappingEvent}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #MongoMappingEvent(Object, DBObject, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public MongoMappingEvent(T source, DBObject dbo) {
|
||||
super(source);
|
||||
this.dbo = dbo;
|
||||
this(source, dbo, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link MongoMappingEvent}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
*/
|
||||
public MongoMappingEvent(T source, DBObject dbo, String collectionName) {
|
||||
|
||||
super(source);
|
||||
this.dbo = dbo;
|
||||
this.collectionName = collectionName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal null} if not set.
|
||||
*/
|
||||
public DBObject getDBObject() {
|
||||
return dbo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the collection the event refers to.
|
||||
*
|
||||
* @return {@literal null} if not set.
|
||||
* @since 1.8
|
||||
*/
|
||||
public String getCollectionName() {
|
||||
return collectionName;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.util.EventObject#getSource()
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked" })
|
||||
@Override
|
||||
public T getSource() {
|
||||
|
||||
@@ -176,7 +176,7 @@ public class Query {

for (Order order : sort) {
if (order.isIgnoreCase()) {
throw new IllegalArgumentException(String.format("Gven sort contained an Order for %s with ignore case! "
throw new IllegalArgumentException(String.format("Given sort contained an Order for %s with ignore case! "
+ "MongoDB does not support sorting ignoreing case currently!", order.getProperty()));
}
}

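Since ignore-case ordering cannot be pushed down to MongoDB, the check above fails fast instead of sending an unsupported sort to the server. A small illustrative sketch, with a made-up property name:

// accepted: a plain order
Query accepted = new Query().with(new Sort(Sort.Direction.ASC, "lastname"));

// rejected with the IllegalArgumentException above: MongoDB cannot sort ignoring case
Query rejected = new Query().with(new Sort(new Sort.Order(Sort.Direction.ASC, "lastname").ignoreCase()));
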
@@ -63,7 +63,7 @@ public class TextCriteria implements CriteriaDefinition {
}

/**
* For a full list of supported languages see the mongdodb reference manual for <a
* For a full list of supported languages see the mongodb reference manual for <a
* href="http://docs.mongodb.org/manual/reference/text-search-languages/">Text Search Languages</a>.
*
* @param language

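As a usage sketch, a language-scoped text search might be built like this; the language and search term are assumptions for illustration:

TextCriteria criteria = TextCriteria.forLanguage("spanish").matching("leche");
Query query = TextQuery.queryText(criteria).sortByScore();
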
@@ -64,7 +64,7 @@ public class Update {
}

/**
* Creates an {@link Update} instance from the given {@link DBObject}. Allows to explicitly exlude fields from making
* Creates an {@link Update} instance from the given {@link DBObject}. Allows to explicitly exclude fields from making
* it into the created {@link Update} object. Note, that this will set attributes directly and <em>not</em> use
* {@literal $set}. This means fields not given in the {@link DBObject} will be nulled when executing the update. To
* create an only-updating {@link Update} instance of a {@link DBObject}, call {@link #set(String, Object)} for each

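A brief sketch of the behaviour that Javadoc describes; the document content and the excluded key are made up:

DBObject dbo = new BasicDBObject("firstname", "Rand").append("age", 20);
// "age" is excluded; "firstname" is written directly rather than via $set,
// so fields missing from the DBObject would be nulled when the update executes
Update update = Update.fromDBObject(dbo, "age");
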
@@ -38,7 +38,7 @@ import org.springframework.data.annotation.QueryAnnotation;
public @interface Query {

/**
* Takes a MongoDB JSON string to define the actual query to be executed. This one will take precendece over the
* Takes a MongoDB JSON string to define the actual query to be executed. This one will take precedence over the
* method name then.
*
* @return

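A typical declaration using the annotation on a repository method; the repository, domain type and field name are assumptions:

public interface PersonRepository extends MongoRepository<Person, String> {

	// the JSON query below takes precedence over query derivation from the method name
	@Query("{ 'firstname' : ?0 }")
	List<Person> findByTheUsersFirstName(String firstname);
}
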
@@ -20,6 +20,7 @@ import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -29,6 +30,7 @@ import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.Metrics;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.geo.Shape;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.PersistentPropertyPath;
|
||||
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
|
||||
@@ -44,6 +46,7 @@ import org.springframework.data.repository.query.parser.Part.IgnoreCaseType;
|
||||
import org.springframework.data.repository.query.parser.Part.Type;
|
||||
import org.springframework.data.repository.query.parser.PartTree;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Custom query creator to create Mongo criterias.
|
||||
@@ -55,6 +58,7 @@ import org.springframework.util.Assert;
|
||||
class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(MongoQueryCreator.class);
|
||||
private static final Pattern PUNCTATION_PATTERN = Pattern.compile("\\p{Punct}");
|
||||
private final MongoParameterAccessor accessor;
|
||||
private final boolean isGeoNearQuery;
|
||||
|
||||
@@ -276,19 +280,23 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
private Criteria createLikeRegexCriteriaOrThrow(Part part, MongoPersistentProperty property, Criteria criteria,
|
||||
PotentiallyConvertingIterator parameters, boolean shouldNegateExpression) {
|
||||
|
||||
PropertyPath path = part.getProperty().getLeafProperty();
|
||||
|
||||
switch (part.shouldIgnoreCase()) {
|
||||
|
||||
case ALWAYS:
|
||||
if (part.getProperty().getType() != String.class) {
|
||||
throw new IllegalArgumentException(String.format("part %s must be of type String but was %s",
|
||||
part.getProperty(), part.getType()));
|
||||
if (path.getType() != String.class) {
|
||||
throw new IllegalArgumentException(
|
||||
String.format("Part %s must be of type String but was %s", path, path.getType()));
|
||||
}
|
||||
// fall-through
|
||||
|
||||
case WHEN_POSSIBLE:
|
||||
|
||||
if (shouldNegateExpression) {
|
||||
criteria = criteria.not();
|
||||
}
|
||||
|
||||
return addAppropriateLikeRegexTo(criteria, part, parameters.nextConverted(property).toString());
|
||||
|
||||
case NEVER:
|
||||
@@ -365,8 +373,8 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
return (T) parameter;
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Expected parameter type of %s but got %s!", type,
|
||||
parameter.getClass()));
|
||||
throw new IllegalArgumentException(
|
||||
String.format("Expected parameter type of %s but got %s!", type, parameter.getClass()));
|
||||
}
|
||||
|
||||
private Object[] nextAsArray(PotentiallyConvertingIterator iterator, MongoPersistentProperty property) {
|
||||
@@ -384,25 +392,59 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
private String toLikeRegex(String source, Part part) {

Type type = part.getType();
String regex = prepareAndEscapeStringBeforeApplyingLikeRegex(source, part);

switch (type) {
case STARTING_WITH:
source = "^" + source;
regex = "^" + regex;
break;
case ENDING_WITH:
source = source + "$";
regex = regex + "$";
break;
case CONTAINING:
case NOT_CONTAINING:
source = "*" + source + "*";
regex = ".*" + regex + ".*";
break;
case SIMPLE_PROPERTY:
case NEGATING_SIMPLE_PROPERTY:
source = "^" + source + "$";
regex = "^" + regex + "$";
default:
}

return source.replaceAll("\\*", ".*");
return regex;
}

private String prepareAndEscapeStringBeforeApplyingLikeRegex(String source, Part qpart) {

if (!ObjectUtils.nullSafeEquals(Type.LIKE, qpart.getType())) {
return PUNCTATION_PATTERN.matcher(source).find() ? Pattern.quote(source) : source;
}

if (source.equals("*")) {
return ".*";
}

StringBuilder sb = new StringBuilder();

boolean leadingWildcard = source.startsWith("*");
boolean trailingWildcard = source.endsWith("*");

String valueToUse = source.substring(leadingWildcard ? 1 : 0,
trailingWildcard ? source.length() - 1 : source.length());

if (PUNCTATION_PATTERN.matcher(valueToUse).find()) {
valueToUse = Pattern.quote(valueToUse);
}

if (leadingWildcard) {
sb.append(".*");
}
sb.append(valueToUse);
if (trailingWildcard) {
sb.append(".*");
}

return sb.toString();
}

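Taken together, the two methods above translate the wildcards of a derived Like query into a regex, quoting any punctuation in the remaining literal first. A hypothetical repository method to illustrate the resulting patterns; the names are assumptions:

public interface PersonRepository extends MongoRepository<Person, String> {

	// "Mat*" -> "Mat.*", "*ish*" -> ".*ish.*", and a value containing punctuation
	// such as "a.b*" is quoted first, yielding "\Qa.b\E.*"
	List<Person> findByFirstnameLike(String firstname);
}
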
private boolean isSpherical(MongoPersistentProperty property) {
|
||||
|
||||
@@ -34,6 +34,7 @@ import org.springframework.data.repository.query.QueryMethod;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
@@ -122,13 +123,22 @@ public class MongoQueryMethod extends QueryMethod {
|
||||
Class<?> returnedObjectType = getReturnedObjectType();
|
||||
Class<?> domainClass = getDomainClass();
|
||||
|
||||
MongoPersistentEntity<?> returnedEntity = mappingContext.getPersistentEntity(getReturnedObjectType());
|
||||
MongoPersistentEntity<?> managedEntity = mappingContext.getPersistentEntity(domainClass);
|
||||
returnedEntity = returnedEntity == null ? managedEntity : returnedEntity;
|
||||
MongoPersistentEntity<?> collectionEntity = domainClass.isAssignableFrom(returnedObjectType) ? returnedEntity
|
||||
: managedEntity;
|
||||
if (ClassUtils.isPrimitiveOrWrapper(returnedObjectType)) {
|
||||
|
||||
this.metadata = new SimpleMongoEntityMetadata<Object>((Class<Object>) returnedEntity.getType(), collectionEntity);
|
||||
this.metadata = new SimpleMongoEntityMetadata<Object>((Class<Object>) domainClass,
|
||||
mappingContext.getPersistentEntity(domainClass));
|
||||
|
||||
} else {
|
||||
|
||||
MongoPersistentEntity<?> returnedEntity = mappingContext.getPersistentEntity(returnedObjectType);
|
||||
MongoPersistentEntity<?> managedEntity = mappingContext.getPersistentEntity(domainClass);
|
||||
returnedEntity = returnedEntity == null ? managedEntity : returnedEntity;
|
||||
MongoPersistentEntity<?> collectionEntity = domainClass.isAssignableFrom(returnedObjectType) ? returnedEntity
|
||||
: managedEntity;
|
||||
|
||||
this.metadata = new SimpleMongoEntityMetadata<Object>((Class<Object>) returnedEntity.getType(),
|
||||
collectionEntity);
|
||||
}
|
||||
}
|
||||
|
||||
return this.metadata;
|
||||
|
||||
@@ -168,6 +168,8 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
return input;
|
||||
}
|
||||
|
||||
boolean isCompletlyParameterizedQuery = input.matches("^\\?\\d+$");
|
||||
|
||||
StringBuilder result = new StringBuilder(input);
|
||||
|
||||
for (ParameterBinding binding : bindings) {
|
||||
@@ -176,7 +178,30 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
int idx = result.indexOf(parameter);
|
||||
|
||||
if (idx != -1) {
|
||||
result.replace(idx, idx + parameter.length(), getParameterValueForBinding(accessor, binding));
|
||||
String valueForBinding = getParameterValueForBinding(accessor, binding);
|
||||
|
||||
// if the value to bind is an object literal we need to remove the quoting around
|
||||
// the expression insertion point.
|
||||
boolean shouldPotentiallyRemoveQuotes = valueForBinding.startsWith("{") && !isCompletlyParameterizedQuery;
|
||||
|
||||
int start = idx;
|
||||
int end = idx + parameter.length();
|
||||
|
||||
if (shouldPotentiallyRemoveQuotes) {
|
||||
|
||||
// is the insertion point actually surrounded by quotes?
|
||||
char beforeStart = result.charAt(start - 1);
|
||||
char afterEnd = result.charAt(end);
|
||||
|
||||
if ((beforeStart == '\'' || beforeStart == '"') && (afterEnd == '\'' || afterEnd == '"')) {
|
||||
|
||||
// skip preceeding and following quote
|
||||
start -= 1;
|
||||
end += 1;
|
||||
}
|
||||
}
|
||||
|
||||
result.replace(start, end, valueForBinding);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -214,6 +239,7 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
EvaluationContext evaluationContext = evaluationContextProvider
|
||||
.getEvaluationContext(getQueryMethod().getParameters(), parameterValues);
|
||||
Expression expression = expressionParser.parseExpression(expressionString);
|
||||
|
||||
return expression.getValue(evaluationContext, Object.class);
|
||||
}
|
||||
|
||||
@@ -226,11 +252,16 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
private static final String EXPRESSION_PARAM_QUOTE = "'";
|
||||
private static final String EXPRESSION_PARAM_PREFIX = "?expr";
|
||||
private static final String INDEX_BASED_EXPRESSION_PARAM_START = "?#{";
|
||||
private static final String NAME_BASED_EXPRESSION_PARAM_START = ":#{";
|
||||
private static final char CURRLY_BRACE_OPEN = '{';
|
||||
private static final char CURRLY_BRACE_CLOSE = '}';
|
||||
private static final String PARAMETER_PREFIX = "_param_";
|
||||
private static final String PARSEABLE_PARAMETER = "\"" + PARAMETER_PREFIX + "$1\"";
|
||||
private static final Pattern PARAMETER_BINDING_PATTERN = Pattern.compile("\\?(\\d+)");
|
||||
private static final Pattern PARSEABLE_BINDING_PATTERN = Pattern.compile("\"?" + PARAMETER_PREFIX + "(\\d+)\"?");
|
||||
private static final Pattern PARAMETER_EXPRESSION_PATTERN = Pattern.compile("((:|\\?)#\\{([^}]+)\\})");
|
||||
|
||||
private final static int PARAMETER_INDEX_GROUP = 1;
|
||||
|
||||
@@ -258,44 +289,64 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
return transformedInput;
|
||||
}
|
||||
|
||||
private String transformQueryAndCollectExpressionParametersIntoBindings(String input,
|
||||
private static String transformQueryAndCollectExpressionParametersIntoBindings(String input,
|
||||
List<ParameterBinding> bindings) {
|
||||
|
||||
Matcher matcher = PARAMETER_EXPRESSION_PATTERN.matcher(input);
|
||||
|
||||
StringBuilder result = new StringBuilder();
|
||||
|
||||
int lastPos = 0;
|
||||
int startIndex = 0;
|
||||
int currentPos = 0;
|
||||
int exprIndex = 0;
|
||||
|
||||
while (matcher.find()) {
|
||||
while (currentPos < input.length()) {
|
||||
|
||||
int startOffSet = matcher.start();
|
||||
int indexOfExpressionParameter = getIndexOfExpressionParameter(input, currentPos);
|
||||
|
||||
result.append(input.subSequence(lastPos, startOffSet));
|
||||
result.append("'?expr").append(exprIndex).append("'");
|
||||
// no expression parameter found
|
||||
if (indexOfExpressionParameter < 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
lastPos = matcher.end();
|
||||
int exprStart = indexOfExpressionParameter + 3;
|
||||
currentPos = exprStart;
|
||||
|
||||
bindings.add(new ParameterBinding(exprIndex, true, matcher.group(3)));
|
||||
// eat parameter expression
|
||||
int curlyBraceOpenCnt = 1;
|
||||
|
||||
while (curlyBraceOpenCnt > 0) {
|
||||
switch (input.charAt(currentPos++)) {
|
||||
case CURRLY_BRACE_OPEN:
|
||||
curlyBraceOpenCnt++;
|
||||
break;
|
||||
case CURRLY_BRACE_CLOSE:
|
||||
curlyBraceOpenCnt--;
|
||||
break;
|
||||
default:
|
||||
}
|
||||
}
|
||||
|
||||
result.append(input.subSequence(startIndex, indexOfExpressionParameter));
|
||||
result.append(EXPRESSION_PARAM_QUOTE).append(EXPRESSION_PARAM_PREFIX);
|
||||
result.append(exprIndex);
|
||||
result.append(EXPRESSION_PARAM_QUOTE);
|
||||
|
||||
bindings.add(new ParameterBinding(exprIndex, true, input.substring(exprStart, currentPos - 1)));
|
||||
|
||||
startIndex = currentPos;
|
||||
|
||||
exprIndex++;
|
||||
}
|
||||
|
||||
result.append(input.subSequence(lastPos, input.length()));
|
||||
|
||||
return result.toString();
|
||||
return result.append(input.subSequence(currentPos, input.length())).toString();
|
||||
}
|
||||
|
||||
private String makeParameterReferencesParseable(String input) {
|
||||
private static String makeParameterReferencesParseable(String input) {
|
||||
|
||||
Matcher matcher = PARAMETER_BINDING_PATTERN.matcher(input);
|
||||
String parseableInput = matcher.replaceAll(PARSEABLE_PARAMETER);
|
||||
|
||||
return parseableInput;
|
||||
return matcher.replaceAll(PARSEABLE_PARAMETER);
|
||||
}
|
||||
|
||||
private void collectParameterReferencesIntoBindings(List<ParameterBinding> bindings, Object value) {
|
||||
private static void collectParameterReferencesIntoBindings(List<ParameterBinding> bindings, Object value) {
|
||||
|
||||
if (value instanceof String) {
|
||||
|
||||
@@ -338,7 +389,7 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
}
|
||||
}
|
||||
|
||||
private void potentiallyAddBinding(String source, List<ParameterBinding> bindings) {
|
||||
private static void potentiallyAddBinding(String source, List<ParameterBinding> bindings) {
|
||||
|
||||
Matcher valueMatcher = PARSEABLE_BINDING_PATTERN.matcher(source);
|
||||
|
||||
@@ -351,6 +402,14 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
bindings.add(new ParameterBinding(paramIndex, quoted));
}
}

private static int getIndexOfExpressionParameter(String input, int position) {

int indexOfExpressionParameter = input.indexOf(INDEX_BASED_EXPRESSION_PARAM_START, position);

return indexOfExpressionParameter < 0 ? input.indexOf(NAME_BASED_EXPRESSION_PARAM_START, position)
: indexOfExpressionParameter;
}
}

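The parser above lifts ?#{…} and :#{…} SpEL expressions out of the query string, replaces each with a quoted synthetic marker such as '?expr0' and records a ParameterBinding that is evaluated against the invocation arguments at execution time. A hedged sketch of a repository query using that syntax; the domain type and field are assumptions:

public interface PersonRepository extends MongoRepository<Person, String> {

	// [0] refers to the first method argument inside the SpEL expression
	@Query("{ 'lastname' : ?#{[0]} }")
	List<Person> findByLastnameViaExpression(String lastname);
}
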
/**
|
||||
|
||||
@@ -35,7 +35,7 @@ import com.mysema.query.apt.Configuration;
|
||||
import com.mysema.query.apt.DefaultConfiguration;
|
||||
|
||||
/**
|
||||
* Annotation processor to create Querydsl query types for QueryDsl annoated classes.
|
||||
* Annotation processor to create Querydsl query types for QueryDsl annotated classes.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
|
||||
@@ -601,7 +601,7 @@ The comma delimited list of host:port entries to use for replica set/pairs.
|
||||
<xsd:attribute name="credentials" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The comma delimited list of username:password@database entries to use for authentication. Appending ?uri.authMechanism allows to specify the authentication challenge mechanism.
|
||||
The comma delimited list of username:password@database entries to use for authentication. Appending ?uri.authMechanism allows to specify the authentication challenge mechanism. If the credential you're trying to pass contains a comma itself, quote it with single quotes: '…'.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
|
||||
@@ -607,7 +607,7 @@ The comma delimited list of host:port entries to use for replica set/pairs.
|
||||
<xsd:attribute name="credentials" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The comma delimited list of username:password@database entries to use for authentication. Appending ?uri.authMechanism allows to specify the authentication challenge mechanism.
|
||||
The comma delimited list of username:password@database entries to use for authentication. Appending ?uri.authMechanism allows to specify the authentication challenge mechanism. If the credential you're trying to pass contains a comma itself, quote it with single quotes: '…'.
|
||||
]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
|
||||
@@ -47,7 +47,7 @@ public class AuditingIntegrationTests {
|
||||
mappingContext.getPersistentEntity(Entity.class);
|
||||
|
||||
Entity entity = new Entity();
|
||||
BeforeConvertEvent<Entity> event = new BeforeConvertEvent<Entity>(entity);
|
||||
BeforeConvertEvent<Entity> event = new BeforeConvertEvent<Entity>(entity, "collection-1");
|
||||
context.publishEvent(event);
|
||||
|
||||
assertThat(entity.created, is(notNullValue()));
|
||||
@@ -55,7 +55,7 @@ public class AuditingIntegrationTests {
|
||||
|
||||
Thread.sleep(10);
|
||||
entity.id = 1L;
|
||||
event = new BeforeConvertEvent<Entity>(entity);
|
||||
event = new BeforeConvertEvent<Entity>(entity, "collection-1");
|
||||
context.publishEvent(event);
|
||||
|
||||
assertThat(entity.created, is(notNullValue()));
|
||||
|
||||
@@ -43,6 +43,9 @@ public class MongoCredentialPropertyEditorUnitTests {
|
||||
static final String USER_2_PWD = "warg";
|
||||
static final String USER_2_DB = "snow";
|
||||
|
||||
static final String USER_3_NAME = "CN=myName,OU=myOrgUnit,O=myOrg,L=myLocality,ST=myState,C=myCountry";
|
||||
static final String USER_3_DB = "stark";
|
||||
|
||||
static final String USER_1_AUTH_STRING = USER_1_NAME + ":" + USER_1_PWD + "@" + USER_1_DB;
|
||||
static final String USER_1_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM = USER_1_AUTH_STRING + "?uri.authMechanism=PLAIN";
|
||||
|
||||
@@ -50,6 +53,9 @@ public class MongoCredentialPropertyEditorUnitTests {
|
||||
static final String USER_2_AUTH_STRING_WITH_MONGODB_CR_AUTH_MECHANISM = USER_2_AUTH_STRING
|
||||
+ "?uri.authMechanism=MONGODB-CR";
|
||||
|
||||
static final String USER_3_AUTH_STRING_WITH_X509_AUTH_MECHANISM = "'" + USER_3_NAME + "@" + USER_3_DB
|
||||
+ "?uri.authMechanism=MONGODB-X509'";
|
||||
|
||||
static final MongoCredential USER_1_CREDENTIALS = MongoCredential.createCredential(USER_1_NAME, USER_1_DB,
|
||||
USER_1_PWD.toCharArray());
|
||||
static final MongoCredential USER_1_CREDENTIALS_PLAIN_AUTH = MongoCredential.createPlainCredential(USER_1_NAME,
|
||||
@@ -60,6 +66,8 @@ public class MongoCredentialPropertyEditorUnitTests {
|
||||
static final MongoCredential USER_2_CREDENTIALS_CR_AUTH = MongoCredential.createMongoCRCredential(USER_2_NAME,
|
||||
USER_2_DB, USER_2_PWD.toCharArray());
|
||||
|
||||
static final MongoCredential USER_3_CREDENTIALS_X509_AUTH = MongoCredential.createMongoX509Credential(USER_3_NAME);
|
||||
|
||||
MongoCredentialPropertyEditor editor;
|
||||
|
||||
@Before
|
||||
@@ -168,4 +176,75 @@ public class MongoCredentialPropertyEditorUnitTests {
|
||||
|
||||
assertThat((List<MongoCredential>) editor.getValue(), contains(USER_1_CREDENTIALS_PLAIN_AUTH, USER_2_CREDENTIALS));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1257
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void shouldReturnCredentialsValueCorrectlyWhenGivenMultipleQuotedUserNamePasswordStringWithDatabaseAndNoOptions() {
|
||||
|
||||
editor.setAsText(StringUtils.collectionToCommaDelimitedString(Arrays.asList("'" + USER_1_AUTH_STRING + "'", "'"
|
||||
+ USER_2_AUTH_STRING + "'")));
|
||||
|
||||
assertThat((List<MongoCredential>) editor.getValue(), contains(USER_1_CREDENTIALS, USER_2_CREDENTIALS));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1257
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void shouldReturnCredentialsValueCorrectlyWhenGivenSingleQuotedUserNamePasswordStringWithDatabaseAndNoOptions() {
|
||||
|
||||
editor.setAsText("'" + USER_1_AUTH_STRING + "'");
|
||||
|
||||
assertThat((List<MongoCredential>) editor.getValue(), contains(USER_1_CREDENTIALS));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1257
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void shouldReturnX509CredentialsCorrectly() {
|
||||
|
||||
editor.setAsText(USER_3_AUTH_STRING_WITH_X509_AUTH_MECHANISM);
|
||||
|
||||
assertThat((List<MongoCredential>) editor.getValue(), contains(USER_3_CREDENTIALS_X509_AUTH));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1257
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void shouldReturnX509CredentialsCorrectlyWhenNoDbSpecified() {
|
||||
|
||||
editor.setAsText("tyrion?uri.authMechanism=MONGODB-X509");
|
||||
|
||||
assertThat((List<MongoCredential>) editor.getValue(), contains(MongoCredential.createMongoX509Credential("tyrion")));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1257
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void shouldThrowExceptionWhenNoDbSpecifiedForMongodbCR() {
|
||||
|
||||
editor.setAsText("tyrion?uri.authMechanism=MONGODB-CR");
|
||||
|
||||
editor.getValue();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1257
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void shouldThrowExceptionWhenDbIsEmptyForMongodbCR() {
|
||||
|
||||
editor.setAsText("tyrion@?uri.authMechanism=MONGODB-CR");
|
||||
|
||||
editor.getValue();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -43,7 +43,9 @@ import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.annotation.Version;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.convert.CustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
@@ -52,18 +54,21 @@ import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCre
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.CommandResult;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBCursor;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.ReadPreference;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link MongoTemplate}.
|
||||
@@ -353,6 +358,70 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
|
||||
assertThat(captor.getValue(), equalTo(new BasicDBObjectBuilder().add("foo", 1).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void aggregateShouldHonorReadPreferenceWhenSet() {
|
||||
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(
|
||||
mock(CommandResult.class));
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
|
||||
template.setReadPreference(ReadPreference.secondary());
|
||||
|
||||
template.aggregate(Aggregation.newAggregation(Aggregation.unwind("foo")), "collection-1", Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class), eq(ReadPreference.secondary()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void aggregateShouldIgnoreReadPreferenceWhenNotSet() {
|
||||
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(
|
||||
mock(CommandResult.class));
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
|
||||
|
||||
template.aggregate(Aggregation.newAggregation(Aggregation.unwind("foo")), "collection-1", Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void geoNearShouldHonorReadPreferenceWhenSet() {
|
||||
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(
|
||||
mock(CommandResult.class));
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
|
||||
template.setReadPreference(ReadPreference.secondary());
|
||||
|
||||
NearQuery query = NearQuery.near(new Point(1, 1));
|
||||
template.geoNear(query, Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class), eq(ReadPreference.secondary()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1166
|
||||
*/
|
||||
@Test
|
||||
public void geoNearShouldIgnoreReadPreferenceWhenNotSet() {
|
||||
|
||||
when(db.command(Mockito.any(DBObject.class), Mockito.any(ReadPreference.class))).thenReturn(
|
||||
mock(CommandResult.class));
|
||||
when(db.command(Mockito.any(DBObject.class))).thenReturn(mock(CommandResult.class));
|
||||
|
||||
NearQuery query = NearQuery.near(new Point(1, 1));
|
||||
template.geoNear(query, Wrapper.class);
|
||||
|
||||
verify(this.db, times(1)).command(Mockito.any(DBObject.class));
|
||||
}
|
||||
|
||||
class AutogenerateableId {
|
||||
|
||||
@Id BigInteger id;
|
||||
|
||||
@@ -22,10 +22,13 @@ import static org.springframework.test.util.ReflectionTestUtils.*;
|
||||
|
||||
import java.net.UnknownHostException;
|
||||
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
|
||||
@@ -43,6 +46,7 @@ import com.mongodb.MongoURI;
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class SimpleMongoDbFactoryUnitTests {
|
||||
|
||||
public @Rule ExpectedException expectedException = ExpectedException.none();
|
||||
@Mock Mongo mongo;
|
||||
|
||||
/**
|
||||
@@ -115,6 +119,46 @@ public class SimpleMongoDbFactoryUnitTests {
|
||||
assertThat(getField(factory, "authenticationDatabaseName").toString(), is("FooBar"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1260
|
||||
*/
|
||||
@Test
|
||||
public void rejectsMongoClientWithUserCredentials() {
|
||||
|
||||
expectedException.expect(InvalidDataAccessApiUsageException.class);
|
||||
expectedException.expectMessage("use 'MongoCredential' for 'MongoClient'");
|
||||
|
||||
new SimpleMongoDbFactory(mock(MongoClient.class), "cairhienin", new UserCredentials("moiraine", "sedai"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1260
|
||||
*/
|
||||
@Test
|
||||
public void rejectsMongoClientWithUserCredentialsAndAuthDb() {
|
||||
|
||||
expectedException.expect(InvalidDataAccessApiUsageException.class);
|
||||
expectedException.expectMessage("use 'MongoCredential' for 'MongoClient'");
|
||||
|
||||
new SimpleMongoDbFactory(mock(MongoClient.class), "malkieri", new UserCredentials("lan", "mandragoran"), "authdb");
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1260
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotRejectMongoClientWithNoCredentials() {
|
||||
new SimpleMongoDbFactory(mock(MongoClient.class), "andoran", UserCredentials.NO_CREDENTIALS);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1260
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotRejectMongoClientWithEmptyUserCredentials() {
|
||||
new SimpleMongoDbFactory(mock(MongoClient.class), "shangtai", new UserCredentials("", ""));
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
private void rejectsDatabaseName(String databaseName) {
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@ public class UnwrapAndReadDbObjectCallbackUnitTests {
|
||||
MappingMongoConverter converter = new MappingMongoConverter(new DefaultDbRefResolver(factory),
|
||||
new MongoMappingContext());
|
||||
|
||||
this.callback = template.new UnwrapAndReadDbObjectCallback<Target>(converter, Target.class);
|
||||
this.callback = template.new UnwrapAndReadDbObjectCallback<Target>(converter, Target.class, "collection-1");
|
||||
}
|
||||
|
||||
@Test
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -30,6 +30,7 @@ import org.junit.rules.ExpectedException;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
@@ -37,6 +38,7 @@ import com.mongodb.DBObject;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class AggregationUnitTests {
|
||||
|
||||
@@ -283,6 +285,40 @@ public class AggregationUnitTests {
|
||||
is((DBObject) new BasicDBObject("_id", "$someKey").append("doc", new BasicDBObject("$first", "$$ROOT"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1254
|
||||
*/
|
||||
@Test
|
||||
public void shouldExposeAliasedFieldnameForProjectionsIncludingOperationsDownThePipeline() {
|
||||
|
||||
DBObject agg = Aggregation.newAggregation(//
|
||||
project("date") //
|
||||
.and("tags").minus(10).as("tags_count")//
|
||||
, group("date")//
|
||||
.sum("tags_count").as("count")//
|
||||
).toDbObject("foo", Aggregation.DEFAULT_CONTEXT);
|
||||
|
||||
DBObject group = extractPipelineElement(agg, 1, "$group");
|
||||
assertThat(getAsDBObject(group, "count"), is(new BasicDBObjectBuilder().add("$sum", "$tags_count").get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1254
|
||||
*/
|
||||
@Test
|
||||
public void shouldUseAliasedFieldnameForProjectionsIncludingOperationsDownThePipelineWhenUsingSpEL() {
|
||||
|
||||
DBObject agg = Aggregation.newAggregation(//
|
||||
project("date") //
|
||||
.andExpression("tags-10")//
|
||||
, group("date")//
|
||||
.sum("tags_count").as("count")//
|
||||
).toDbObject("foo", Aggregation.DEFAULT_CONTEXT);
|
||||
|
||||
DBObject group = extractPipelineElement(agg, 1, "$group");
|
||||
assertThat(getAsDBObject(group, "count"), is(new BasicDBObjectBuilder().add("$sum", "$tags_count").get()));
|
||||
}
|
||||
|
||||
private DBObject extractPipelineElement(DBObject agg, int index, String operation) {
|
||||
|
||||
List<DBObject> pipeline = (List<DBObject>) agg.get("pipeline");
|
||||
|
||||
@@ -790,6 +790,34 @@ public class QueryMapperUnitTests {
|
||||
assertThat(dbo, isBsonObject().containing("geoJsonPoint.$geoIntersects.$geometry.coordinates"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1269
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldRetainNumericMapKey() {
|
||||
|
||||
Query query = query(where("map.1.stringProperty").is("ba'alzamon"));
|
||||
|
||||
DBObject dbo = mapper.getMappedObject(query.getQueryObject(),
|
||||
context.getPersistentEntity(EntityWithComplexValueTypeMap.class));
|
||||
|
||||
assertThat(dbo.containsField("map.1.stringProperty"), is(true));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1269
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldRetainNumericPositionInList() {
|
||||
|
||||
Query query = query(where("list.1.stringProperty").is("ba'alzamon"));
|
||||
|
||||
DBObject dbo = mapper.getMappedObject(query.getQueryObject(),
|
||||
context.getPersistentEntity(EntityWithComplexValueTypeList.class));
|
||||
|
||||
assertThat(dbo.containsField("list.1.stringProperty"), is(true));
|
||||
}
|
||||
|
||||
@Document
|
||||
public class Foo {
|
||||
@Id private ObjectId id;
|
||||
@@ -890,4 +918,18 @@ public class QueryMapperUnitTests {
|
||||
GeoJsonPoint geoJsonPoint;
|
||||
@Field("geoJsonPointWithNameViaFieldAnnotation") GeoJsonPoint namedGeoJsonPoint;
|
||||
}
|
||||
|
||||
static class SimpeEntityWithoutId {
|
||||
|
||||
String stringProperty;
|
||||
Integer integerProperty;
|
||||
}
|
||||
|
||||
static class EntityWithComplexValueTypeMap {
|
||||
Map<Integer, SimpeEntityWithoutId> map;
|
||||
}
|
||||
|
||||
static class EntityWithComplexValueTypeList {
|
||||
List<SimpeEntityWithoutId> list;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,9 +22,11 @@ import static org.mockito.Mockito.*;
|
||||
import static org.springframework.data.mongodb.core.DBObjectTestUtils.*;
|
||||
import static org.springframework.data.mongodb.test.util.IsBsonObject.*;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.hamcrest.Matcher;
|
||||
import org.hamcrest.collection.IsIterableContainingInOrder;
|
||||
@@ -41,6 +43,9 @@ import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.DBObjectTestUtils;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapperUnitTests.ClassWithEnum.Allocation;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapperUnitTests.ClassWithEnum.AllocationToStringConverter;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapperUnitTests.ClassWithEnum.StringToAllocationConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
@@ -680,8 +685,176 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().notContaining("$set.concreteTypeWithListAttributeOfInterfaceType._class"));
|
||||
assertThat(mappedUpdate, isBsonObject()
|
||||
.containing("$set.concreteTypeWithListAttributeOfInterfaceType.models.[0]._class", ModelImpl.class.getName()));
|
||||
assertThat(
|
||||
mappedUpdate,
|
||||
isBsonObject().containing("$set.concreteTypeWithListAttributeOfInterfaceType.models.[0]._class",
|
||||
ModelImpl.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1236
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldRetainTypeInformationForObjectValues() {
|
||||
|
||||
Update update = new Update().set("value", new NestedDocument("kaladin"));
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithObject.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.value.name", "kaladin"));
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.value._class", NestedDocument.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1236
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldNotRetainTypeInformationForConcreteValues() {
|
||||
|
||||
Update update = new Update().set("concreteValue", new NestedDocument("shallan"));
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithObject.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.concreteValue.name", "shallan"));
|
||||
assertThat(mappedUpdate, isBsonObject().notContaining("$set.concreteValue._class"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1236
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldRetainTypeInformationForObjectValuesWithAlias() {
|
||||
|
||||
Update update = new Update().set("value", new NestedDocument("adolin"));
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithAliasedObject.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.renamed-value.name", "adolin"));
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.renamed-value._class", NestedDocument.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1236
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldRetrainTypeInformationWhenValueTypeOfMapDoesNotMatchItsDeclaration() {
|
||||
|
||||
Map<Object, Object> map = Collections.<Object, Object> singletonMap("szeth", new NestedDocument("son-son-vallano"));
|
||||
|
||||
Update update = new Update().set("map", map);
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithObjectMap.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.map.szeth.name", "son-son-vallano"));
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.map.szeth._class", NestedDocument.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1236
|
||||
*/
|
||||
@Test
|
||||
public void mappingShouldNotContainTypeInformationWhenValueTypeOfMapMatchesDeclaration() {
|
||||
|
||||
Map<Object, NestedDocument> map = Collections.<Object, NestedDocument> singletonMap("jasnah", new NestedDocument(
|
||||
"kholin"));
|
||||
|
||||
Update update = new Update().set("concreteMap", map);
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithObjectMap.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.concreteMap.jasnah.name", "kholin"));
|
||||
assertThat(mappedUpdate, isBsonObject().notContaining("$set.concreteMap.jasnah._class"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1250
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void mapsUpdateWithBothReadingAndWritingConverterRegistered() {
|
||||
|
||||
CustomConversions conversions = new CustomConversions(
|
||||
Arrays.asList(AllocationToStringConverter.INSTANCE, StringToAllocationConverter.INSTANCE));
|
||||
|
||||
MongoMappingContext mappingContext = new MongoMappingContext();
|
||||
mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder());
|
||||
mappingContext.afterPropertiesSet();
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(mock(DbRefResolver.class), mappingContext);
|
||||
converter.setCustomConversions(conversions);
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
UpdateMapper mapper = new UpdateMapper(converter);
|
||||
|
||||
Update update = new Update().set("allocation", Allocation.AVAILABLE);
|
||||
DBObject result = mapper.getMappedObject(update.getUpdateObject(),
|
||||
mappingContext.getPersistentEntity(ClassWithEnum.class));
|
||||
|
||||
assertThat(result, isBsonObject().containing("$set.allocation", Allocation.AVAILABLE.code));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1251
|
||||
*/
|
||||
@Test
|
||||
public void mapsNullValueCorrectlyForSimpleTypes() {
|
||||
|
||||
Update update = new Update().set("value", null);
|
||||
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ConcreteChildClass.class));
|
||||
|
||||
DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set");
|
||||
assertThat($set.containsField("value"), is(true));
|
||||
assertThat($set.get("value"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1251
|
||||
*/
|
||||
@Test
|
||||
public void mapsNullValueCorrectlyForJava8Date() {
|
||||
|
||||
Update update = new Update().set("date", null);
|
||||
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ClassWithJava8Date.class));
|
||||
|
||||
DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set");
|
||||
assertThat($set.containsField("date"), is(true));
|
||||
assertThat($set.get("value"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1251
|
||||
*/
|
||||
@Test
|
||||
public void mapsNullValueCorrectlyForCollectionTypes() {
|
||||
|
||||
Update update = new Update().set("values", null);
|
||||
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ListModel.class));
|
||||
|
||||
DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set");
|
||||
assertThat($set.containsField("values"), is(true));
|
||||
assertThat($set.get("value"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1251
|
||||
*/
|
||||
@Test
|
||||
public void mapsNullValueCorrectlyForPropertyOfNestedDocument() {
|
||||
|
||||
Update update = new Update().set("concreteValue.name", null);
|
||||
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithObject.class));
|
||||
|
||||
DBObject $set = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$set");
|
||||
assertThat($set.containsField("concreteValue.name"), is(true));
|
||||
assertThat($set.get("concreteValue.name"), nullValue());
|
||||
}
|
||||
|
||||
static class DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes {
|
||||
@@ -889,4 +1062,73 @@ public class UpdateMapperUnitTests {
|
||||
this.name = name;
|
||||
}
|
||||
}
|
||||
|
||||
static class EntityWithObject {
|
||||
|
||||
Object value;
|
||||
NestedDocument concreteValue;
|
||||
}
|
||||
|
||||
static class EntityWithAliasedObject {
|
||||
|
||||
@Field("renamed-value") Object value;
|
||||
}
|
||||
|
||||
static class EntityWithObjectMap {
|
||||
|
||||
Map<Object, Object> map;
|
||||
Map<Object, NestedDocument> concreteMap;
|
||||
}
|
||||
|
||||
static class ClassWithEnum {
|
||||
|
||||
Allocation allocation;
|
||||
|
||||
static enum Allocation {
|
||||
|
||||
AVAILABLE("V"), ALLOCATED("A");
|
||||
|
||||
String code;
|
||||
|
||||
private Allocation(String code) {
|
||||
this.code = code;
|
||||
}
|
||||
|
||||
public static Allocation of(String code) {
|
||||
|
||||
for (Allocation value : values()) {
|
||||
if (value.code.equals(code)) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
}
|
||||
|
||||
static enum AllocationToStringConverter implements Converter<Allocation, String> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public String convert(Allocation source) {
|
||||
return source.code;
|
||||
}
|
||||
}
|
||||
|
||||
static enum StringToAllocationConverter implements Converter<String, Allocation> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public Allocation convert(String source) {
|
||||
return Allocation.of(source);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static class ClassWithJava8Date {
|
||||
|
||||
LocalDate date;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,25 +18,37 @@ package org.springframework.data.mongodb.core.index;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.hamcrest.Matchers;
|
||||
import org.hamcrest.core.IsInstanceOf;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.rules.RuleChain;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.test.util.CleanMongoDB;
|
||||
import org.springframework.data.mongodb.test.util.MongoVersionRule;
|
||||
import org.springframework.data.util.Version;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoCommandException;
|
||||
|
||||
/**
|
||||
* Integration tests for {@link MongoPersistentEntityIndexCreator}.
|
||||
*
|
||||
@@ -54,6 +66,8 @@ public class MongoPersistentEntityIndexCreatorIntegrationTests {
|
||||
public static @ClassRule RuleChain rules = RuleChain.outerRule(MongoVersionRule.atLeast(new Version(2, 6))).around(
|
||||
CleanMongoDB.indexes(Arrays.asList(SAMPLE_TYPE_COLLECTION_NAME, RECURSIVE_TYPE_COLLECTION_NAME)));
|
||||
|
||||
public @Rule ExpectedException expectedException = ExpectedException.none();
|
||||
|
||||
@Autowired @Qualifier("mongo1") MongoOperations templateOne;
|
||||
|
||||
@Autowired @Qualifier("mongo2") MongoOperations templateTwo;
|
||||
@@ -81,6 +95,28 @@ public class MongoPersistentEntityIndexCreatorIntegrationTests {
|
||||
assertThat(indexInfo, Matchers.<IndexInfo> hasItem(hasProperty("name", is("firstName"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1125
|
||||
*/
|
||||
@Test
|
||||
public void createIndexShouldThrowMeaningfulExceptionWhenIndexCreationFails() throws UnknownHostException {
|
||||
|
||||
expectedException.expect(DataIntegrityViolationException.class);
|
||||
expectedException.expectMessage("collection 'datamongo-1125'");
|
||||
expectedException.expectMessage("dalinar.kohlin");
|
||||
expectedException.expectMessage("lastname");
|
||||
expectedException.expectCause(IsInstanceOf.<Throwable> instanceOf(MongoCommandException.class));
|
||||
|
||||
MongoPersistentEntityIndexCreator indexCreator = new MongoPersistentEntityIndexCreator(new MongoMappingContext(),
|
||||
new SimpleMongoDbFactory(new MongoClient(), "issue"));
|
||||
|
||||
indexCreator.createIndex(new IndexDefinitionHolder("dalinar.kohlin", new Index().named("stormlight")
|
||||
.on("lastname", Direction.ASC).unique(), "datamongo-1125"));
|
||||
|
||||
indexCreator.createIndex(new IndexDefinitionHolder("dalinar.kohlin", new Index().named("stormlight")
|
||||
.on("lastname", Direction.ASC).sparse(), "datamongo-1125"));
|
||||
}
|
||||
|
||||
@Document(collection = RECURSIVE_TYPE_COLLECTION_NAME)
|
||||
static abstract class RecursiveGenericType<RGT extends RecursiveGenericType<RGT>> {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012-2014 the original author or authors.
|
||||
* Copyright 2012-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -28,11 +28,14 @@ import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mapping.context.MappingContextEvent;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.MongoExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
@@ -43,6 +46,7 @@ import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.MongoException;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link MongoPersistentEntityIndexCreator}.
|
||||
@@ -211,6 +215,36 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
|
||||
assertThat(collectionNameCapturer.getValue(), equalTo("indexedDocumentWrapper"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1125
|
||||
*/
|
||||
@Test(expected = DataAccessException.class)
|
||||
public void createIndexShouldUsePersistenceExceptionTranslatorForNonDataIntegrityConcerns() {
|
||||
|
||||
when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator());
|
||||
doThrow(new MongoException(6, "HostUnreachable")).when(collection).createIndex(Mockito.any(DBObject.class),
|
||||
Mockito.any(DBObject.class));
|
||||
|
||||
MongoMappingContext mappingContext = prepareMappingContext(Person.class);
|
||||
|
||||
new MongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1125
|
||||
*/
|
||||
@Test(expected = ClassCastException.class)
|
||||
public void createIndexShouldNotConvertUnknownExceptionTypes() {
|
||||
|
||||
when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator());
|
||||
doThrow(new ClassCastException("o_O")).when(collection).createIndex(Mockito.any(DBObject.class),
|
||||
Mockito.any(DBObject.class));
|
||||
|
||||
MongoMappingContext mappingContext = prepareMappingContext(Person.class);
|
||||
|
||||
new MongoPersistentEntityIndexCreator(mappingContext, factory);
|
||||
}
|
||||
|
||||
private static MongoMappingContext prepareMappingContext(Class<?> type) {
|
||||
|
||||
MongoMappingContext mappingContext = new MongoMappingContext();
|
||||
|
||||
@@ -853,6 +853,19 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1263
|
||||
*/
|
||||
@Test
|
||||
public void shouldConsiderGenericTypeArgumentsOfCollectionElements() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(EntityWithGenericTypeWrapperAsElement.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(1));
|
||||
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"),
|
||||
equalTo("listWithGeneircTypeElement.entity.property_index"));
|
||||
}
|
||||
|
||||
@Document
|
||||
static class MixedIndexRoot {
|
||||
|
||||
@@ -1028,6 +1041,15 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
NoCycleButIndenticallNamedPropertiesDeeplyNested propertyWithIndexedStructure;
|
||||
}
|
||||
|
||||
static class GenericEntityWrapper<T> {
|
||||
T entity;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class EntityWithGenericTypeWrapperAsElement {
|
||||
List<GenericEntityWrapper<DocumentWithNamedIndex>> listWithGeneircTypeElement;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static List<IndexDefinitionHolder> prepareMappingContextAndResolveIndexForType(Class<?> type) {
|
||||
|
||||
@@ -143,6 +143,89 @@ public class BasicMongoPersistentEntityUnitTests {
|
||||
verify(propertyMock, never()).getActualType();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1157
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
@Test(expected = MappingException.class)
|
||||
public void verifyShouldThrowErrorForLazyDBRefOnFinalClass() {
|
||||
|
||||
BasicMongoPersistentEntity<AnyDocument> entity = new BasicMongoPersistentEntity<AnyDocument>(
|
||||
ClassTypeInformation.from(AnyDocument.class));
|
||||
org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock(
|
||||
org.springframework.data.mongodb.core.mapping.DBRef.class);
|
||||
when(propertyMock.isDbReference()).thenReturn(true);
|
||||
when(propertyMock.getDBRef()).thenReturn(dbRefMock);
|
||||
when(dbRefMock.lazy()).thenReturn(true);
|
||||
when(propertyMock.getActualType()).thenReturn((Class) Class.class);
|
||||
entity.addPersistentProperty(propertyMock);
|
||||
|
||||
entity.verify();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1157
|
||||
*/
|
||||
@Test(expected = MappingException.class)
|
||||
public void verifyShouldThrowErrorForLazyDBRefArray() {
|
||||
|
||||
BasicMongoPersistentEntity<AnyDocument> entity = new BasicMongoPersistentEntity<AnyDocument>(
|
||||
ClassTypeInformation.from(AnyDocument.class));
|
||||
org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock(
|
||||
org.springframework.data.mongodb.core.mapping.DBRef.class);
|
||||
when(propertyMock.isDbReference()).thenReturn(true);
|
||||
when(propertyMock.getDBRef()).thenReturn(dbRefMock);
|
||||
when(dbRefMock.lazy()).thenReturn(true);
|
||||
when(propertyMock.isArray()).thenReturn(true);
|
||||
entity.addPersistentProperty(propertyMock);
|
||||
|
||||
entity.verify();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1157
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public void verifyShouldPassForLazyDBRefOnNonArrayNonFinalClass() {
|
||||
|
||||
BasicMongoPersistentEntity<AnyDocument> entity = new BasicMongoPersistentEntity<AnyDocument>(
|
||||
ClassTypeInformation.from(AnyDocument.class));
|
||||
org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock(
|
||||
org.springframework.data.mongodb.core.mapping.DBRef.class);
|
||||
when(propertyMock.isDbReference()).thenReturn(true);
|
||||
when(propertyMock.getDBRef()).thenReturn(dbRefMock);
|
||||
when(dbRefMock.lazy()).thenReturn(true);
|
||||
when(propertyMock.getActualType()).thenReturn((Class) Object.class);
|
||||
entity.addPersistentProperty(propertyMock);
|
||||
|
||||
entity.verify();
|
||||
|
||||
verify(propertyMock, times(1)).isDbReference();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1157
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public void verifyShouldPassForNonLazyDBRefOnFinalClass() {
|
||||
|
||||
BasicMongoPersistentEntity<AnyDocument> entity = new BasicMongoPersistentEntity<AnyDocument>(
|
||||
ClassTypeInformation.from(AnyDocument.class));
|
||||
org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock(
|
||||
org.springframework.data.mongodb.core.mapping.DBRef.class);
|
||||
when(propertyMock.isDbReference()).thenReturn(true);
|
||||
when(propertyMock.getDBRef()).thenReturn(dbRefMock);
|
||||
when(dbRefMock.lazy()).thenReturn(false);
|
||||
when(propertyMock.getActualType()).thenReturn((Class) Class.class);
|
||||
entity.addPersistentProperty(propertyMock);
|
||||
|
||||
entity.verify();
|
||||
|
||||
verify(dbRefMock, times(1)).lazy();
|
||||
}
|
||||
|
||||
@Document(collection = "contacts")
|
||||
class Contact {
|
||||
|
||||
|
||||
@@ -39,7 +39,7 @@ public class AbstractMongoEventListenerUnitTests {
|
||||
@Test
|
||||
public void invokesCallbackForEventForPerson() {
|
||||
|
||||
MongoMappingEvent<Person> event = new BeforeConvertEvent<Person>(new Person("Dave", "Matthews"));
|
||||
MongoMappingEvent<Person> event = new BeforeConvertEvent<Person>(new Person("Dave", "Matthews"), "collection-1");
|
||||
SamplePersonEventListener listener = new SamplePersonEventListener();
|
||||
listener.onApplicationEvent(event);
|
||||
assertThat(listener.invokedOnBeforeConvert, is(true));
|
||||
@@ -54,11 +54,11 @@ public class AbstractMongoEventListenerUnitTests {
|
||||
SamplePersonEventListener listener = new SamplePersonEventListener();
|
||||
context.addApplicationListener(listener);
|
||||
|
||||
context.publishEvent(new BeforeConvertEvent<Person>(new Person("Dave", "Matthews")));
|
||||
context.publishEvent(new BeforeConvertEvent<Person>(new Person("Dave", "Matthews"), "collection-1"));
|
||||
assertThat(listener.invokedOnBeforeConvert, is(true));
|
||||
|
||||
listener.invokedOnBeforeConvert = false;
|
||||
context.publishEvent(new BeforeConvertEvent<String>("Test"));
|
||||
context.publishEvent(new BeforeConvertEvent<String>("Test", "collection-1"));
|
||||
assertThat(listener.invokedOnBeforeConvert, is(false));
|
||||
|
||||
context.close();
|
||||
@@ -71,7 +71,7 @@ public class AbstractMongoEventListenerUnitTests {
|
||||
public void afterLoadEffectGetsHandledCorrectly() {
|
||||
|
||||
SamplePersonEventListener listener = new SamplePersonEventListener();
|
||||
listener.onApplicationEvent(new AfterLoadEvent<Person>(new BasicDBObject(), Person.class));
|
||||
listener.onApplicationEvent(new AfterLoadEvent<Person>(new BasicDBObject(), Person.class, "collection-1"));
|
||||
assertThat(listener.invokedOnAfterLoad, is(true));
|
||||
}
|
||||
|
||||
@@ -83,8 +83,8 @@ public class AbstractMongoEventListenerUnitTests {
|
||||
|
||||
SamplePersonEventListener personListener = new SamplePersonEventListener();
|
||||
SampleAccountEventListener accountListener = new SampleAccountEventListener();
|
||||
personListener.onApplicationEvent(new AfterLoadEvent<Person>(new BasicDBObject(), Person.class));
|
||||
accountListener.onApplicationEvent(new AfterLoadEvent<Person>(new BasicDBObject(), Person.class));
|
||||
personListener.onApplicationEvent(new AfterLoadEvent<Person>(new BasicDBObject(), Person.class, "collection-1"));
|
||||
accountListener.onApplicationEvent(new AfterLoadEvent<Person>(new BasicDBObject(), Person.class, "collection-1"));
|
||||
|
||||
assertThat(personListener.invokedOnAfterLoad, is(true));
|
||||
assertThat(accountListener.invokedOnAfterLoad, is(false));
|
||||
@@ -98,8 +98,8 @@ public class AbstractMongoEventListenerUnitTests {
|
||||
|
||||
SamplePersonEventListener personListener = new SamplePersonEventListener();
|
||||
SampleContactEventListener contactListener = new SampleContactEventListener();
|
||||
personListener.onApplicationEvent(new AfterLoadEvent<Person>(new BasicDBObject(), Person.class));
|
||||
contactListener.onApplicationEvent(new AfterLoadEvent<Person>(new BasicDBObject(), Person.class));
|
||||
personListener.onApplicationEvent(new AfterLoadEvent<Person>(new BasicDBObject(), Person.class, "collection-1"));
|
||||
contactListener.onApplicationEvent(new AfterLoadEvent<Person>(new BasicDBObject(), Person.class, "collection-1"));
|
||||
|
||||
assertThat(personListener.invokedOnAfterLoad, is(true));
|
||||
assertThat(contactListener.invokedOnAfterLoad, is(true));
|
||||
@@ -113,8 +113,8 @@ public class AbstractMongoEventListenerUnitTests {
|
||||
|
||||
SamplePersonEventListener personListener = new SamplePersonEventListener();
|
||||
SampleContactEventListener contactListener = new SampleContactEventListener();
|
||||
personListener.onApplicationEvent(new AfterLoadEvent<Contact>(new BasicDBObject(), Contact.class));
|
||||
contactListener.onApplicationEvent(new AfterLoadEvent<Contact>(new BasicDBObject(), Contact.class));
|
||||
personListener.onApplicationEvent(new AfterLoadEvent<Contact>(new BasicDBObject(), Contact.class, "collection-1"));
|
||||
contactListener.onApplicationEvent(new AfterLoadEvent<Contact>(new BasicDBObject(), Contact.class, "collection-1"));
|
||||
|
||||
assertThat(personListener.invokedOnAfterLoad, is(false));
|
||||
assertThat(contactListener.invokedOnAfterLoad, is(true));
|
||||
@@ -137,7 +137,7 @@ public class AbstractMongoEventListenerUnitTests {
|
||||
@Test
|
||||
public void invokeContactCallbackForPersonEvent() {
|
||||
|
||||
MongoMappingEvent<DBObject> event = new BeforeDeleteEvent<Person>(new BasicDBObject(), Person.class);
|
||||
MongoMappingEvent<DBObject> event = new BeforeDeleteEvent<Person>(new BasicDBObject(), Person.class, "collection-1");
|
||||
SampleContactEventListener listener = new SampleContactEventListener();
|
||||
listener.onApplicationEvent(event);
|
||||
|
||||
@@ -150,7 +150,7 @@ public class AbstractMongoEventListenerUnitTests {
|
||||
@Test
|
||||
public void invokePersonCallbackForPersonEvent() {
|
||||
|
||||
MongoMappingEvent<DBObject> event = new BeforeDeleteEvent<Person>(new BasicDBObject(), Person.class);
|
||||
MongoMappingEvent<DBObject> event = new BeforeDeleteEvent<Person>(new BasicDBObject(), Person.class, "collection-1");
|
||||
SamplePersonEventListener listener = new SamplePersonEventListener();
|
||||
listener.onApplicationEvent(event);
|
||||
|
||||
@@ -163,7 +163,8 @@ public class AbstractMongoEventListenerUnitTests {
|
||||
@Test
|
||||
public void dontInvokePersonCallbackForAccountEvent() {
|
||||
|
||||
MongoMappingEvent<DBObject> event = new BeforeDeleteEvent<Account>(new BasicDBObject(), Account.class);
|
||||
MongoMappingEvent<DBObject> event = new BeforeDeleteEvent<Account>(new BasicDBObject(), Account.class,
|
||||
"collection-1");
|
||||
SamplePersonEventListener listener = new SamplePersonEventListener();
|
||||
listener.onApplicationEvent(event);
|
||||
|
||||
@@ -176,7 +177,7 @@ public class AbstractMongoEventListenerUnitTests {
|
||||
@Test
|
||||
public void donInvokePersonCallbackForUntypedEvent() {
|
||||
|
||||
MongoMappingEvent<DBObject> event = new BeforeDeleteEvent<Account>(new BasicDBObject(), null);
|
||||
MongoMappingEvent<DBObject> event = new BeforeDeleteEvent<Account>(new BasicDBObject(), null, "collection-1");
|
||||
SamplePersonEventListener listener = new SamplePersonEventListener();
|
||||
listener.onApplicationEvent(event);
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,7 +15,10 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import static org.hamcrest.core.Is.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
|
||||
import java.net.UnknownHostException;
|
||||
|
||||
@@ -26,6 +29,7 @@ import org.junit.Test;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.mapping.PersonPojoStringId;
|
||||
|
||||
import com.mongodb.DB;
|
||||
@@ -38,10 +42,13 @@ import com.mongodb.WriteConcern;
|
||||
* Integration test for Mapping Events.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class ApplicationContextEventTests {
|
||||
|
||||
private final String[] collectionsToDrop = new String[] { "personPojoStringId" };
|
||||
private static final String COLLECTION_NAME = "personPojoStringId";
|
||||
|
||||
private final String[] collectionsToDrop = new String[] { COLLECTION_NAME };
|
||||
|
||||
private ApplicationContext applicationContext;
|
||||
private MongoTemplate template;
|
||||
@@ -60,6 +67,7 @@ public class ApplicationContextEventTests {
|
||||
}
|
||||
|
||||
private void cleanDb() throws UnknownHostException {
|
||||
|
||||
Mongo mongo = new MongoClient();
|
||||
DB db = mongo.getDB("database");
|
||||
for (String coll : collectionsToDrop) {
|
||||
@@ -90,6 +98,9 @@ public class ApplicationContextEventTests {
|
||||
assertEquals(1, simpleMappingEventListener.onBeforeSaveEvents.size());
|
||||
assertEquals(1, simpleMappingEventListener.onAfterSaveEvents.size());
|
||||
|
||||
assertEquals(COLLECTION_NAME, simpleMappingEventListener.onBeforeSaveEvents.get(0).getCollectionName());
|
||||
assertEquals(COLLECTION_NAME, simpleMappingEventListener.onAfterSaveEvents.get(0).getCollectionName());
|
||||
|
||||
Assert.assertTrue(personBeforeSaveListener.seenEvents.get(0) instanceof BeforeSaveEvent<?>);
|
||||
Assert.assertTrue(afterSaveListener.seenEvents.get(0) instanceof AfterSaveEvent<?>);
|
||||
|
||||
@@ -106,7 +117,75 @@ public class ApplicationContextEventTests {
|
||||
dbo = beforeSaveEvent.getDBObject();
|
||||
|
||||
comparePersonAndDbo(p, p2, dbo);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1256
|
||||
*/
|
||||
@Test
|
||||
public void loadAndConvertEvents() {
|
||||
|
||||
SimpleMappingEventListener simpleMappingEventListener = applicationContext
|
||||
.getBean(SimpleMappingEventListener.class);
|
||||
|
||||
PersonPojoStringId entity = new PersonPojoStringId("1", "Text");
|
||||
template.insert(entity);
|
||||
|
||||
template.findOne(query(where("id").is(entity.getId())), PersonPojoStringId.class);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
|
||||
assertThat(simpleMappingEventListener.onBeforeConvertEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onBeforeConvertEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1256
|
||||
*/
|
||||
@Test
|
||||
public void loadEventsOnAggregation() {
|
||||
|
||||
SimpleMappingEventListener simpleMappingEventListener = applicationContext
|
||||
.getBean(SimpleMappingEventListener.class);
|
||||
|
||||
template.insert(new PersonPojoStringId("1", "Text"));
|
||||
|
||||
template.aggregate(Aggregation.newAggregation(Aggregation.project("text")), PersonPojoStringId.class,
|
||||
PersonPojoStringId.class);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
|
||||
assertThat(simpleMappingEventListener.onBeforeConvertEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onBeforeConvertEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1256
|
||||
*/
|
||||
@Test
|
||||
public void deleteEvents() {
|
||||
|
||||
SimpleMappingEventListener simpleMappingEventListener = applicationContext
|
||||
.getBean(SimpleMappingEventListener.class);
|
||||
|
||||
PersonPojoStringId entity = new PersonPojoStringId("1", "Text");
|
||||
template.insert(entity);
|
||||
|
||||
template.remove(entity);
|
||||
|
||||
assertThat(simpleMappingEventListener.onBeforeDeleteEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onBeforeDeleteEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterDeleteEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onAfterDeleteEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
}
|
||||
|
||||
private void comparePersonAndDbo(PersonPojoStringId p, PersonPojoStringId p2, DBObject dbo) {
|
||||
|
||||
@@ -78,7 +78,7 @@ public class AuditingEventListenerUnitTests {
|
||||
public void triggersCreationMarkForObjectWithEmptyId() {
|
||||
|
||||
Sample sample = new Sample();
|
||||
listener.onApplicationEvent(new BeforeConvertEvent<Object>(sample));
|
||||
listener.onApplicationEvent(new BeforeConvertEvent<Object>(sample, "collection-1"));
|
||||
|
||||
verify(handler, times(1)).markCreated(sample);
|
||||
verify(handler, times(0)).markModified(any(Sample.class));
|
||||
@@ -92,7 +92,7 @@ public class AuditingEventListenerUnitTests {
|
||||
|
||||
Sample sample = new Sample();
|
||||
sample.id = "id";
|
||||
listener.onApplicationEvent(new BeforeConvertEvent<Object>(sample));
|
||||
listener.onApplicationEvent(new BeforeConvertEvent<Object>(sample, "collection-1"));
|
||||
|
||||
verify(handler, times(0)).markCreated(any(Sample.class));
|
||||
verify(handler, times(1)).markModified(sample);
|
||||
|
||||
@@ -21,8 +21,6 @@ import java.util.List;
|
||||
import org.springframework.context.ApplicationEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.PersonPojoStringId;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
public class PersonBeforeSaveListener extends AbstractMongoEventListener<PersonPojoStringId> {
|
||||
|
||||
public final List<ApplicationEvent> seenEvents = new ArrayList<ApplicationEvent>();
|
||||
@@ -32,7 +30,7 @@ public class PersonBeforeSaveListener extends AbstractMongoEventListener<PersonP
|
||||
* @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeSave(java.lang.Object, com.mongodb.DBObject)
|
||||
*/
|
||||
@Override
|
||||
public void onBeforeSave(PersonPojoStringId source, DBObject dbo) {
|
||||
seenEvents.add(new BeforeSaveEvent<PersonPojoStringId>(source, dbo));
|
||||
public void onBeforeSave(BeforeSaveEvent<PersonPojoStringId> event) {
|
||||
seenEvents.add(event);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,8 +17,12 @@ package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import java.util.ArrayList;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* @author Mark Pollak
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Leiter
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class SimpleMappingEventListener extends AbstractMongoEventListener<Object> {
|
||||
|
||||
public final ArrayList<BeforeConvertEvent<Object>> onBeforeConvertEvents = new ArrayList<BeforeConvertEvent<Object>>();
|
||||
@@ -26,29 +30,41 @@ public class SimpleMappingEventListener extends AbstractMongoEventListener<Objec
|
||||
public final ArrayList<AfterSaveEvent<Object>> onAfterSaveEvents = new ArrayList<AfterSaveEvent<Object>>();
|
||||
public final ArrayList<AfterLoadEvent<Object>> onAfterLoadEvents = new ArrayList<AfterLoadEvent<Object>>();
|
||||
public final ArrayList<AfterConvertEvent<Object>> onAfterConvertEvents = new ArrayList<AfterConvertEvent<Object>>();
|
||||
public final ArrayList<BeforeDeleteEvent<Object>> onBeforeDeleteEvents = new ArrayList<BeforeDeleteEvent<Object>>();
|
||||
public final ArrayList<AfterDeleteEvent<Object>> onAfterDeleteEvents = new ArrayList<AfterDeleteEvent<Object>>();
|
||||
|
||||
@Override
|
||||
public void onBeforeConvert(Object source) {
|
||||
onBeforeConvertEvents.add(new BeforeConvertEvent<Object>(source));
|
||||
public void onBeforeConvert(BeforeConvertEvent<Object> event) {
|
||||
onBeforeConvertEvents.add(event);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onBeforeSave(Object source, DBObject dbo) {
|
||||
onBeforeSaveEvents.add(new BeforeSaveEvent<Object>(source, dbo));
|
||||
public void onBeforeSave(BeforeSaveEvent<Object> event) {
|
||||
onBeforeSaveEvents.add(event);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onAfterSave(Object source, DBObject dbo) {
|
||||
onAfterSaveEvents.add(new AfterSaveEvent<Object>(source, dbo));
|
||||
public void onAfterSave(AfterSaveEvent<Object> event) {
|
||||
onAfterSaveEvents.add(event);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onAfterLoad(DBObject dbo) {
|
||||
onAfterLoadEvents.add(new AfterLoadEvent<Object>(dbo, Object.class));
|
||||
public void onAfterLoad(AfterLoadEvent<Object> event) {
|
||||
onAfterLoadEvents.add(event);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onAfterConvert(DBObject dbo, Object source) {
|
||||
onAfterConvertEvents.add(new AfterConvertEvent<Object>(dbo, source));
|
||||
public void onAfterConvert(AfterConvertEvent<Object> event) {
|
||||
onAfterConvertEvents.add(event);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onAfterDelete(AfterDeleteEvent<Object> event) {
|
||||
onAfterDeleteEvents.add(event);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onBeforeDelete(BeforeDeleteEvent<Object> event) {
|
||||
onBeforeDeleteEvents.add(event);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,7 +20,7 @@ import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
|
||||
/**
|
||||
* Sample contactt domain class.
|
||||
* Sample contact domain class.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
|
||||
@@ -553,6 +553,114 @@ public class MongoQueryCreatorUnitTests {
|
||||
assertThat(query, is(query(where("address.geo").near(point).minDistance(10D).maxDistance(20D))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1229
|
||||
*/
|
||||
@Test
|
||||
public void appliesIgnoreCaseToLeafProperty() {
|
||||
|
||||
PartTree tree = new PartTree("findByAddressStreetIgnoreCase", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "Street");
|
||||
|
||||
assertThat(new MongoQueryCreator(tree, accessor, context).createQuery(), is(notNullValue()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1232
|
||||
*/
|
||||
@Test
|
||||
public void ignoreCaseShouldEscapeSource() {
|
||||
|
||||
PartTree tree = new PartTree("findByUsernameIgnoreCase", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "con.flux+");
|
||||
|
||||
Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
|
||||
|
||||
assertThat(query, is(query(where("username").regex("^\\Qcon.flux+\\E$", "i"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1232
|
||||
*/
|
||||
@Test
|
||||
public void ignoreCaseShouldEscapeSourceWhenUsedForStartingWith() {
|
||||
|
||||
PartTree tree = new PartTree("findByUsernameStartingWithIgnoreCase", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "dawns.light+");
|
||||
|
||||
Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
|
||||
|
||||
assertThat(query, is(query(where("username").regex("^\\Qdawns.light+\\E", "i"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1232
|
||||
*/
|
||||
@Test
|
||||
public void ignoreCaseShouldEscapeSourceWhenUsedForEndingWith() {
|
||||
|
||||
PartTree tree = new PartTree("findByUsernameEndingWithIgnoreCase", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "new.ton+");
|
||||
|
||||
Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
|
||||
|
||||
assertThat(query, is(query(where("username").regex("\\Qnew.ton+\\E$", "i"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1232
|
||||
*/
|
||||
@Test
|
||||
public void likeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() {
|
||||
|
||||
PartTree tree = new PartTree("findByUsernameLike", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "*fire.fight+*");
|
||||
|
||||
Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
|
||||
|
||||
assertThat(query, is(query(where("username").regex(".*\\Qfire.fight+\\E.*"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1232
|
||||
*/
|
||||
@Test
|
||||
public void likeShouldEscapeSourceWhenUsedWithLeadingWildcard() {
|
||||
|
||||
PartTree tree = new PartTree("findByUsernameLike", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "*steel.heart+");
|
||||
|
||||
Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
|
||||
|
||||
assertThat(query, is(query(where("username").regex(".*\\Qsteel.heart+\\E"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1232
|
||||
*/
|
||||
@Test
|
||||
public void likeShouldEscapeSourceWhenUsedWithTrailingWildcard() {
|
||||
|
||||
PartTree tree = new PartTree("findByUsernameLike", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "cala.mity+*");
|
||||
|
||||
Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
|
||||
assertThat(query, is(query(where("username").regex("\\Qcala.mity+\\E.*"))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1232
|
||||
*/
|
||||
@Test
|
||||
public void likeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() {
|
||||
|
||||
PartTree tree = new PartTree("findByUsernameLike", User.class);
|
||||
ConvertingParameterAccessor accessor = getAccessor(converter, "*");
|
||||
|
||||
Query query = new MongoQueryCreator(tree, accessor, context).createQuery();
|
||||
assertThat(query, is(query(where("username").regex(".*"))));
|
||||
}
|
||||
|
||||
interface PersonRepository extends Repository<Person, Long> {
|
||||
|
||||
List<Person> findByLocationNearAndFirstname(Point location, Distance maxDistance, String firstname);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -177,6 +177,17 @@ public class MongoQueryMethodUnitTests {
|
||||
assertThat(method.getQueryMetaAttributes().getSnapshot(), is(true));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1266
|
||||
*/
|
||||
@Test
|
||||
public void fallsBackToRepositoryDomainTypeIfMethodDoesNotReturnADomainType() throws Exception {
|
||||
|
||||
MongoQueryMethod method = queryMethod("deleteByUserName", String.class);
|
||||
|
||||
assertThat(method.getEntityInformation().getJavaType(), is(typeCompatibleWith(User.class)));
|
||||
}
|
||||
|
||||
private MongoQueryMethod queryMethod(String name, Class<?>... parameters) throws Exception {
|
||||
Method method = PersonRepository.class.getMethod(name, parameters);
|
||||
return new MongoQueryMethod(method, new DefaultRepositoryMetadata(PersonRepository.class), context);
|
||||
@@ -210,6 +221,10 @@ public class MongoQueryMethodUnitTests {
|
||||
@Meta(snapshot = true)
|
||||
List<User> metaWithSnapshotUsage();
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1266
|
||||
*/
|
||||
void deleteByUserName(String userName);
|
||||
}
|
||||
|
||||
interface SampleRepository extends Repository<Contact, Long> {
|
||||
|
||||
@@ -310,6 +310,39 @@ public class StringBasedMongoQueryUnitTests {
|
||||
assertThat(query.getQueryObject(), is(reference.getQueryObject()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1244
|
||||
*/
|
||||
@Test
|
||||
public void shouldSupportExpressionsInCustomQueriesWithNestedObject() throws Exception {
|
||||
|
||||
ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, true, "param1", "param2");
|
||||
StringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpressionAndNestedObject", boolean.class,
|
||||
String.class);
|
||||
|
||||
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor);
|
||||
org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{ \"id\" : { \"$exists\" : true}}");
|
||||
|
||||
assertThat(query.getQueryObject(), is(reference.getQueryObject()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1244
|
||||
*/
|
||||
@Test
|
||||
public void shouldSupportExpressionsInCustomQueriesWithMultipleNestedObjects() throws Exception {
|
||||
|
||||
ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, true, "param1", "param2");
|
||||
StringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpressionAndMultipleNestedObjects",
|
||||
boolean.class, String.class, String.class);
|
||||
|
||||
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor);
|
||||
org.springframework.data.mongodb.core.query.Query reference = new BasicQuery(
|
||||
"{ \"id\" : { \"$exists\" : true} , \"foo\" : 42 , \"bar\" : { \"$exists\" : false}}");
|
||||
|
||||
assertThat(query.getQueryObject(), is(reference.getQueryObject()));
|
||||
}
|
||||
|
||||
private StringBasedMongoQuery createQueryForMethod(String name, Class<?>... parameters) throws Exception {
|
||||
|
||||
Method method = SampleRepository.class.getMethod(name, parameters);
|
||||
@@ -355,7 +388,13 @@ public class StringBasedMongoQueryUnitTests {
|
||||
@Query("{ ?0 : ?1}")
|
||||
Object methodWithPlaceholderInKeyOfJsonStructure(String keyReplacement, String valueReplacement);
|
||||
|
||||
@Query(value = "{'lastname': ?#{[0]} }")
|
||||
@Query("{'lastname': ?#{[0]} }")
|
||||
List<Person> findByQueryWithExpression(String param0);
|
||||
|
||||
@Query("{'id':?#{ [0] ? { $exists :true} : [1] }}")
|
||||
List<Person> findByQueryWithExpressionAndNestedObject(boolean param0, String param1);
|
||||
|
||||
@Query("{'id':?#{ [0] ? { $exists :true} : [1] }, 'foo':42, 'bar': ?#{ [0] ? { $exists :false} : [1] }}")
|
||||
List<Person> findByQueryWithExpressionAndMultipleNestedObjects(boolean param0, String param1, String param2);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,15 @@
[[new-features]]
= New & Noteworthy

[[new-features.1-8-0]]
== What's new in Spring Data MongoDB 1.8

* `Criteria` offers support for creating `$geoIntersects`.
* Support for http://docs.spring.io/spring/docs/current/spring-framework-reference/html/expressions.html[SpEL expressions] in `@Query` (see the sketch after this list).
* `MongoMappingEvent`s expose the collection name they are issued for.
* Improved support for `<mongo:mongo-client credentials="..." />`.
* Improved index creation failure error message.
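
For example, the SpEL support in `@Query` mentioned above lets you reference method parameters through expressions like `?#{[0]}` inside the query document. A minimal, hypothetical repository sketch (entity and method names are illustrative only):

====
[source,java]
----
interface PersonRepository extends Repository<Person, Long> {

  // binds the first method argument into the query via a SpEL parameter expression
  @Query("{ 'lastname' : ?#{[0]} }")
  List<Person> findByQueryWithExpression(String lastname);
}
----
====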

[[new-features.1-7-0]]
== What's new in Spring Data MongoDB 1.7

@@ -211,6 +211,7 @@ The MappingMongoConverter can use metadata to drive the mapping of objects to do
* `@PersistenceConstructor` - marks a given constructor - even a package protected one - to use when instantiating the object from the database. Constructor arguments are mapped by name to the key values in the retrieved DBObject.
* `@Value` - this annotation is part of the Spring Framework. Within the mapping framework it can be applied to constructor arguments. This lets you use a Spring Expression Language statement to transform a key's value retrieved in the database before it is used to construct a domain object. In order to reference a property of a given document one has to use expressions like `@Value("#root.myProperty")`, where `root` refers to the root of the given document.
* `@Field` - applied at the field level and describes the name of the field as it will be represented in the MongoDB BSON document, thus allowing the name to be different from the field name of the class.
* `@Version` - applied at field level, it is used for optimistic locking and checked for modification on save operations. The initial value is `zero`, which is bumped automatically on every update.

The mapping metadata infrastructure is defined in a separate spring-data-commons project that is technology agnostic. Specific subclasses are used in the MongoDB support to handle annotation-based metadata. Other strategies can also be put in place if there is demand.
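
A short, hypothetical domain class pulling the annotations above together (type and field names are made up for illustration):

====
[source,java]
----
@Document
class Account {

  @Id private String id;

  // stored under the key "acc_no" instead of the Java field name
  @Field("acc_no") private String accountNumber;

  // bumped automatically on every update, used for optimistic locking
  @Version private Long version;

  @PersistenceConstructor
  Account(String accountNumber) {
    this.accountNumber = accountNumber;
  }
}
----
====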

@@ -381,7 +381,7 @@ MongoDB repository support integrates with the http://www.querydsl.com/[QueryDSL
* Adapts better to refactoring changes in domain types
* Incremental query definition is easier

Please refer to the QueryDSL documentation which describes how to bootstrap your environment for APT based code generation http://source.mysema.com/static/querydsl/2.1.2/reference/html/ch02.html#d0e112[using Maven] or http://source.mysema.com/static/querydsl/2.1.2/reference/html/ch02.html#d0e131[using Ant].
Please refer to the http://www.querydsl.com/static/querydsl/latest/reference/html/[QueryDSL documentation] which describes how to bootstrap your environment for APT based code generation using Maven or Ant.

Using QueryDSL you will be able to write queries as shown below

@@ -64,7 +64,7 @@ You will also need to add the location of the Spring Milestone repository for ma
</repositories>
----

The repository is also http://shrub.appspot.com/maven.springframework.org/milestone/org/springframework/data/[browseable here].
The repository is also http://repo.spring.io/milestone/org/springframework/data/[browseable here].

You may also want to set the logging level to `DEBUG` to see some additional information; to do so, edit the log4j.properties file to have

@@ -938,6 +938,40 @@ You can use several overloaded methods to remove an object from the database.

* *remove* Remove the given document based on one of the following: a specific object instance, a query document criteria combined with a class, or a query document criteria combined with a specific collection name. A sketch of the first two variants follows below.
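
A quick sketch of the first two variants (the `Person` type and field names are illustrative):

====
[source,java]
----
// remove a specific object instance
template.remove(person);

// remove by query document criteria combined with a class
template.remove(query(where("firstname").is("Daenerys")), Person.class);
----
====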

[[mongo-template.optimistic-locking]]
=== Optimistic locking

The `@Version` annotation provides JPA-like semantics in the context of MongoDB and makes sure updates are only applied to documents with a matching version. Therefore the actual value of the version property is added to the update query in such a way that the update won't have any effect if another operation altered the document in between. In that case an `OptimisticLockingFailureException` is thrown.

====
[source,java]
----
@Document
class Person {

  @Id String id;
  String firstname;
  String lastname;
  @Version Long version;
}

Person daenerys = template.insert(new Person("Daenerys")); <1>

Person tmp = template.findOne(query(where("id").is(daenerys.getId())), Person.class); <2>

daenerys.setLastname("Targaryen");
template.save(daenerys); <3>

template.save(tmp); // throws OptimisticLockingFailureException <4>
----
<1> Initially insert document. `version` is set to `0`.
<2> Load the just inserted document. `version` is still `0`.
<3> Update the document with `version = 0`. Set the `lastname` and bump `version` to `1`.
<4> Trying to update the previously loaded document still having `version = 0` fails with an `OptimisticLockingFailureException`, as the current `version` is `1`.
====

IMPORTANT: Using MongoDB driver version 3 requires setting the `WriteConcern` to `ACKNOWLEDGED`. Otherwise an `OptimisticLockingFailureException` can be silently swallowed.
|
||||
|
||||
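A sketch of one way to do that at the template level (the `WriteConcern` constant comes from the MongoDB Java driver):

[source,java]
----
template.setWriteConcern(WriteConcern.ACKNOWLEDGED);
----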
[[mongo.query]]
== Querying Documents

@@ -1159,13 +1193,13 @@ The geo near operations return a `GeoResults` wrapper object that encapsulates `
[[mongo.geo-json]]
=== GeoJSON Support

MongoDB supports http://geojeson.org/[GeoJSON] and simple (legacy) coordinate pairs for geospatial data. Those formats can both be used for storing as well as querying data.
MongoDB supports http://geojson.org/[GeoJSON] and simple (legacy) coordinate pairs for geospatial data. Those formats can both be used for storing as well as querying data.

NOTE: Please refer to the http://docs.mongodb.org/manual/core/2dsphere/#geospatial-indexes-store-geojson/[MongoDB manual on GeoJSON support] to learn about requirements and restrictions.

==== GeoJSON types in domain classes

Usage of http://geojeson.org/[GeoJSON] types in domain classes is straightforward. The `org.springframework.data.mongodb.core.geo` package contains types like `GeoJsonPoint`, `GeoJsonPolygon` and others. Those are extensions to the existing `org.springframework.data.geo` types.
Usage of http://geojson.org/[GeoJSON] types in domain classes is straightforward. The `org.springframework.data.mongodb.core.geo` package contains types like `GeoJsonPoint`, `GeoJsonPolygon` and others. Those are extensions to the existing `org.springframework.data.geo` types.

====
[source,java]
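----
// Not the original listing (it is truncated in this diff excerpt) - just a sketch of a
// domain type using the GeoJSON types from org.springframework.data.mongodb.core.geo.
public class Venue {

  @Id String id;
  String name;
  GeoJsonPoint location;
}
----
====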
@@ -2013,7 +2047,7 @@ class MyConverter implements Converter<Person, String> { … }
class MyConverter implements Converter<String, Person> { … }
----

In case you write a `Converter` whose source and target type are native Mongo types there's no way for us to determine whether we should consider it as reading or writing converter. Registering the converter instance as both might lead to unwanted results then. E.g. a `Converter<String, Long>` is ambiguous although it probably does not make sense to try to convert all `String`s into `Long`s when writing. To be generally able to force the infrastructure to register a converter for one way only we provide `@ReadingConverter` as well as `@WritingConverter` to be used at the converter implementation.
In case you write a `Converter` whose source and target type are native Mongo types there's no way for us to determine whether we should consider it as reading or writing converter. Registering the converter instance as both might lead to unwanted results then. E.g. a `Converter<String, Long>` is ambiguous although it probably does not make sense to try to convert all `String` instances into `Long` instances when writing. To be generally able to force the infrastructure to register a converter for one way only we provide `@ReadingConverter` as well as `@WritingConverter` to be used at the converter implementation.

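A sketch of what that one-way registration might look like (the converter names and bodies here are made up for illustration; `DBObject` and `BasicDBObject` come from the MongoDB Java driver, and the `Person` accessors are assumed to exist):

[source,java]
----
@WritingConverter
class PersonToDBObjectConverter implements Converter<Person, DBObject> {

  public DBObject convert(Person source) {
    return new BasicDBObject("firstname", source.getFirstname());
  }
}

@ReadingConverter
class DBObjectToPersonConverter implements Converter<DBObject, Person> {

  public Person convert(DBObject source) {
    return new Person((String) source.get("firstname"));
  }
}
----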
[[mongo-template.index-and-collections]]
== Index and Collection management

@@ -1,6 +1,103 @@
Spring Data MongoDB Changelog
=============================

Changes in version 1.8.0.RELEASE (2015-09-01)
---------------------------------------------
* DATAMONGO-1282 - Release 1.8 GA (Gosling).
* DATAMONGO-1280 - Add what's new section to reference documentation.
* DATAMONGO-1275 - Reference documentation should mention support for optimistic locking.
* DATAMONGO-1269 - QueryMapper drops numeric keys in Maps.
* DATAMONGO-1256 - Provide a collectionName in MongoMappingEvents.

Changes in version 1.8.0.RC1 (2015-08-04)
-----------------------------------------
* DATAMONGO-1268 - Release 1.8 RC1 (Gosling).
* DATAMONGO-1266 - Repository query methods returning a primitive do not detect domain type correctly.
* DATAMONGO-1260 - Prevent accidental authentication misconfiguration on SimpleMongoDbFactory.
* DATAMONGO-1257 - mongo:mongo-client namespace does not support usernames with a comma.
* DATAMONGO-1254 - Group after Project in aggregation uses incorrect field name.
* DATAMONGO-1251 - update / findAndModify throws NullPointerException.
* DATAMONGO-1250 - Custom converter implementation not used in updates.
* DATAMONGO-1244 - StringBasedMongoQuery handles complex expression parameters incorrectly.
* DATAMONGO-1242 - Update mongo-java-driver to 3.0.2 in mongo3 profile.
* DATAMONGO-1236 - MongoOperations findAndModify and updateFirst do not include the _class in Map values.
* DATAMONGO-1234 - Fix typos in JavaDoc.
* DATAMONGO-1232 - IgnoreCase should escape queries.
* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path.
* DATAMONGO-1166 - ReadPreference not used for Aggregations.
* DATAMONGO-1157 - Throw meaningful exception when @DbRef is used with unsupported types.
* DATAMONGO-1125 - Specify collection that triggers CommandFailureException.

Changes in version 1.7.2.RELEASE (2015-07-28)
---------------------------------------------
* DATAMONGO-1261 - Release 1.7.2 (Fowler).
* DATAMONGO-1260 - Prevent accidental authentication misconfiguration on SimpleMongoDbFactory.
* DATAMONGO-1257 - mongo:mongo-client namespace does not support usernames with a comma.
* DATAMONGO-1254 - Group after Project in aggregation uses incorrect field name.
* DATAMONGO-1251 - update / findAndModify throws NullPointerException.
* DATAMONGO-1250 - Custom converter implementation not used in updates.

Changes in version 1.5.6.RELEASE (2015-07-01)
---------------------------------------------
* DATAMONGO-1246 - Release 1.5.6 (Dijkstra).
* DATAMONGO-1234 - Fix typos in JavaDoc.
* DATAMONGO-1232 - IgnoreCase should escape queries.
* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path.
* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility.
* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up.
* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item.
* DATAMONGO-1180 - Incorrect exception message creation in PartTreeMongoQuery.
* DATAMONGO-1166 - ReadPreference not used for Aggregations.
* DATAMONGO-1155 - Upgrade mongo-next build profiles to Java driver version 2.13.0.

Changes in version 1.6.3.RELEASE (2015-07-01)
---------------------------------------------
* DATAMONGO-1247 - Release 1.6.3 (Evans).
* DATAMONGO-1242 - Update mongo-java-driver to 3.0.2 in mongo3 profile.
* DATAMONGO-1234 - Fix typos in JavaDoc.
* DATAMONGO-1232 - IgnoreCase should escape queries.
* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path.
* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility.
* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up.
* DATAMONGO-1213 - Include new section on Spring Data and Spring Framework dependencies in reference documentation.
* DATAMONGO-1210 - Inconsistent property order of _class type hint breaks document equality.
* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item.
* DATAMONGO-1196 - Upgrade build profiles after MongoDB 3.0 Java driver release.
* DATAMONGO-1180 - Incorrect exception message creation in PartTreeMongoQuery.
* DATAMONGO-1166 - ReadPreference not used for Aggregations.
* DATAMONGO-1157 - Throw meaningful exception when @DbRef is used with unsupported types.
* DATAMONGO-1155 - Upgrade mongo-next build profiles to Java driver version 2.13.0.
* DATAMONGO-1153 - Fix documentation build.
* DATAMONGO-1133 - Field aliasing is not honored in Aggregation operations.
* DATAMONGO-1124 - Switch log level for cyclic reference index warnings from WARN to INFO.
* DATAMONGO-1081 - Improve documentation on field mapping semantics.

Changes in version 1.7.1.RELEASE (2015-06-30)
---------------------------------------------
* DATAMONGO-1248 - Release 1.7.1 (Fowler).
* DATAMONGO-1242 - Update mongo-java-driver to 3.0.2 in mongo3 profile.
* DATAMONGO-1234 - Fix typos in JavaDoc.
* DATAMONGO-1232 - IgnoreCase should escape queries.
* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path.
* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility.
* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up.
* DATAMONGO-1216 - Authentication mechanism PLAIN changes to SCRAM-SHA-1.
* DATAMONGO-1213 - Include new section on Spring Data and Spring Framework dependencies in reference documentation.
* DATAMONGO-1210 - Inconsistent property order of _class type hint breaks document equality.
* DATAMONGO-1208 - MongoTemplate.stream(…) does not consider limit, order, sort etc.
* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item.
* DATAMONGO-1202 - Indexed annotation problems under generics.
* DATAMONGO-1196 - Upgrade build profiles after MongoDB 3.0 Java driver release.
* DATAMONGO-1193 - Prevent unnecessary database lookups when resolving DBRefs on 2.x driver.
* DATAMONGO-1166 - ReadPreference not used for Aggregations.
* DATAMONGO-1157 - Throw meaningful exception when @DbRef is used with unsupported types.

Changes in version 1.8.0.M1 (2015-06-02)
----------------------------------------
* DATAMONGO-1228 - Release 1.8 M1 (Gosling).

@@ -1,4 +1,4 @@
Spring Data MongoDB 1.8 M1
Spring Data MongoDB 1.8 GA
Copyright (c) [2010-2015] Pivotal Software, Inc.

This product is licensed to you under the Apache License, Version 2.0 (the "License").