Compare commits
25 Commits
Comparing 1.3.3.RELEASE...1.3.x
| Author | SHA1 | Date |
|---|---|---|
| | f27f42976b | |
| | 752ea95fde | |
| | 3ea9dd9e73 | |
| | c077ae8985 | |
| | 19e29c7e1d | |
| | 4f0b2d66a5 | |
| | 4b5c53e959 | |
| | cb071ce05f | |
| | e59911b42b | |
| | 61e87be306 | |
| | 1637f8d181 | |
| | b9c8b7b234 | |
| | 741429a452 | |
| | b75f4795ea | |
| | a97980b04d | |
| | 2d8c666802 | |
| | de0c4109d7 | |
| | 02abfced9c | |
| | ec696618be | |
| | ee43703100 | |
| | cadc74932e | |
| | fa4b4b97dd | |
| | 8392f4275f | |
| | 8ff1913ec7 | |
| | 57c7524c77 | |
CONTRIBUTING.MD (new file, 1 addition)

@@ -0,0 +1 @@
You find the contribution guidelines for Spring Data projects [here](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.md).
@@ -26,7 +26,7 @@ Add the Maven dependency:
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.3.2.RELEASE</version>
<version>1.3.5.RELEASE</version>
</dependency>
```
pom.xml (40 changes)

@@ -5,17 +5,17 @@

<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.3.3.RELEASE</version>
<version>1.3.6.BUILD-SNAPSHOT</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>
<description>MongoDB support for Spring Data</description>
<url>http://www.springsource.org/spring-data/mongodb</url>
<url>http://projects.spring.io/spring-data-mongodb</url>

<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>1.2.0.RELEASE</version>
<version>1.2.2.RELEASE</version>
<relativePath>../spring-data-build/parent/pom.xml</relativePath>
</parent>

@@ -29,7 +29,7 @@

<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>1.6.3.RELEASE</springdata.commons>
<springdata.commons>1.6.5.RELEASE</springdata.commons>
<mongo>2.10.1</mongo>
</properties>

@@ -38,7 +38,7 @@

<id>ogierke</id>
<name>Oliver Gierke</name>
<email>ogierke at gopivotal.com</email>
<organization>Pivotal Inc.</organization>
<organization>Pivotal Software, Inc.</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Project Lean</role>

@@ -49,7 +49,7 @@

<id>trisberg</id>
<name>Thomas Risberg</name>
<email>trisberg at gopivotal.com</email>
<organization>Pivotal Inc.</organization>
<organization>Pivotal Software, Inc.</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Developer</role>

@@ -60,7 +60,7 @@

<id>mpollack</id>
<name>Mark Pollack</name>
<email>mpollack at gopivotal.com</email>
<organization>Pivotal Inc.</organization>
<organization>Pivotal Software, Inc.</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Developer</role>

@@ -71,7 +71,7 @@

<id>jbrisbin</id>
<name>Jon Brisbin</name>
<email>jbrisbin at gopivotal.com</email>
<organization>Pivotal Inc.</organization>
<organization>Pivotal Software, Inc.</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Developer</role>

@@ -82,7 +82,18 @@

<id>tdarimont</id>
<name>Thomas Darimont</name>
<email>tdarimont at gopivotal.com</email>
<organization>Pivotal Inc.</organization>
<organization>Pivotal Software, Inc.</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Developer</role>
</roles>
<timezone>+1</timezone>
</developer>
<developer>
<id>cstrobl</id>
<name>Christoph Strobl</name>
<email>cstrobl at gopivotal.com</email>
<organization>Pivotal Software, Inc.</organization>
<organizationUrl>http://www.gopivotal.com</organizationUrl>
<roles>
<role>Developer</role>

@@ -102,9 +113,16 @@

<repositories>
<repository>
<id>spring-lib-release</id>
<url>http://repo.springsource.org/libs-release-local</url>
<id>spring-lib-snapshot</id>
<url>http://repo.spring.io/libs-snapshot</url>
</repository>
</repositories>

<pluginRepositories>
<pluginRepository>
<id>spring-plugins-release</id>
<url>http://repo.spring.io/plugins-release</url>
</pluginRepository>
</pluginRepositories>

</project>
@@ -6,7 +6,7 @@

<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.3.3.RELEASE</version>
<version>1.3.6.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -52,7 +52,7 @@

<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.3.3.RELEASE</version>
<version>1.3.6.BUILD-SNAPSHOT</version>
</dependency>

<dependency>
@@ -13,7 +13,7 @@

<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.3.3.RELEASE</version>
<version>1.3.6.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@

<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.3.3.RELEASE</version>
<version>1.3.6.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -11,7 +11,7 @@

<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.3.3.RELEASE</version>
<version>1.3.6.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -134,6 +134,13 @@

<scope>test</scope>
</dependency>

<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jul-to-slf4j</artifactId>
<version>${slf4j}</version>
<scope>test</scope>
</dependency>

</dependencies>

<build>
@@ -109,7 +109,9 @@ public abstract class AbstractMongoConfiguration {
 * entities.
 */
protected String getMappingBasePackage() {
return getClass().getPackage().getName();

Package mappingBasePackage = getClass().getPackage();
return mappingBasePackage == null ? null : mappingBasePackage.getName();
}

/**
@@ -16,12 +16,14 @@
package org.springframework.data.mongodb.config;

import java.beans.PropertyEditorSupport;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashSet;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;

import com.mongodb.ServerAddress;

@@ -35,6 +37,11 @@ import com.mongodb.ServerAddress;
 */
public class ServerAddressPropertyEditor extends PropertyEditorSupport {

/**
 * A port is a number without a leading 0 at the end of the address that is proceeded by just a single :.
 */
private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";
private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address {} '{}'. Check your replica set configuration!";
private static final Logger LOG = LoggerFactory.getLogger(ServerAddressPropertyEditor.class);

/*

@@ -77,22 +84,53 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
 */
private ServerAddress parseServerAddress(String source) {

String[] hostAndPort = StringUtils.delimitedListToStringArray(source.trim(), ":");
if (!StringUtils.hasText(source)) {
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
return null;
}

if (!StringUtils.hasText(source) || hostAndPort.length > 2) {
LOG.warn("Could not parse address source '{}'. Check your replica set configuration!", source);
String[] hostAndPort = extractHostAddressAndPort(source.trim());

if (hostAndPort.length > 2) {
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
return null;
}

try {
return hostAndPort.length == 1 ? new ServerAddress(hostAndPort[0]) : new ServerAddress(hostAndPort[0],
Integer.parseInt(hostAndPort[1]));
InetAddress hostAddress = InetAddress.getByName(hostAndPort[0]);
Integer port = hostAndPort.length == 1 ? null : Integer.parseInt(hostAndPort[1]);

return port == null ? new ServerAddress(hostAddress) : new ServerAddress(hostAddress, port);
} catch (UnknownHostException e) {
LOG.warn("Could not parse host '{}'. Check your replica set configuration!", hostAndPort[0]);
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]);
} catch (NumberFormatException e) {
LOG.warn("Could not parse port '{}'. Check your replica set configuration!", hostAndPort[1]);
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]);
}

return null;
}

/**
 * Extract the host and port from the given {@link String}.
 *
 * @param addressAndPortSource must not be {@literal null}.
 * @return
 */
private String[] extractHostAddressAndPort(String addressAndPortSource) {

Assert.notNull(addressAndPortSource, "Address and port source must not be null!");

String[] hostAndPort = addressAndPortSource.split(HOST_PORT_SPLIT_PATTERN);
String hostAddress = hostAndPort[0];

if (isHostAddressInIPv6BracketNotation(hostAddress)) {
hostAndPort[0] = hostAddress.substring(1, hostAddress.length() - 1);
}

return hostAndPort;
}

private boolean isHostAddressInIPv6BracketNotation(String hostAddress) {
return hostAddress.startsWith("[") && hostAddress.endsWith("]");
}
}
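For orientation, here is a small standalone sketch of how the new HOST_PORT_SPLIT_PATTERN behaves on plain, IPv6, and bracketed IPv6 inputs. It is not part of the change set; the sample addresses are illustrative.

```java
import java.util.Arrays;

public class HostPortSplitSketch {

	// Same pattern as introduced above: split on a ':' that is not preceded by another ':'
	// and that is followed by a port number without a leading zero, running to the end of the string.
	private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";

	public static void main(String[] args) {
		print("localhost:27017"); // -> [localhost, 27017]
		print("::1");             // -> [::1]           (no port detected)
		print("[::1]:27017");     // -> [[::1], 27017]  (brackets are stripped later)
		print("0000:0000:0000:0000:0000:0000:0000:0001"); // -> one element; trailing group has a leading zero
	}

	private static void print(String source) {
		System.out.println(Arrays.toString(source.split(HOST_PORT_SPLIT_PATTERN)));
	}
}
```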
@@ -1,5 +1,5 @@
/*
 * Copyright 2010-2013 the original author or authors.
 * Copyright 2010-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -124,6 +124,7 @@ import com.mongodb.util.JSONParseException;
 * @author Sebastian Herold
 * @author Thomas Darimont
 * @author Chuong Ngo
 * @author Christoph Strobl
 */
public class MongoTemplate implements MongoOperations, ApplicationContextAware {

@@ -365,7 +366,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {

Assert.notNull(query);

DBObject queryObject = query.getQueryObject();
DBObject queryObject = queryMapper.getMappedObject(query.getQueryObject(), null);
DBObject sortObject = query.getSortObject();
DBObject fieldsObject = query.getFieldsObject();

@@ -992,6 +993,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {

MongoPersistentEntity<?> entity = entityClass == null ? null : getPersistentEntity(entityClass);

increaseVersionForUpdateIfNecessary(entity, update);

DBObject queryObj = query == null ? new BasicDBObject() : queryMapper.getMappedObject(query.getQueryObject(),
entity);
DBObject updateObj = update == null ? new BasicDBObject() : updateMapper.getMappedObject(

@@ -1009,7 +1012,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
: collection.update(queryObj, updateObj, upsert, multi, writeConcernToUse);

if (entity != null && entity.hasVersionProperty() && !multi) {
if (writeResult.getN() == 0) {
if (writeResult.getN() == 0 && dbObjectContainsVersionProperty(queryObj, entity)) {
throw new OptimisticLockingFailureException("Optimistic lock exception on saving entity: "
+ updateObj.toMap().toString() + " to collection " + collectionName);
}

@@ -1021,6 +1024,24 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
});
}

private void increaseVersionForUpdateIfNecessary(MongoPersistentEntity<?> persistentEntity, Update update) {

if (persistentEntity != null && persistentEntity.hasVersionProperty()) {
if (!dbObjectContainsVersionProperty(update.getUpdateObject(), persistentEntity)) {
update.inc(persistentEntity.getVersionProperty().getFieldName(), 1L);
}
}
}

private boolean dbObjectContainsVersionProperty(DBObject dbObject, MongoPersistentEntity<?> persistentEntity) {

if (persistentEntity == null || !persistentEntity.hasVersionProperty()) {
return false;
}

return dbObject.containsField(persistentEntity.getVersionProperty().getFieldName());
}

public void remove(Object object) {

if (object == null) {

@@ -1547,8 +1568,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {

MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);

DBObject mappedUpdate = queryMapper.getMappedObject(update.getUpdateObject(), entity);
DBObject mappedQuery = queryMapper.getMappedObject(query, entity);
DBObject mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity);

if (LOGGER.isDebugEnabled()) {
LOGGER.debug("findAndModify using query: " + mappedQuery + " fields: " + fields + " sort: " + sort
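The new increaseVersionForUpdateIfNecessary/dbObjectContainsVersionProperty pair means a plain updateFirst on an entity with a @Version property now bumps the version automatically, which is what the DATAMONGO-811 tests further down exercise. A hedged usage sketch (entity and field names are illustrative, not from the change set):

```java
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

class VersionedAccount {
	@Id String id;
	@Version Long version;
	String owner;
}

class VersionBumpSketch {

	void renameOwner(MongoOperations template, String id) {

		Query query = Query.query(Criteria.where("id").is(id));

		// No explicit $inc on "version" here: the template now adds it for versioned entities,
		// so the stored document's version increases (e.g. from 0 to 1) on this update.
		template.updateFirst(query, Update.update("owner", "Dave"), VersionedAccount.class);
	}
}
```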
@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2013 the original author or authors.
 * Copyright 2011-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -17,13 +17,13 @@ package org.springframework.data.mongodb.core.convert;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -56,6 +56,7 @@ import org.springframework.util.Assert;
 * .
 *
 * @author Oliver Gierke
 * @author Thomas Darimont
 */
public class CustomConversions {

@@ -67,7 +68,7 @@ public class CustomConversions {
private final Set<ConvertiblePair> writingPairs;
private final Set<Class<?>> customSimpleTypes;
private final SimpleTypeHolder simpleTypeHolder;
private final Map<Class<?>, HashMap<Class<?>, CacheValue>> cache;
private final ConcurrentMap<ConvertiblePair, CacheValue> customReadTargetTypes;

private final List<Object> converters;

@@ -90,7 +91,7 @@ public class CustomConversions {
this.readingPairs = new LinkedHashSet<ConvertiblePair>();
this.writingPairs = new LinkedHashSet<ConvertiblePair>();
this.customSimpleTypes = new HashSet<Class<?>>();
this.cache = new HashMap<Class<?>, HashMap<Class<?>, CacheValue>>();
this.customReadTargetTypes = new ConcurrentHashMap<GenericConverter.ConvertiblePair, CacheValue>();

this.converters = new ArrayList<Object>();
this.converters.addAll(converters);

@@ -195,25 +196,25 @@ public class CustomConversions {
 *
 * @param pair
 */
private void register(ConverterRegistration context) {
private void register(ConverterRegistration converterRegistration) {

ConvertiblePair pair = context.getConvertiblePair();
ConvertiblePair pair = converterRegistration.getConvertiblePair();

if (context.isReading()) {
if (converterRegistration.isReading()) {

readingPairs.add(pair);

if (LOG.isWarnEnabled() && !context.isSimpleSourceType()) {
if (LOG.isWarnEnabled() && !converterRegistration.isSimpleSourceType()) {
LOG.warn(String.format(READ_CONVERTER_NOT_SIMPLE, pair.getSourceType(), pair.getTargetType()));
}
}

if (context.isWriting()) {
if (converterRegistration.isWriting()) {

writingPairs.add(pair);
customSimpleTypes.add(pair.getSourceType());

if (LOG.isWarnEnabled() && !context.isSimpleTargetType()) {
if (LOG.isWarnEnabled() && !converterRegistration.isSimpleTargetType()) {
LOG.warn(String.format(WRITE_CONVERTER_NOT_SIMPLE, pair.getSourceType(), pair.getTargetType()));
}
}

@@ -223,11 +224,11 @@ public class CustomConversions {
 * Returns the target type to convert to in case we have a custom conversion registered to convert the given source
 * type into a Mongo native one.
 *
 * @param source must not be {@literal null}
 * @param sourceType must not be {@literal null}
 * @return
 */
public Class<?> getCustomWriteTarget(Class<?> source) {
return getCustomWriteTarget(source, null);
public Class<?> getCustomWriteTarget(Class<?> sourceType) {
return getCustomWriteTarget(sourceType, null);
}

/**

@@ -235,72 +236,78 @@ public class CustomConversions {
 * oth the given expected type though. If {@code expectedTargetType} is {@literal null} we will simply return the
 * first target type matching or {@literal null} if no conversion can be found.
 *
 * @param source must not be {@literal null}
 * @param expectedTargetType
 * @param sourceType must not be {@literal null}
 * @param requestedTargetType
 * @return
 */
public Class<?> getCustomWriteTarget(Class<?> source, Class<?> expectedTargetType) {
public Class<?> getCustomWriteTarget(Class<?> sourceType, Class<?> requestedTargetType) {

Assert.notNull(source);
return getCustomTarget(source, expectedTargetType, writingPairs);
Assert.notNull(sourceType);

return getCustomTarget(sourceType, requestedTargetType, writingPairs);
}

/**
 * Returns whether we have a custom conversion registered to write into a Mongo native type. The returned type might
 * be a subclass oth the given expected type though.
 * be a subclass of the given expected type though.
 *
 * @param source must not be {@literal null}
 * @param sourceType must not be {@literal null}
 * @return
 */
public boolean hasCustomWriteTarget(Class<?> source) {
return hasCustomWriteTarget(source, null);
public boolean hasCustomWriteTarget(Class<?> sourceType) {

Assert.notNull(sourceType);
return hasCustomWriteTarget(sourceType, null);
}

/**
 * Returns whether we have a custom conversion registered to write an object of the given source type into an object
 * of the given Mongo native target type.
 *
 * @param source must not be {@literal null}.
 * @param expectedTargetType
 * @param sourceType must not be {@literal null}.
 * @param requestedTargetType
 * @return
 */
public boolean hasCustomWriteTarget(Class<?> source, Class<?> expectedTargetType) {
return getCustomWriteTarget(source, expectedTargetType) != null;
public boolean hasCustomWriteTarget(Class<?> sourceType, Class<?> requestedTargetType) {

Assert.notNull(sourceType);
return getCustomWriteTarget(sourceType, requestedTargetType) != null;
}

/**
 * Returns whether we have a custom conversion registered to read the given source into the given target type.
 *
 * @param source must not be {@literal null}
 * @param expectedTargetType must not be {@literal null}
 * @param sourceType must not be {@literal null}
 * @param requestedTargetType must not be {@literal null}
 * @return
 */
public boolean hasCustomReadTarget(Class<?> source, Class<?> expectedTargetType) {
public boolean hasCustomReadTarget(Class<?> sourceType, Class<?> requestedTargetType) {

Assert.notNull(source);
Assert.notNull(expectedTargetType);
Assert.notNull(sourceType);
Assert.notNull(requestedTargetType);

return getCustomReadTarget(source, expectedTargetType) != null;
return getCustomReadTarget(sourceType, requestedTargetType) != null;
}

/**
 * Inspects the given {@link ConvertiblePair} for ones that have a source compatible type as source. Additionally
 * checks assignabilty of the target type if one is given.
 * checks assignability of the target type if one is given.
 *
 * @param source must not be {@literal null}
 * @param expectedTargetType
 * @param pairs must not be {@literal null}
 * @param sourceType must not be {@literal null}.
 * @param requestedTargetType can be {@literal null}.
 * @param pairs must not be {@literal null}.
 * @return
 */
private static Class<?> getCustomTarget(Class<?> source, Class<?> expectedTargetType, Iterable<ConvertiblePair> pairs) {
private static Class<?> getCustomTarget(Class<?> sourceType, Class<?> requestedTargetType,
Iterable<ConvertiblePair> pairs) {

Assert.notNull(source);
Assert.notNull(sourceType);
Assert.notNull(pairs);

for (ConvertiblePair typePair : pairs) {
if (typePair.getSourceType().isAssignableFrom(source)) {
if (typePair.getSourceType().isAssignableFrom(sourceType)) {
Class<?> targetType = typePair.getTargetType();
if (expectedTargetType == null || targetType.isAssignableFrom(expectedTargetType)) {
if (requestedTargetType == null || targetType.isAssignableFrom(requestedTargetType)) {
return targetType;
}
}

@@ -309,27 +316,33 @@ public class CustomConversions {
return null;
}

private Class<?> getCustomReadTarget(Class<?> source, Class<?> expectedTargetType) {
/**
 * Returns the actual target type for the given {@code sourceType} and {@code requestedTargetType}. Note that the
 * returned {@link Class} could be an assignable type to the given {@code requestedTargetType}.
 *
 * @param sourceType must not be {@literal null}.
 * @param requestedTargetType can be {@literal null}.
 * @return
 */
private Class<?> getCustomReadTarget(Class<?> sourceType, Class<?> requestedTargetType) {

Class<?> type = expectedTargetType == null ? PlaceholderType.class : expectedTargetType;
Assert.notNull(sourceType);

Map<Class<?>, CacheValue> map;
CacheValue toReturn;

if ((map = cache.get(source)) == null || (toReturn = map.get(type)) == null) {

Class<?> target = getCustomTarget(source, type, readingPairs);

if (cache.get(source) == null) {
cache.put(source, new HashMap<Class<?>, CacheValue>());
}

Map<Class<?>, CacheValue> value = cache.get(source);
toReturn = target == null ? CacheValue.NULL : new CacheValue(target);
value.put(type, toReturn);
if (requestedTargetType == null) {
return null;
}

return toReturn.clazz;
ConvertiblePair lookupKey = new ConvertiblePair(sourceType, requestedTargetType);
CacheValue readTargetTypeValue = customReadTargetTypes.get(lookupKey);

if (readTargetTypeValue != null) {
return readTargetTypeValue.getType();
}

readTargetTypeValue = CacheValue.of(getCustomTarget(sourceType, requestedTargetType, readingPairs));
CacheValue cacheValue = customReadTargetTypes.putIfAbsent(lookupKey, readTargetTypeValue);

return cacheValue != null ? cacheValue.getType() : readTargetTypeValue.getType();
}

@WritingConverter

@@ -338,8 +351,10 @@ public class CustomConversions {
INSTANCE;

public Set<ConvertiblePair> getConvertibleTypes() {

ConvertiblePair localeToString = new ConvertiblePair(Locale.class, String.class);
ConvertiblePair booleanToString = new ConvertiblePair(Character.class, String.class);

return new HashSet<ConvertiblePair>(Arrays.asList(localeToString, booleanToString));
}

@@ -348,29 +363,29 @@ public class CustomConversions {
}
}

/**
 * Placeholder type to allow registering not-found values in the converter cache.
 *
 * @author Patryk Wasik
 * @author Oliver Gierke
 */
private static class PlaceholderType {

}

/**
 * Wrapper to safely store {@literal null} values in the type cache.
 *
 * @author Patryk Wasik
 * @author Oliver Gierke
 * @author Thomas Darimont
 */
private static class CacheValue {

public static final CacheValue NULL = new CacheValue(null);
private final Class<?> clazz;
private static final CacheValue ABSENT = new CacheValue(null);

public CacheValue(Class<?> clazz) {
this.clazz = clazz;
private final Class<?> type;

public CacheValue(Class<?> type) {
this.type = type;
}

public Class<?> getType() {
return type;
}

static CacheValue of(Class<?> type) {
return type == null ? ABSENT : new CacheValue(type);
}
}
}
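The reworked read-target cache above is the standard ConcurrentMap putIfAbsent idiom: compute a candidate outside the map, publish it only if no other thread got there first, and return whichever value actually ended up in the map. A stripped-down sketch of the same pattern, with illustrative names and without the null-wrapping CacheValue detail:

```java
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

class ReadTargetCacheSketch<K, V> {

	interface Loader<K, V> {
		V load(K key);
	}

	private final ConcurrentMap<K, V> cache = new ConcurrentHashMap<K, V>();

	V getOrCompute(K key, Loader<K, V> loader) {

		V cached = cache.get(key);
		if (cached != null) {
			return cached;
		}

		// Compute outside the map, then publish atomically; if another thread inserted a
		// value in the meantime, putIfAbsent returns that value and we use it instead.
		V computed = loader.load(key);
		V raced = cache.putIfAbsent(key, computed);

		return raced != null ? raced : computed;
	}
}
```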
@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2013 the original author or authors.
 * Copyright 2011-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -23,6 +23,7 @@ import java.util.Set;
import org.bson.types.ObjectId;
import org.springframework.core.convert.ConversionException;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.PropertyPath;
import org.springframework.data.mapping.PropertyReferenceException;

@@ -30,6 +31,7 @@ import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.context.PersistentPropertyPath;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.util.Assert;

@@ -44,6 +46,7 @@ import com.mongodb.DBRef;
 * @author Jon Brisbin
 * @author Oliver Gierke
 * @author Patryk Wasik
 * @author Thomas Darimont
 */
public class QueryMapper {

@@ -101,7 +104,7 @@ public class QueryMapper {
continue;
}

Field field = entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext);
Field field = createPropertyField(entity, key, mappingContext);

Object rawValue = query.get(key);
String newKey = field.getMappedKey();

@@ -117,6 +120,17 @@ public class QueryMapper {
return result;
}

/**
 * @param entity
 * @param key
 * @param mappingContext
 * @return
 */
protected Field createPropertyField(MongoPersistentEntity<?> entity, String key,
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
return entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext);
}

/**
 * Returns the given {@link DBObject} representing a keyword by mapping the keyword's value.
 *

@@ -176,20 +190,22 @@ public class QueryMapper {

if (value instanceof DBObject) {
DBObject valueDbo = (DBObject) value;
DBObject resultDbo = new BasicDBObject(valueDbo.toMap());

if (valueDbo.containsField("$in") || valueDbo.containsField("$nin")) {
String inKey = valueDbo.containsField("$in") ? "$in" : "$nin";
List<Object> ids = new ArrayList<Object>();
for (Object id : (Iterable<?>) valueDbo.get(inKey)) {
ids.add(convertId(id));
}
valueDbo.put(inKey, ids.toArray(new Object[ids.size()]));
resultDbo.put(inKey, ids.toArray(new Object[ids.size()]));
} else if (valueDbo.containsField("$ne")) {
valueDbo.put("$ne", convertId(valueDbo.get("$ne")));
resultDbo.put("$ne", convertId(valueDbo.get("$ne")));
} else {
return getMappedObject((DBObject) value, null);
return getMappedObject(resultDbo, null);
}

return valueDbo;
return resultDbo;

} else {
return convertId(value);

@@ -200,13 +216,28 @@ public class QueryMapper {
return getMappedKeyword(new Keyword((DBObject) value), null);
}

if (documentField.isAssociation()) {
if (isAssociationConversionNecessary(documentField, value)) {
return convertAssociation(value, documentField.getProperty());
}

return convertSimpleOrDBObject(value, documentField.getPropertyEntity());
}

/**
 * Returns whether the given {@link Field} represents an association reference that together with the given value
 * requires conversion to a {@link org.springframework.data.mongodb.core.mapping.DBRef} object. We check whether the
 * type of the given value is compatible with the type of the given document field in order to deal with potential
 * query field exclusions, since MongoDB uses the {@code int} {@literal 0} as an indicator for an excluded field.
 *
 * @param documentField
 * @param value
 * @return
 */
private boolean isAssociationConversionNecessary(Field documentField, Object value) {
return documentField.isAssociation() && value != null
&& documentField.getProperty().getActualType().isAssignableFrom(value.getClass());
}

/**
 * Retriggers mapping if the given source is a {@link DBObject} or simply invokes the
 *

@@ -248,7 +279,8 @@ public class QueryMapper {
 */
private Object convertAssociation(Object source, MongoPersistentProperty property) {

if (property == null || !property.isAssociation()) {
if (property == null || !property.isAssociation() || source == null || source instanceof DBRef
|| !property.isEntity()) {
return source;
}

@@ -270,7 +302,7 @@ public class QueryMapper {
return result;
}

return source == null || source instanceof DBRef ? source : converter.toDBRef(source, property);
return converter.toDBRef(source, property);
}

/**

@@ -381,7 +413,7 @@ public class QueryMapper {
 *
 * @author Oliver Gierke
 */
private static class Field {
protected static class Field {

private static final String ID_KEY = "_id";

@@ -458,12 +490,14 @@ public class QueryMapper {
 * Extension of {@link DocumentField} to be backed with mapping metadata.
 *
 * @author Oliver Gierke
 * @author Thomas Darimont
 */
private static class MetadataBackedField extends Field {
protected static class MetadataBackedField extends Field {

private final MongoPersistentEntity<?> entity;
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
private final MongoPersistentProperty property;
private final PersistentPropertyPath<MongoPersistentProperty> path;

/**
 * Creates a new {@link MetadataBackedField} with the given name, {@link MongoPersistentEntity} and

@@ -483,7 +517,7 @@ public class QueryMapper {
this.entity = entity;
this.mappingContext = context;

PersistentPropertyPath<MongoPersistentProperty> path = getPath(name);
this.path = getPath(name);
this.property = path == null ? null : path.getLeafProperty();
}

@@ -548,19 +582,33 @@ public class QueryMapper {
 */
@Override
public String getMappedKey() {

PersistentPropertyPath<MongoPersistentProperty> path = getPath(name);
return path == null ? name : path.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE);
return path == null ? name : path.toDotPath(getPropertyConverter());
}

private PersistentPropertyPath<MongoPersistentProperty> getPath(String name) {
/**
 * Returns the {@link PersistentPropertyPath} for the given <code>pathExpression</code>.
 *
 * @param pathExpression
 * @return
 */
private PersistentPropertyPath<MongoPersistentProperty> getPath(String pathExpression) {

try {
PropertyPath path = PropertyPath.from(name, entity.getTypeInformation());
PropertyPath path = PropertyPath.from(pathExpression, entity.getTypeInformation());
return mappingContext.getPersistentPropertyPath(path);
} catch (PropertyReferenceException e) {
return null;
}
}

/**
 * Return the {@link Converter} to be used to created the mapped key. Default implementation will use
 * {@link PropertyToFieldNameConverter}.
 *
 * @return
 */
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
return PropertyToFieldNameConverter.INSTANCE;
}
}
}
@@ -1,5 +1,5 @@
/*
 * Copyright 2013 the original author or authors.
 * Copyright 2013-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -15,12 +15,21 @@
 */
package org.springframework.data.mongodb.core.convert;

import java.util.Arrays;
import java.util.Iterator;

import org.springframework.core.convert.converter.Converter;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
import org.springframework.util.Assert;

/**
 * A subclass of {@link QueryMapper} that retains type information on the mongo types.
 *
 * @author Thomas Darimont
 * @author Oliver Gierke
 */
public class UpdateMapper extends QueryMapper {

@@ -49,4 +58,90 @@ public class UpdateMapper extends QueryMapper {
return entity == null ? super.delegateConvertToMongoType(source, null) : converter.convertToMongoType(source,
entity.getTypeInformation());
}

/*
 * (non-Javadoc)
 * @see org.springframework.data.mongodb.core.convert.QueryMapper#createPropertyField(org.springframework.data.mongodb.core.mapping.MongoPersistentEntity, java.lang.String, org.springframework.data.mapping.context.MappingContext)
 */
@Override
protected Field createPropertyField(MongoPersistentEntity<?> entity, String key,
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

return entity == null ? super.createPropertyField(entity, key, mappingContext) : //
new MetadataBackedUpdateField(entity, key, mappingContext);
}

/**
 * {@link MetadataBackedField} that handles {@literal $} paths inside a field key. We clean up an update key
 * containing a {@literal $} before handing it to the super class to make sure property lookups and transformations
 * continue to work as expected. We provide a custom property converter to re-applied the cleaned up {@literal $}s
 * when constructing the mapped key.
 *
 * @author Thomas Darimont
 * @author Oliver Gierke
 */
private static class MetadataBackedUpdateField extends MetadataBackedField {

private final String key;

/**
 * Creates a new {@link MetadataBackedField} with the given {@link MongoPersistentEntity}, key and
 * {@link MappingContext}. We clean up the key before handing it up to the super class to make sure it continues to
 * work as expected.
 *
 * @param entity must not be {@literal null}.
 * @param key must not be {@literal null} or empty.
 * @param mappingContext must not be {@literal null}.
 */
public MetadataBackedUpdateField(MongoPersistentEntity<?> entity, String key,
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

super(key.replaceAll("\\.\\$", ""), entity, mappingContext);
this.key = key;
}

/*
 * (non-Javadoc)
 * @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getPropertyConverter()
 */
@Override
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
return new UpdatePropertyConverter(key);
}

/**
 * Special {@link Converter} for {@link MongoPersistentProperty} instances that will concatenate the {@literal $}
 * contained in the source update key.
 *
 * @author Oliver Gierke
 */
private static class UpdatePropertyConverter implements Converter<MongoPersistentProperty, String> {

private final Iterator<String> iterator;

/**
 * Creates a new {@link UpdatePropertyConverter} with the given update key.
 *
 * @param updateKey must not be {@literal null} or empty.
 */
public UpdatePropertyConverter(String updateKey) {

Assert.hasText(updateKey, "Update key must not be null or empty!");

this.iterator = Arrays.asList(updateKey.split("\\.")).iterator();
this.iterator.next();
}

/*
 * (non-Javadoc)
 * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
 */
@Override
public String convert(MongoPersistentProperty property) {

String mappedName = PropertyToFieldNameConverter.INSTANCE.convert(property);
return iterator.hasNext() && iterator.next().equals("$") ? String.format("%s.$", mappedName) : mappedName;
}
}
}
}
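For context, the positional-operator handling above is what lets an update key such as "items.$.name" survive property mapping: the ".$" is stripped for property resolution and re-appended when the mapped key is built. A hedged usage sketch (the entity, collection, and field names are illustrative):

```java
import java.util.List;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

class PositionalUpdateSketch {

	static class Item {
		String name;
	}

	static class Order {
		String id;
		List<Item> items;
	}

	void renameFirstMatchingItem(MongoOperations template, String orderId) {

		// "items.$.name" contains the positional operator; UpdateMapper maps the property
		// path ("items.name") and then puts the ".$" back into the resulting key.
		Query query = Query.query(Criteria.where("id").is(orderId).and("items.name").is("old"));
		Update update = Update.update("items.$.name", "new");

		template.updateFirst(query, update, Order.class);
	}
}
```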
@@ -31,6 +31,7 @@ import com.mongodb.DBObject;
 *
 * @author Oliver Gierke
 * @author Thomas Darimont
 * @author Christoph Strobl
 */
public class NearQuery {

@@ -143,10 +144,12 @@ public class NearQuery {
/**
 * Configures the {@link Pageable} to use.
 *
 * @param pageable
 * @param pageable must not be {@literal null}
 * @return
 */
public NearQuery with(Pageable pageable) {

Assert.notNull(pageable, "Pageable must not be 'null'.");
this.num = pageable.getOffset() + pageable.getPageSize();
this.skip = pageable.getOffset();
return this;

@@ -311,13 +314,18 @@ public class NearQuery {
/**
 * Adds an actual query to the {@link NearQuery} to restrict the objects considered for the actual near operation.
 *
 * @param query
 * @param query must not be {@literal null}.
 * @return
 */
public NearQuery query(Query query) {

Assert.notNull(query, "Cannot apply 'null' query on NearQuery.");
this.query = query;
this.skip = query.getSkip();
this.num = query.getLimit();

if (query.getLimit() != 0) {
this.num = query.getLimit();
}
return this;
}
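The change above makes with(Pageable) null-safe and keeps query(Query) from zeroing out a limit that was already derived from a page request. A hedged usage sketch (coordinates, page size, and the criteria are illustrative):

```java
import org.springframework.data.domain.PageRequest;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;

class NearQuerySketch {

	NearQuery firstPageOfRestaurants() {

		// Page 0 with 20 entries: num becomes offset + pageSize = 20, skip becomes 0.
		NearQuery near = NearQuery.near(48.14, 11.58).with(new PageRequest(0, 20));

		// The restricting query has no limit (0), so the page-derived num above is kept.
		return near.query(Query.query(Criteria.where("category").is("restaurant")));
	}
}
```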
@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2012 the original author or authors.
 * Copyright 2011-2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -31,6 +31,7 @@ import com.mongodb.gridfs.GridFSFile;
 *
 * @author Oliver Gierke
 * @author Philipp Schneider
 * @author Thomas Darimont
 */
public interface GridFsOperations extends ResourcePatternResolver {

@@ -126,7 +127,7 @@ public interface GridFsOperations extends ResourcePatternResolver {
 * Returns all {@link GridFsResource} with the given file name.
 *
 * @param filename
 * @return
 * @return the resource if it exists or {@literal null}.
 * @see ResourcePatternResolver#getResource(String)
 */
GridFsResource getResource(String filename);

@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2012 the original author or authors.
 * Copyright 2011-2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -43,6 +43,7 @@ import com.mongodb.gridfs.GridFSInputFile;
 *
 * @author Oliver Gierke
 * @author Philipp Schneider
 * @author Thomas Darimont
 */
public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver {

@@ -190,7 +191,9 @@ public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver
 * @see org.springframework.core.io.ResourceLoader#getResource(java.lang.String)
 */
public GridFsResource getResource(String location) {
return new GridFsResource(findOne(query(whereFilename().is(location))));

GridFSDBFile file = findOne(query(whereFilename().is(location)));
return file != null ? new GridFsResource(file) : null;
}

/*
@@ -1,5 +1,5 @@
/*
 * Copyright 2010-2013 the original author or authors.
 * Copyright 2010-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -79,22 +79,24 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(method, parameters);
Query query = createQuery(new ConvertingParameterAccessor(operations.getConverter(), accessor));

Object result = null;

if (method.isGeoNearQuery() && method.isPageQuery()) {

MongoParameterAccessor countAccessor = new MongoParametersParameterAccessor(method, parameters);
Query countQuery = createCountQuery(new ConvertingParameterAccessor(operations.getConverter(), countAccessor));

return new GeoNearExecution(accessor).execute(query, countQuery);
result = new GeoNearExecution(accessor).execute(query, countQuery);
} else if (method.isGeoNearQuery()) {
return new GeoNearExecution(accessor).execute(query);
} else if (method.isCollectionQuery()) {
return new CollectionExecution(accessor.getPageable()).execute(query);
result = new CollectionExecution(accessor.getPageable()).execute(query);
} else if (method.isPageQuery()) {
return new PagedExecution(accessor.getPageable()).execute(query);
result = new PagedExecution(accessor.getPageable()).execute(query);
} else {
result = new SingleEntityExecution(isCountQuery()).execute(query);
}

Object result = new SingleEntityExecution(isCountQuery()).execute(query);

if (result == null) {
return result;
}
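The execution refactor above mainly affects repository methods that combine a geo-near query with paging, where a separate count query is now derived for the page metadata. A hedged sketch of such a repository method; in this release line the geo types live under org.springframework.data.mongodb.core.geo, and the repository, entity, and method names are illustrative:

```java
import org.springframework.data.domain.Pageable;
import org.springframework.data.mongodb.core.geo.Distance;
import org.springframework.data.mongodb.core.geo.GeoPage;
import org.springframework.data.mongodb.core.geo.Point;
import org.springframework.data.repository.Repository;

class Store {
	String id;
	Point location;
}

// A paging geo-near query: this hits the method.isGeoNearQuery() && method.isPageQuery()
// branch above, which now also runs the derived count query for the page metadata.
interface StoreRepository extends Repository<Store, String> {

	GeoPage<Store> findByLocationNear(Point location, Distance distance, Pageable pageable);
}
```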
@@ -0,0 +1,48 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;

import com.mongodb.Mongo;
import com.mongodb.MongoClient;

/**
 * Sample configuration class in default package.
 *
 * @see DATAMONGO-877
 * @author Oliver Gierke
 */
@Configuration
public class ConfigClassInDefaultPackage extends AbstractMongoConfiguration {

/*
 * (non-Javadoc)
 * @see org.springframework.data.mongodb.config.AbstractMongoConfiguration#getDatabaseName()
 */
@Override
protected String getDatabaseName() {
return "default";
}

/*
 * (non-Javadoc)
 * @see org.springframework.data.mongodb.config.AbstractMongoConfiguration#mongo()
 */
@Override
public Mongo mongo() throws Exception {
return new MongoClient();
}
}
@@ -0,0 +1,34 @@
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import org.junit.Test;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;

/**
 * Unit test for {@link ConfigClassInDefaultPackage}.
 *
 * @see DATAMONGO-877
 * @author Oliver Gierke
 */
public class ConfigClassInDefaultPackageUnitTests {

/**
 * @see DATAMONGO-877
 */
@Test
public void loadsConfigClassFromDefaultPackage() {
new AnnotationConfigApplicationContext(ConfigClassInDefaultPackage.class).close();
}
}
@@ -19,6 +19,7 @@ package org.springframework.data.mongodb.config;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.net.InetAddress;
import java.util.List;

import org.junit.Ignore;

@@ -40,8 +41,7 @@ import com.mongodb.ServerAddress;
@ContextConfiguration
public class MongoNamespaceReplicaSetTests {

@Autowired
private ApplicationContext ctx;
@Autowired private ApplicationContext ctx;

@Test
@SuppressWarnings("unchecked")

@@ -53,7 +53,10 @@ public class MongoNamespaceReplicaSetTests {
List<ServerAddress> replicaSetSeeds = (List<ServerAddress>) ReflectionTestUtils.getField(mfb, "replicaSetSeeds");

assertThat(replicaSetSeeds, is(notNullValue()));
assertThat(replicaSetSeeds, hasItems(new ServerAddress("127.0.0.1", 10001), new ServerAddress("localhost", 10002)));
assertThat(
replicaSetSeeds,
hasItems(new ServerAddress(InetAddress.getByName("127.0.0.1"), 10001),
new ServerAddress(InetAddress.getByName("localhost"), 10002)));
}

@Test
@@ -18,12 +18,15 @@ package org.springframework.data.mongodb.config;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.Collection;

import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import com.mongodb.ServerAddress;

@@ -35,6 +38,8 @@ import com.mongodb.ServerAddress;
 */
public class ServerAddressPropertyEditorUnitTests {

@Rule public ExpectedException expectedException = ExpectedException.none();

ServerAddressPropertyEditor editor;

@Before

@@ -81,11 +86,111 @@ public class ServerAddressPropertyEditorUnitTests {
assertNull(editor.getValue());
}

/**
 * @see DATAMONGO-808
 */
@Test
public void handleIPv6HostaddressLoopbackShort() throws UnknownHostException {

String hostAddress = "::1";
editor.setAsText(hostAddress);

assertSingleAddressWithPort(hostAddress, null, editor.getValue());
}

/**
 * @see DATAMONGO-808
 */
@Test
public void handleIPv6HostaddressLoopbackShortWithPort() throws UnknownHostException {

String hostAddress = "::1";
int port = 27017;
editor.setAsText(hostAddress + ":" + port);

assertSingleAddressWithPort(hostAddress, port, editor.getValue());
}

/**
 * Here we detect no port since the last segment of the address contains leading zeros.
 *
 * @see DATAMONGO-808
 */
@Test
public void handleIPv6HostaddressLoopbackLong() throws UnknownHostException {

String hostAddress = "0000:0000:0000:0000:0000:0000:0000:0001";
editor.setAsText(hostAddress);

assertSingleAddressWithPort(hostAddress, null, editor.getValue());
}

/**
 * @see DATAMONGO-808
 */
@Test
public void handleIPv6HostaddressLoopbackLongWithBrackets() throws UnknownHostException {

String hostAddress = "[0000:0000:0000:0000:0000:0000:0000:0001]";
editor.setAsText(hostAddress);

assertSingleAddressWithPort(hostAddress, null, editor.getValue());
}

/**
 * We can't tell whether the last part of the hostAddress represents a port or not.
 *
 * @see DATAMONGO-808
 */
@Test
public void shouldFailToHandleAmbiguousIPv6HostaddressLongWithoutPortAndWithoutBrackets() throws UnknownHostException {

expectedException.expect(IllegalArgumentException.class);

String hostAddress = "0000:0000:0000:0000:0000:0000:0000:128";
editor.setAsText(hostAddress);
}

/**
 * @see DATAMONGO-808
 */
@Test
public void handleIPv6HostaddressExampleAddressWithPort() throws UnknownHostException {

String hostAddress = "0000:0000:0000:0000:0000:0000:0000:0001";
int port = 27017;
editor.setAsText(hostAddress + ":" + port);

assertSingleAddressWithPort(hostAddress, port, editor.getValue());
}

/**
 * @see DATAMONGO-808
 */
@Test
public void handleIPv6HostaddressExampleAddressInBracketsWithPort() throws UnknownHostException {

String hostAddress = "[0000:0000:0000:0000:0000:0000:0000:0001]";
int port = 27017;
editor.setAsText(hostAddress + ":" + port);

assertSingleAddressWithPort(hostAddress, port, editor.getValue());
}

private static void assertSingleAddressOfLocalhost(Object result) throws UnknownHostException {
assertSingleAddressWithPort("localhost", null, result);
}

private static void assertSingleAddressWithPort(String hostAddress, Integer port, Object result)
throws UnknownHostException {

assertThat(result, is(instanceOf(ServerAddress[].class)));
Collection<ServerAddress> addresses = Arrays.asList((ServerAddress[]) result);
assertThat(addresses, hasSize(1));
assertThat(addresses, hasItem(new ServerAddress("localhost")));
if (port == null) {
assertThat(addresses, hasItem(new ServerAddress(InetAddress.getByName(hostAddress))));
} else {
assertThat(addresses, hasItem(new ServerAddress(InetAddress.getByName(hostAddress), port)));
}
}
}
@@ -70,7 +70,7 @@ public abstract class DBObjectUtils {
}

@SuppressWarnings("unchecked")
private static <T> T getTypedValue(DBObject source, String key, Class<T> type) {
public static <T> T getTypedValue(DBObject source, String key, Class<T> type) {

Object value = source.get(key);
assertThat(value, is(notNullValue()));
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -51,6 +51,7 @@ import org.springframework.dao.OptimisticLockingFailureException;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.annotation.PersistenceConstructor;
|
||||
import org.springframework.data.annotation.Version;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
@@ -70,6 +71,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBCollection;
|
||||
@@ -91,6 +93,7 @@ import com.mongodb.WriteResult;
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
* @author Komi Innocent
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:infrastructure.xml")
|
||||
@@ -160,6 +163,8 @@ public class MongoTemplateTests {
|
||||
template.dropCollection(ObjectWith3AliasedFields.class);
|
||||
template.dropCollection(ObjectWith3AliasedFieldsAndNestedAddress.class);
|
||||
template.dropCollection(BaseDoc.class);
|
||||
template.dropCollection(ObjectWithEnumValue.class);
|
||||
template.dropCollection(DocumentWithCollection.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1796,12 +1801,12 @@ public class MongoTemplateTests {
|
||||
|
||||
Document doc = new Document();
|
||||
doc.id = "4711";
|
||||
doc.model = new ModelA().withValue("foo");
|
||||
doc.model = new ModelA("foo");
|
||||
template.insert(doc);
|
||||
|
||||
Query query = new Query(Criteria.where("id").is(doc.id));
|
||||
String newModelValue = "bar";
|
||||
Update update = Update.update("model", new ModelA().withValue(newModelValue));
|
||||
Update update = Update.update("model", new ModelA(newModelValue));
|
||||
template.updateFirst(query, update, Document.class);
|
||||
|
||||
Document result = template.findOne(query, Document.class);
|
||||
@@ -2055,25 +2060,248 @@ public class MongoTemplateTests {
|
||||
assertThat(result.get(0).field, is(value));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-816
|
||||
*/
|
||||
@Test
|
||||
public void executeQueryShouldMapQueryBeforeQueryExecution() {
|
||||
|
||||
ObjectWithEnumValue o = new ObjectWithEnumValue();
|
||||
o.value = EnumValue.VALUE2;
|
||||
template.save(o);
|
||||
|
||||
Query q = Query.query(Criteria.where("value").in(EnumValue.VALUE2));
|
||||
|
||||
template.executeQuery(q, StringUtils.uncapitalize(ObjectWithEnumValue.class.getSimpleName()),
|
||||
new DocumentCallbackHandler() {
|
||||
|
||||
@Override
|
||||
public void processDocument(DBObject dbObject) throws MongoException, DataAccessException {
|
||||
|
||||
assertThat(dbObject, is(notNullValue()));
|
||||
|
||||
ObjectWithEnumValue result = template.getConverter().read(ObjectWithEnumValue.class, dbObject);
|
||||
|
||||
assertThat(result.value, is(EnumValue.VALUE2));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-811
|
||||
*/
|
||||
@Test
|
||||
public void updateFirstShouldIncreaseVersionForVersionedEntity() {
|
||||
|
||||
VersionedPerson person = new VersionedPerson();
|
||||
person.firstname = "Dave";
|
||||
person.lastname = "Matthews";
|
||||
template.save(person);
|
||||
assertThat(person.id, is(notNullValue()));
|
||||
|
||||
Query qry = query(where("id").is(person.id));
|
||||
VersionedPerson personAfterFirstSave = template.findOne(qry, VersionedPerson.class);
|
||||
assertThat(personAfterFirstSave.version, is(0L));
|
||||
|
||||
template.updateFirst(qry, Update.update("lastname", "Bubu"), VersionedPerson.class);
|
||||
|
||||
VersionedPerson personAfterUpdateFirst = template.findOne(qry, VersionedPerson.class);
|
||||
assertThat(personAfterUpdateFirst.version, is(1L));
|
||||
assertThat(personAfterUpdateFirst.lastname, is("Bubu"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-811
|
||||
*/
|
||||
@Test
|
||||
public void updateFirstShouldIncreaseVersionOnlyForFirstMatchingEntity() {
|
||||
|
||||
VersionedPerson person1 = new VersionedPerson();
|
||||
person1.firstname = "Dave";
|
||||
|
||||
VersionedPerson person2 = new VersionedPerson();
|
||||
person2.firstname = "Dave";
|
||||
|
||||
template.save(person1);
|
||||
template.save(person2);
|
||||
Query q = query(where("id").in(person1.id, person2.id));
|
||||
|
||||
template.updateFirst(q, Update.update("lastname", "Metthews"), VersionedPerson.class);
|
||||
|
||||
for (VersionedPerson p : template.find(q, VersionedPerson.class)) {
|
||||
if ("Metthews".equals(p.lastname)) {
|
||||
assertThat(p.version, equalTo(Long.valueOf(1)));
|
||||
} else {
|
||||
assertThat(p.version, equalTo(Long.valueOf(0)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-811
|
||||
*/
|
||||
@Test
|
||||
public void updateMultiShouldIncreaseVersionOfAllUpdatedEntities() {
|
||||
|
||||
VersionedPerson person1 = new VersionedPerson();
|
||||
person1.firstname = "Dave";
|
||||
|
||||
VersionedPerson person2 = new VersionedPerson();
|
||||
person2.firstname = "Dave";
|
||||
|
||||
template.save(person1);
|
||||
template.save(person2);
|
||||
|
||||
Query q = query(where("id").in(person1.id, person2.id));
|
||||
template.updateMulti(q, Update.update("lastname", "Metthews"), VersionedPerson.class);
|
||||
|
||||
for (VersionedPerson p : template.find(q, VersionedPerson.class)) {
|
||||
assertThat(p.version, equalTo(Long.valueOf(1)));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-686
|
||||
*/
|
||||
@Test
|
||||
public void itShouldBePossibleToReuseAnExistingQuery() {
|
||||
|
||||
Sample sample = new Sample();
|
||||
sample.id = "42";
|
||||
sample.field = "A";
|
||||
|
||||
template.save(sample);
|
||||
|
||||
Query query = new Query();
|
||||
query.addCriteria(where("_id").in("42", "43"));
|
||||
|
||||
assertThat(template.count(query, Sample.class), is(1L));
|
||||
|
||||
query.with(new PageRequest(0, 10));
|
||||
query.with(new Sort("field"));
|
||||
|
||||
assertThat(template.find(query, Sample.class), is(not(empty())));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-807
|
||||
*/
|
||||
@Test
|
||||
public void findAndModifyShouldRetainTypeInformationWithinUpdatedType() {
|
||||
|
||||
Document document = new Document();
|
||||
document.model = new ModelA("value1");
|
||||
|
||||
template.save(document);
|
||||
|
||||
Query query = query(where("id").is(document.id));
|
||||
Update update = Update.update("model", new ModelA("value2"));
|
||||
template.findAndModify(query, update, Document.class);
|
||||
|
||||
Document retrieved = template.findOne(query, Document.class);
|
||||
Assert.assertThat(retrieved.model, instanceOf(ModelA.class));
|
||||
Assert.assertThat(retrieved.model.value(), equalTo("value2"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-407
|
||||
*/
|
||||
@Test
|
||||
public void updatesShouldRetainTypeInformationEvenForCollections() {
|
||||
|
||||
DocumentWithCollection doc = new DocumentWithCollection();
|
||||
doc.id = "4711";
|
||||
doc.model = new ArrayList<Model>();
|
||||
doc.model.add(new ModelA("foo"));
|
||||
template.insert(doc);
|
||||
|
||||
Query query = new Query(Criteria.where("id").is(doc.id));
|
||||
query.addCriteria(where("model.value").is("foo"));
|
||||
String newModelValue = "bar";
|
||||
Update update = Update.update("model.$", new ModelA(newModelValue));
|
||||
template.updateFirst(query, update, DocumentWithCollection.class);
|
||||
|
||||
Query findQuery = new Query(Criteria.where("id").is(doc.id));
|
||||
DocumentWithCollection result = template.findOne(findQuery, DocumentWithCollection.class);
|
||||
|
||||
assertThat(result, is(notNullValue()));
|
||||
assertThat(result.id, is(doc.id));
|
||||
assertThat(result.model, is(notNullValue()));
|
||||
assertThat(result.model, hasSize(1));
|
||||
assertThat(result.model.get(0).value(), is(newModelValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-828
|
||||
*/
|
||||
@Test
|
||||
public void updateFirstShouldDoNothingWhenCalledForEntitiesThatDoNotExist() {
|
||||
|
||||
Query q = query(where("id").is(Long.MIN_VALUE));
|
||||
|
||||
template.updateFirst(q, Update.update("lastname", "supercalifragilisticexpialidocious"), VersionedPerson.class);
|
||||
assertThat(template.findOne(q, VersionedPerson.class), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-773
|
||||
*/
|
||||
@Test
|
||||
public void testShouldSupportQueryWithIncludedDbRefField() {
|
||||
|
||||
Sample sample = new Sample("47111", "foo");
|
||||
template.save(sample);
|
||||
|
||||
DocumentWithDBRefCollection doc = new DocumentWithDBRefCollection();
|
||||
doc.id = "4711";
|
||||
doc.dbRefProperty = sample;
|
||||
|
||||
template.save(doc);
|
||||
|
||||
Query qry = query(where("id").is(doc.id));
|
||||
qry.fields().include("dbRefProperty");
|
||||
|
||||
List<DocumentWithDBRefCollection> result = template.find(qry, DocumentWithDBRefCollection.class);
|
||||
|
||||
assertThat(result, is(notNullValue()));
|
||||
assertThat(result, hasSize(1));
|
||||
assertThat(result.get(0), is(notNullValue()));
|
||||
assertThat(result.get(0).dbRefProperty, is(notNullValue()));
|
||||
assertThat(result.get(0).dbRefProperty.field, is(sample.field));
|
||||
}
|
||||
|
||||
static class DocumentWithDBRefCollection {
|
||||
|
||||
@Id public String id;
|
||||
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef//
|
||||
public List<Sample> dbRefAnnotatedList;
|
||||
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef//
|
||||
public Sample dbRefProperty;
|
||||
}
|
||||
|
||||
static class DocumentWithCollection {
|
||||
|
||||
@Id public String id;
|
||||
public List<Model> model;
|
||||
}
|
||||
|
||||
static interface Model {
|
||||
String value();
|
||||
|
||||
Model withValue(String value);
|
||||
}
|
||||
|
||||
static class ModelA implements Model {
|
||||
|
||||
private String value;
|
||||
|
||||
@Override
|
||||
public String value() {
|
||||
return this.value;
|
||||
ModelA(String value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Model withValue(String value) {
|
||||
this.value = value;
|
||||
return this;
|
||||
public String value() {
|
||||
return this.value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2098,6 +2326,13 @@ public class MongoTemplateTests {
|
||||
|
||||
@Id String id;
|
||||
String field;
|
||||
|
||||
public Sample() {}
|
||||
|
||||
public Sample(String id, String field) {
|
||||
this.id = id;
|
||||
this.field = field;
|
||||
}
|
||||
}
|
||||
|
||||
static class TestClass {
|
||||
@@ -2180,4 +2415,14 @@ public class MongoTemplateTests {
|
||||
static class ObjectWith3AliasedFieldsAndNestedAddress extends ObjectWith3AliasedFields {
|
||||
@Field("adr") Address address;
|
||||
}
|
||||
|
||||
static enum EnumValue {
|
||||
VALUE1, VALUE2, VALUE3
|
||||
}
|
||||
|
||||
static class ObjectWithEnumValue {
|
||||
|
||||
@Id String id;
|
||||
EnumValue value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -56,6 +56,7 @@ import com.mongodb.QueryBuilder;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class QueryMapperUnitTests {
|
||||
@@ -466,6 +467,55 @@ public class QueryMapperUnitTests {
|
||||
assertThat(result.get("myvalue"), is((Object) "$center"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-805
|
||||
*/
|
||||
@Test
|
||||
public void shouldExcludeDBRefAssociation() {
|
||||
|
||||
Query query = query(where("someString").is("foo"));
|
||||
query.fields().exclude("reference");
|
||||
|
||||
BasicMongoPersistentEntity<?> entity = context.getPersistentEntity(WithDBRef.class);
|
||||
DBObject queryResult = mapper.getMappedObject(query.getQueryObject(), entity);
|
||||
DBObject fieldsResult = mapper.getMappedObject(query.getFieldsObject(), entity);
|
||||
|
||||
assertThat(queryResult.get("someString"), is((Object) "foo"));
|
||||
assertThat(fieldsResult.get("reference"), is((Object) 0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-686
|
||||
*/
|
||||
@Test
|
||||
public void queryMapperShouldNotChangeStateInGivenQueryObjectWhenIdConstrainedByInList() {
|
||||
|
||||
BasicMongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(Sample.class);
|
||||
String idPropertyName = persistentEntity.getIdProperty().getName();
|
||||
DBObject queryObject = query(where(idPropertyName).in("42")).getQueryObject();
|
||||
|
||||
Object idValuesBefore = getAsDBObject(queryObject, idPropertyName).get("$in");
|
||||
mapper.getMappedObject(queryObject, persistentEntity);
|
||||
Object idValuesAfter = getAsDBObject(queryObject, idPropertyName).get("$in");
|
||||
|
||||
assertThat(idValuesAfter, is(idValuesBefore));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-773
|
||||
*/
|
||||
@Test
|
||||
public void queryMapperShouldBeAbleToProcessQueriesThatIncludeDbRefFields() {
|
||||
|
||||
BasicMongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(WithDBRef.class);
|
||||
|
||||
Query qry = query(where("someString").is("abc"));
|
||||
qry.fields().include("reference");
|
||||
|
||||
DBObject mappedFields = mapper.getMappedObject(qry.getFieldsObject(), persistentEntity);
|
||||
assertThat(mappedFields, is(notNullValue()));
|
||||
}
|
||||
|
||||
class IdWrapper {
|
||||
Object id;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,25 +17,33 @@ package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.springframework.data.mongodb.core.DBObjectUtils.*;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.DBObjectUtils;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link UpdateMapper}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class UpdateMapperUnitTests {
|
||||
@@ -43,11 +51,26 @@ public class UpdateMapperUnitTests {
|
||||
@Mock MongoDbFactory factory;
|
||||
MappingMongoConverter converter;
|
||||
MongoMappingContext context;
|
||||
UpdateMapper mapper;
|
||||
|
||||
private Converter<NestedEntity, DBObject> writingConverterSpy;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Before
|
||||
public void setUp() {
|
||||
context = new MongoMappingContext();
|
||||
converter = new MappingMongoConverter(factory, context);
|
||||
|
||||
this.writingConverterSpy = Mockito.spy(new NestedEntityWriteConverter());
|
||||
CustomConversions conversions = new CustomConversions(Arrays.asList(writingConverterSpy));
|
||||
|
||||
this.context = new MongoMappingContext();
|
||||
this.context.setSimpleTypeHolder(conversions.getSimpleTypeHolder());
|
||||
this.context.initialize();
|
||||
|
||||
this.converter = new MappingMongoConverter(factory, context);
|
||||
this.converter.setCustomConversions(conversions);
|
||||
this.converter.afterPropertiesSet();
|
||||
|
||||
this.mapper = new UpdateMapper(converter);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -62,32 +85,170 @@ public class UpdateMapperUnitTests {
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ParentClass.class));
|
||||
|
||||
DBObject push = DBObjectUtils.getAsDBObject(mappedObject, "$push");
|
||||
DBObject list = DBObjectUtils.getAsDBObject(push, "list");
|
||||
DBObject push = getAsDBObject(mappedObject, "$push");
|
||||
DBObject list = getAsDBObject(push, "aliased");
|
||||
|
||||
assertThat(list.get("_class"), is((Object) ConcreteChildClass.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-807
|
||||
*/
|
||||
@Test
|
||||
public void updateMapperShouldRetainTypeInformationForNestedEntities() {
|
||||
|
||||
Update update = Update.update("model", new ModelImpl(1));
|
||||
UpdateMapper mapper = new UpdateMapper(converter);
|
||||
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ModelWrapper.class));
|
||||
|
||||
DBObject set = getAsDBObject(mappedObject, "$set");
|
||||
DBObject modelDbObject = (DBObject) set.get("model");
|
||||
assertThat(modelDbObject.get("_class"), not(nullValue()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-807
|
||||
*/
|
||||
@Test
|
||||
public void updateMapperShouldNotPersistTypeInformationForKnownSimpleTypes() {
|
||||
|
||||
Update update = Update.update("model.value", 1);
|
||||
UpdateMapper mapper = new UpdateMapper(converter);
|
||||
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ModelWrapper.class));
|
||||
|
||||
DBObject set = getAsDBObject(mappedObject, "$set");
|
||||
assertThat(set.get("_class"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-807
|
||||
*/
|
||||
@Test
|
||||
public void updateMapperShouldNotPersistTypeInformationForNullValues() {
|
||||
|
||||
Update update = Update.update("model", null);
|
||||
UpdateMapper mapper = new UpdateMapper(converter);
|
||||
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ModelWrapper.class));
|
||||
|
||||
DBObject set = getAsDBObject(mappedObject, "$set");
|
||||
assertThat(set.get("_class"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-407
|
||||
*/
|
||||
@Test
|
||||
public void updateMapperShouldRetainTypeInformationForNestedCollectionElements() {
|
||||
|
||||
Update update = Update.update("list.$", new ConcreteChildClass("42", "bubu"));
|
||||
|
||||
UpdateMapper mapper = new UpdateMapper(converter);
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ParentClass.class));
|
||||
|
||||
DBObject set = getAsDBObject(mappedObject, "$set");
|
||||
DBObject modelDbObject = getAsDBObject(set, "aliased.$");
|
||||
assertThat(modelDbObject.get("_class"), is((Object) ConcreteChildClass.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-407
|
||||
*/
|
||||
@Test
|
||||
public void updateMapperShouldSupportNestedCollectionElementUpdates() {
|
||||
|
||||
Update update = Update.update("list.$.value", "foo").set("list.$.otherValue", "bar");
|
||||
|
||||
UpdateMapper mapper = new UpdateMapper(converter);
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ParentClass.class));
|
||||
|
||||
DBObject set = getAsDBObject(mappedObject, "$set");
|
||||
assertThat(set.get("aliased.$.value"), is((Object) "foo"));
|
||||
assertThat(set.get("aliased.$.otherValue"), is((Object) "bar"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-407
|
||||
*/
|
||||
@Test
|
||||
public void updateMapperShouldWriteTypeInformationForComplexNestedCollectionElementUpdates() {
|
||||
|
||||
Update update = Update.update("list.$.value", "foo").set("list.$.someObject", new ConcreteChildClass("42", "bubu"));
|
||||
|
||||
UpdateMapper mapper = new UpdateMapper(converter);
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ParentClass.class));
|
||||
|
||||
DBObject dbo = getAsDBObject(mappedObject, "$set");
|
||||
assertThat(dbo.get("aliased.$.value"), is((Object) "foo"));
|
||||
|
||||
DBObject someObject = getAsDBObject(dbo, "aliased.$.someObject");
|
||||
assertThat(someObject, is(notNullValue()));
|
||||
assertThat(someObject.get("_class"), is((Object) ConcreteChildClass.class.getName()));
|
||||
assertThat(someObject.get("value"), is((Object) "bubu"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-410
|
||||
*/
|
||||
@Test
|
||||
public void testUpdateMapperShouldConsiderCustomWriteTarget() {
|
||||
|
||||
List<NestedEntity> someValues = Arrays.asList(new NestedEntity("spring"), new NestedEntity("data"),
|
||||
new NestedEntity("mongodb"));
|
||||
NestedEntity[] array = new NestedEntity[someValues.size()];
|
||||
|
||||
Update update = new Update().pushAll("collectionOfNestedEntities", someValues.toArray(array));
|
||||
mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(DomainEntity.class));
|
||||
|
||||
verify(writingConverterSpy, times(3)).convert(Mockito.any(NestedEntity.class));
|
||||
}
|
||||
|
||||
static interface Model {}
|
||||
|
||||
static class ModelImpl implements Model {
|
||||
public int value;
|
||||
|
||||
public ModelImpl(int value) {
|
||||
this.value = value;
|
||||
}
|
||||
}
|
||||
|
||||
public class ModelWrapper {
|
||||
Model model;
|
||||
}
|
||||
|
||||
static class ParentClass {
|
||||
|
||||
String id;
|
||||
|
||||
@Field("aliased")//
|
||||
List<? extends AbstractChildClass> list;
|
||||
|
||||
public ParentClass(String id, List<? extends AbstractChildClass> list) {
|
||||
this.id = id;
|
||||
this.list = list;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static abstract class AbstractChildClass {
|
||||
|
||||
String id;
|
||||
String value;
|
||||
String otherValue;
|
||||
AbstractChildClass someObject;
|
||||
|
||||
public AbstractChildClass(String id, String value) {
|
||||
this.id = id;
|
||||
this.value = value;
|
||||
this.otherValue = "other_" + value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -97,4 +258,27 @@ public class UpdateMapperUnitTests {
|
||||
super(id, value);
|
||||
}
|
||||
}
|
||||
|
||||
static class DomainEntity {
|
||||
List<NestedEntity> collectionOfNestedEntities;
|
||||
}
|
||||
|
||||
static class NestedEntity {
|
||||
String name;
|
||||
|
||||
public NestedEntity(String name) {
|
||||
super();
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@WritingConverter
|
||||
static class NestedEntityWriteConverter implements Converter<NestedEntity, DBObject> {
|
||||
|
||||
@Override
|
||||
public DBObject convert(NestedEntity source) {
|
||||
return new BasicDBObject();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,6 +24,7 @@ import static org.springframework.data.mongodb.core.query.Update.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@@ -55,6 +56,7 @@ import com.mongodb.MongoException;
|
||||
/**
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class MappingTests {
|
||||
|
||||
@@ -512,28 +514,96 @@ public class MappingTests {
|
||||
assertThat(result.items.get(0).id, is(items.id));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-805
|
||||
*/
|
||||
@Test
|
||||
public void supportExcludeDbRefAssociation() {
|
||||
|
||||
template.dropCollection(Item.class);
|
||||
template.dropCollection(Container.class);
|
||||
|
||||
Item item = new Item();
|
||||
template.insert(item);
|
||||
|
||||
Container container = new Container("foo");
|
||||
container.item = item;
|
||||
|
||||
template.insert(container);
|
||||
|
||||
Query query = new Query(Criteria.where("id").is("foo"));
|
||||
query.fields().exclude("item");
|
||||
Container result = template.findOne(query, Container.class);
|
||||
|
||||
assertThat(result, is(notNullValue()));
|
||||
assertThat(result.item, is(nullValue()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-805
|
||||
*/
|
||||
@Test
|
||||
public void shouldMapFieldsOfIterableEntity() {
|
||||
|
||||
template.dropCollection(IterableItem.class);
|
||||
template.dropCollection(Container.class);
|
||||
|
||||
Item item = new IterableItem();
|
||||
item.value = "bar";
|
||||
template.insert(item);
|
||||
|
||||
Container container = new Container("foo");
|
||||
container.item = item;
|
||||
|
||||
template.insert(container);
|
||||
|
||||
Query query = new Query(Criteria.where("id").is("foo"));
|
||||
Container result = template.findOne(query, Container.class);
|
||||
|
||||
assertThat(result, is(notNullValue()));
|
||||
assertThat(result.item, is(notNullValue()));
|
||||
assertThat(result.item.value, is("bar"));
|
||||
}
|
||||
|
||||
static class Container {
|
||||
|
||||
@Id
|
||||
final String id;
|
||||
@Id final String id;
|
||||
|
||||
public Container() {
|
||||
id = new ObjectId().toString();
|
||||
}
|
||||
|
||||
@DBRef
|
||||
Item item;
|
||||
@DBRef
|
||||
List<Item> items;
|
||||
public Container(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
@DBRef Item item;
|
||||
@DBRef List<Item> items;
|
||||
}
|
||||
|
||||
static class Item {
|
||||
|
||||
@Id
|
||||
final String id;
|
||||
@Id final String id;
|
||||
String value;
|
||||
|
||||
public Item() {
|
||||
this.id = new ObjectId().toString();
|
||||
}
|
||||
}
|
||||
|
||||
static class IterableItem extends Item implements Iterable<ItemData> {
|
||||
|
||||
List<ItemData> data = new ArrayList<MappingTests.ItemData>();
|
||||
|
||||
@Override
|
||||
public Iterator<ItemData> iterator() {
|
||||
return data.iterator();
|
||||
}
|
||||
}
|
||||
|
||||
static class ItemData {
|
||||
|
||||
String id;
|
||||
String value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -21,6 +21,7 @@ import static org.junit.Assert.*;
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.mongodb.core.DBObjectUtils;
|
||||
import org.springframework.data.mongodb.core.geo.Distance;
|
||||
import org.springframework.data.mongodb.core.geo.Metric;
|
||||
import org.springframework.data.mongodb.core.geo.Metrics;
|
||||
@@ -31,6 +32,7 @@ import org.springframework.data.mongodb.core.geo.Point;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class NearQueryUnitTests {
|
||||
|
||||
@@ -123,4 +125,36 @@ public class NearQueryUnitTests {
|
||||
assertThat(query.getSkip(), is(pageable.getPageNumber() * pageable.getPageSize()));
|
||||
assertThat((Integer) query.toDBObject().get("num"), is((pageable.getPageNumber() + 1) * pageable.getPageSize()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-829
|
||||
*/
|
||||
@Test
|
||||
public void nearQueryShouldIgnoreZeroLimitFromQuery() {
|
||||
|
||||
NearQuery query = NearQuery.near(new Point(1, 2)).query(Query.query(Criteria.where("foo").is("bar")));
|
||||
assertThat(query.toDBObject().get("num"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-829
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void nearQueryShouldThrowExceptionWhenGivenANullQuery() {
|
||||
NearQuery.near(new Point(1, 2)).query(null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-829
|
||||
*/
|
||||
@Test
|
||||
public void numShouldNotBeAlteredByQueryWithoutPageable() {
|
||||
|
||||
int num = 100;
|
||||
NearQuery query = NearQuery.near(new Point(1, 2));
|
||||
query.num(num);
|
||||
query.query(Query.query(Criteria.where("foo").is("bar")));
|
||||
|
||||
assertThat(DBObjectUtils.getTypedValue(query.toDBObject(), "num", Integer.class), is(num));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -42,15 +42,15 @@ import com.mongodb.gridfs.GridFSFile;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:gridfs/gridfs.xml")
|
||||
public class GridFsTemplateIIntegrationTests {
|
||||
public class GridFsTemplateIntegrationTests {
|
||||
|
||||
Resource resource = new ClassPathResource("gridfs/gridfs.xml");
|
||||
|
||||
@Autowired
|
||||
GridFsOperations operations;
|
||||
@Autowired GridFsOperations operations;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
@@ -127,6 +127,14 @@ public class GridFsTemplateIIntegrationTests {
|
||||
assertThat(resources[0].getContentType(), is(reference.getContentType()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-813
|
||||
*/
|
||||
@Test
|
||||
public void getResourceShouldReturnNullForNonExistingResource() {
|
||||
assertThat(operations.getResource("doesnotexist"), is(nullValue()));
|
||||
}
|
||||
|
||||
private static void assertSame(GridFSFile left, GridFSFile right) {
|
||||
|
||||
assertThat(left.getId(), is(right.getId()));
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -680,4 +680,16 @@ public abstract class AbstractPersonRepositoryIntegrationTests {
|
||||
assertThat(results.isLastPage(), is(true));
|
||||
assertThat(results.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-871
|
||||
*/
|
||||
@Test
|
||||
public void findsPersonsByFirstnameAsArray() {
|
||||
|
||||
Person[] result = repository.findByThePersonsFirstnameAsArray("Leroi");
|
||||
|
||||
assertThat(result, is(arrayWithSize(1)));
|
||||
assertThat(result, is(arrayContaining(leroi)));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -69,6 +69,12 @@ public interface PersonRepository extends MongoRepository<Person, String>, Query
|
||||
@Query(value = "{ 'firstname' : ?0 }", fields = "{ 'firstname': 1, 'lastname': 1}")
|
||||
List<Person> findByThePersonsFirstname(String firstname);
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-871
|
||||
*/
|
||||
@Query(value = "{ 'firstname' : ?0 }")
|
||||
Person[] findByThePersonsFirstnameAsArray(String firstname);
|
||||
|
||||
/**
|
||||
* Returns all {@link Person}s with a firstname matching the given one (*-wildcard supported).
|
||||
*
|
||||
|
||||
@@ -28,6 +28,7 @@ import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.repository.PersonRepository;
|
||||
import org.springframework.data.repository.core.support.RepositoryFactorySupport;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
|
||||
@@ -39,15 +40,11 @@ import org.springframework.test.util.ReflectionTestUtils;
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class MongoRepositoryFactoryBeanUnitTests {
|
||||
|
||||
@Mock
|
||||
MongoOperations operations;
|
||||
@Mock MongoOperations operations;
|
||||
|
||||
@Mock
|
||||
MongoConverter converter;
|
||||
@Mock MongoConverter converter;
|
||||
|
||||
@Mock
|
||||
@SuppressWarnings("rawtypes")
|
||||
MappingContext context;
|
||||
@Mock @SuppressWarnings("rawtypes") MappingContext context;
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("rawtypes")
|
||||
@@ -75,6 +72,7 @@ public class MongoRepositoryFactoryBeanUnitTests {
|
||||
when(operations.getConverter()).thenReturn(converter);
|
||||
when(converter.getMappingContext()).thenReturn(context);
|
||||
|
||||
factoryBean.setRepositoryInterface(PersonRepository.class);
|
||||
factoryBean.setMongoOperations(operations);
|
||||
factoryBean.afterPropertiesSet();
|
||||
|
||||
|
||||
@@ -56,7 +56,7 @@
|
||||
<xi:include href="introduction/why-sd-doc.xml"/>
|
||||
<xi:include href="introduction/requirements.xml"/>
|
||||
<xi:include href="introduction/getting-started.xml"/>
|
||||
<xi:include href="https://raw.github.com/SpringSource/spring-data-commons/1.6.3.RELEASE/src/docbkx/repositories.xml">
|
||||
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.6.5.RELEASE/src/docbkx/repositories.xml">
|
||||
<xi:fallback href="../../../spring-data-commons/src/docbkx/repositories.xml" />
|
||||
</xi:include>
|
||||
</part>
|
||||
@@ -76,10 +76,10 @@
|
||||
<part id="appendix">
|
||||
<title>Appendix</title>
|
||||
|
||||
<xi:include href="https://raw.github.com/SpringSource/spring-data-commons/1.6.3.RELEASE/src/docbkx/repository-namespace-reference.xml">
|
||||
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.6.5.RELEASE/src/docbkx/repository-namespace-reference.xml">
|
||||
<xi:fallback href="../../../spring-data-commons/src/docbkx/repository-namespace-reference.xml" />
|
||||
</xi:include>
|
||||
<xi:include href="https://raw.github.com/SpringSource/spring-data-commons/1.6.3.RELEASE/src/docbkx/repository-query-keywords-reference.xml">
|
||||
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.6.5.RELEASE/src/docbkx/repository-query-keywords-reference.xml">
|
||||
<xi:fallback href="../../../spring-data-commons/src/docbkx/repository-query-keywords-reference.xml" />
|
||||
</xi:include>
|
||||
</part>
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
<title>Community Forum</title>
|
||||
|
||||
<para>The Spring Data <ulink
|
||||
url="http://forum.springframework.org/forumdisplay.php?f=80">forum
|
||||
url="http://forum.spring.io/forum/spring-projects/data/nosql">forum
|
||||
</ulink> is a message board for all Spring Data (not just Document)
|
||||
users to share information and help each other. Note that registration
|
||||
is needed <emphasis>only</emphasis> for posting.</para>
|
||||
@@ -30,7 +30,7 @@
|
||||
|
||||
<para>Professional, from-the-source support, with guaranteed response
|
||||
time, is available from <ulink
|
||||
url="http://www.springsource.com">SpringSource</ulink>, the company
|
||||
url="http://www.gopivotal.com/">Pivotal Software, Inc.</ulink>, the company
|
||||
behind Spring Data and Spring.</para>
|
||||
</section>
|
||||
</section>
|
||||
@@ -40,12 +40,12 @@
|
||||
|
||||
<para>For information on the Spring Data Mongo source code repository,
|
||||
nightly builds and snapshot artifacts please see the <ulink
|
||||
url="http://www.springsource.org/spring-data/mongodb">Spring Data Mongo
|
||||
url="http://projects.spring.io/spring-data-mongodb">Spring Data Mongo
|
||||
homepage</ulink>.</para>
|
||||
|
||||
<para>You can help make Spring Data best serve the needs of the Spring
|
||||
community by interacting with developers through the Spring Community
|
||||
<ulink url="http://forum.springsource.org">forums</ulink>. To follow
|
||||
<ulink url="http://forum.spring.io">forums</ulink>. To follow
|
||||
developer activity look for the mailing list information on the Spring
|
||||
Data Mongo homepage.</para>
|
||||
|
||||
@@ -55,10 +55,10 @@
|
||||
|
||||
<para>To stay up to date with the latest news and announcements in the
|
||||
Spring eco system, subscribe to the Spring Community <ulink
|
||||
url="http://www.springframework.org/">Portal</ulink>.</para>
|
||||
url="http://spring.io">Portal</ulink>.</para>
|
||||
|
||||
<para>Lastly, you can follow the SpringSource Data <ulink
|
||||
url="http://blog.springsource.com/category/data-access/">blog </ulink>or
|
||||
url="http://spring.io/blog/">blog </ulink>or
|
||||
the project team on Twitter (<ulink
|
||||
url="http://twitter.com/SpringData">SpringData</ulink>)</para>
|
||||
</section>
|
||||
|
||||
@@ -12,17 +12,17 @@
|
||||
<title>Knowing Spring</title>
|
||||
|
||||
<para>Spring Data uses Spring framework's <ulink
|
||||
url="http://static.springframework.org/spring/docs/3.0.x/reference/html/spring-core.html">core</ulink>
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/spring-core.html">core</ulink>
|
||||
functionality, such as the <ulink
|
||||
url="http://static.springframework.org/spring/docs/3.0.x/reference/html/beans.html">IoC</ulink>
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/beans.html">IoC</ulink>
|
||||
container, <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/reference/html/validation.html#core-convert">type
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/validation.html#core-convert">type
|
||||
conversion system</ulink>, <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/reference/html/expressions.html">expression
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/expressions.html">expression
|
||||
language</ulink>, <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/reference/html/jmx.html">JMX
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/jmx.html">JMX
|
||||
integration</ulink>, and portable <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/reference/html/dao.html#dao-exceptions">DAO
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/dao.html#dao-exceptions">DAO
|
||||
exception hierarchy</ulink>. While it is not important to know the
|
||||
Spring APIs, understanding the concepts behind them is. At a minimum,
|
||||
the idea behind IoC should be familiar for whatever IoC container you
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
<para>Spring Data Document 1.x binaries require JDK level 6.0 or above,
|
||||
and
|
||||
<ulink url="http://www.springsource.org/documentation">Spring Framework</ulink>
|
||||
<ulink url="http://spring.io/docs">Spring Framework</ulink>
|
||||
3.0.x and above.
|
||||
</para>
|
||||
<para>
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
you perform administrative operations such as drop or create a database. The
|
||||
JMX features build upon the JMX feature set available in the Spring
|
||||
Framework. See <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/reference/jmx.html">here
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/jmx.html">here
|
||||
</ulink> for more details.</para>
|
||||
|
||||
<section id="mongodb:jmx-configuration">
|
||||
|
||||
@@ -638,7 +638,7 @@ public class Person {
|
||||
<para>Spring 3.0 introduced a core.convert package that provides a
|
||||
general type conversion system. This is described in detail in the
|
||||
Spring reference documentation section entitled <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/validation.html#core-convert">Spring
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/validation.html#core-convert">Spring
|
||||
3 Type Conversion</ulink>.</para>
|
||||
</note>
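
For readers skimming this hunk: the type conversion support referenced in the note above is exactly what the UpdateMapperUnitTests changes earlier in this diff exercise. Below is a minimal sketch of a custom writing converter and its registration, assuming a simple `Person` class with a `getName()` accessor and an available `mongoDbFactory`; the class and property names are illustrative and not part of this change set.

```java
import java.util.Arrays;

import org.springframework.core.convert.converter.Converter;
import org.springframework.data.convert.WritingConverter;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

// Writes Person instances to DBObjects instead of relying on the default object mapping.
@WritingConverter
class PersonWriteConverter implements Converter<Person, DBObject> {

	@Override
	public DBObject convert(Person source) {
		return new BasicDBObject("name", source.getName());
	}
}
```

Registering the converter mirrors the setUp() method of UpdateMapperUnitTests shown above (mongoDbFactory is assumed to be a MongoDbFactory instance):

```java
CustomConversions conversions = new CustomConversions(Arrays.asList(new PersonWriteConverter()));

// Make the mapping context aware of the types handled by custom converters.
MongoMappingContext context = new MongoMappingContext();
context.setSimpleTypeHolder(conversions.getSimpleTypeHolder());
context.initialize();

MappingMongoConverter converter = new MappingMongoConverter(mongoDbFactory, context);
converter.setCustomConversions(conversions);
converter.afterPropertiesSet();
```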
|
||||
|
||||
|
||||
@@ -84,7 +84,7 @@
|
||||
or higher. The latest production release (2.0.x as of this writing) is
|
||||
recommended. An easy way to bootstrap setting up a working environment is
|
||||
to create a Spring based project in <ulink
|
||||
url="http://www.springsource.com/developer/sts">STS</ulink>.</para>
|
||||
url="http://spring.io/tools/sts">STS</ulink>.</para>
|
||||
|
||||
<para>First you need to set up a running Mongodb server. Refer to the
|
||||
<ulink url="http://www.mongodb.org/display/DOCS/Quickstart">Mongodb Quick
|
||||
@@ -264,9 +264,9 @@ public class MongoApp {
|
||||
<para>For those not familiar with how to configure the Spring
|
||||
container using Java based bean metadata instead of XML based metadata
|
||||
see the high level introduction in the reference docs <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/new-in-3.html#new-java-configuration"
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/new-in-3.html#new-java-configuration"
|
||||
userlevel="">here </ulink> as well as the detailed documentation<ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/beans.html#beans-java-instantiating-container">
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/beans.html#beans-java-instantiating-container">
|
||||
here</ulink>.</para>
|
||||
</note></para>
|
||||
|
||||
@@ -310,7 +310,7 @@ public class AppConfig {
|
||||
classes annotated with the <literal>@Repository</literal> annotation.
|
||||
This hierarchy and use of <literal>@Repository</literal> is described in
|
||||
<ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/dao.html">Spring's
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/dao.html">Spring's
|
||||
DAO support features</ulink>.</para>
|
||||
|
||||
<para>An example of a Java based bean metadata that supports exception
|
||||
@@ -1986,7 +1986,7 @@ GeoResults<Restaurant> = operations.geoNear(query, Restaurant.class);</pro
|
||||
methods on MongoOperations to simplify the creation and execution of
|
||||
Map-Reduce operations. It can convert the results of a Map-Reduce
|
||||
operation to a POJO also integrates with Spring's <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/resources.html">Resource
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/resources.html">Resource
|
||||
abstraction</ulink> abstraction. This will let you place your JavaScript
|
||||
files on the file system, classpath, http server or any other Spring
|
||||
Resource implementation and then reference the JavaScript resources via an
|
||||
@@ -2100,7 +2100,7 @@ MapReduceResults<ValueObject> results = mongoOperations.mapReduce(query, "
|
||||
providing methods on MongoOperations to simplify the creation and
|
||||
execution of group operations. It can convert the results of the group
|
||||
operation to a POJO and also integrates with Spring's <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/resources.html">Resource
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/resources.html">Resource
|
||||
abstraction</ulink>. This will let you place your JavaScript
|
||||
files on the file system, classpath, http server or any other Spring
|
||||
Resource implementation and then reference the JavaScript resources via an
|
||||
@@ -2710,7 +2710,7 @@ List<DBObject> resultList = result.getMappedResults();</programlisting>
|
||||
<note>
|
||||
<para>For more information on the Spring type conversion service see the
|
||||
reference docs <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/reference/validation.html#core-convert">here</ulink>.</para>
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/validation.html#core-convert">here</ulink>.</para>
|
||||
</note>
|
||||
|
||||
<section id="mongo.custom-converters.writer">
|
||||
@@ -3069,7 +3069,7 @@ mongoTemplate.dropCollection("MyNewCollection"); </programlisting>
|
||||
interface.</para>
|
||||
|
||||
<para>The motivation behind mapping to Spring's <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/reference/dao.html#dao-exceptions">consistent
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/dao.html#dao-exceptions">consistent
|
||||
data access exception hierarchy</ulink> is that you are then able to write
|
||||
portable and descriptive exception handling code without resorting to
|
||||
coding against <ulink
|
||||
@@ -3215,8 +3215,8 @@ mongoTemplate.dropCollection("MyNewCollection"); </programlisting>
|
||||
</beans></programlisting>
|
||||
</example>
|
||||
|
||||
<para>You can no get the template injected and perform storing and
|
||||
retrieving operations to it.</para>
|
||||
<para>The template can now be injected and used to perform storage and
|
||||
retrieval operations.</para>
|
||||
|
||||
<example>
|
||||
<title>Using GridFsTemplate to store files</title>
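
The body of this example lies outside the hunk. As a rough sketch of the GridFsTemplate calls involved (the file name, content type and metadata key are assumptions; the getResource call reflects the DATAMONGO-813 behaviour of returning null for missing files):

```java
import java.io.InputStream;

import org.springframework.core.io.ClassPathResource;
import org.springframework.data.mongodb.gridfs.GridFsOperations;
import org.springframework.data.mongodb.gridfs.GridFsResource;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

class GridFsStoreExample {

	private final GridFsOperations operations;

	GridFsStoreExample(GridFsOperations operations) {
		this.operations = operations;
	}

	void storeAndLookup() throws Exception {

		InputStream content = new ClassPathResource("gridfs/gridfs.xml").getInputStream();
		DBObject metadata = new BasicDBObject("origin", "example");

		// Stores the stream under the given filename with a content type and custom metadata.
		operations.store(content, "gridfs.xml", "application/xml", metadata);

		// Since DATAMONGO-813 a lookup for a non-existing file returns null instead of throwing an NPE.
		GridFsResource missing = operations.getResource("doesnotexist");
	}
}
```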
|
||||
|
||||
@@ -1,6 +1,85 @@
|
||||
Spring Data MongoDB Changelog
|
||||
=============================
|
||||
|
||||
Changes in version 1.3.5.RELEASE (2014-03-10)
|
||||
---------------------------------------------
|
||||
** Bug
|
||||
* [DATAMONGO-829] - NearQuery, when used in conjunction with a Query, no longer sets num=0, unless Query specifies otherwise.
|
||||
* [DATAMONGO-871] - Repository queries support array return type.
|
||||
** Improvement
|
||||
* [DATAMONGO-865] - Avoid ClassNotFoundException during test runs.
|
||||
|
||||
Changes in version 1.4.0.RELEASE (2014-02-24)
|
||||
---------------------------------------------
|
||||
** Bug
|
||||
* [DATAMONGO-354] - MongoTemplate should support multiple $pushAll in one update.
|
||||
* [DATAMONGO-404] - Removing a DBRef using pull does not work.
|
||||
* [DATAMONGO-410] - Update with pushAll should recognize defined Converter.
|
||||
* [DATAMONGO-812] - $pushAll is deprecated since MongoDB 2.4; move to $push $each.
|
||||
* [DATAMONGO-830] - Fix NPE during cache warmup in CustomConversions.
|
||||
* [DATAMONGO-838] - Support for referring to expression based fields in group operations.
|
||||
* [DATAMONGO-840] - Support for nested MongoDB field references in SpEL expressions within Projections.
|
||||
* [DATAMONGO-842] - Fix documentation error in GRIDFS section.
|
||||
* [DATAMONGO-852] - Increase version for update should traverse DBObject correctly in order to find version property.
|
||||
|
||||
** Improvement
|
||||
* [DATAMONGO-468] - Simplification for updates of DBRef fields with mongoTemplate.
|
||||
* [DATAMONGO-849] - Documentation on github should not reference invalid class.
|
||||
|
||||
** Task
|
||||
* [DATAMONGO-848] - Ensure compatibility with Mongo Java driver 2.12.
|
||||
* [DATAMONGO-853] - Update no longer allows null keys.
|
||||
* [DATAMONGO-856] - Update documentation.
|
||||
|
||||
Changes in version 1.3.4.RELEASE (2014-02-17)
|
||||
---------------------------------------------
|
||||
** Bug
|
||||
* [DATAMONGO-407] - Collection with generics losing element type after $set update
|
||||
* [DATAMONGO-410] - Update with pushAll doesnt recognize defined Converter
|
||||
* [DATAMONGO-686] - ClassCastException while reusing Query object
|
||||
* [DATAMONGO-805] - Excluding DBRef field in a query causes a MappingException
|
||||
* [DATAMONGO-807] - using findAndModify removes the _class field of encapsulated classes, causing MappingInstantiationException
|
||||
* [DATAMONGO-808] - Spring data mongoDB not working with IPv6 address directly
|
||||
* [DATAMONGO-811] - updateFirst methods do not increment @Version field
|
||||
* [DATAMONGO-816] - Unable to execute query with DocumentCallbackHandler when query contains Criteria with enums.
|
||||
* [DATAMONGO-828] - UpdateFirst throws OptimisticLockingFailureException when updating document that does not exist
|
||||
* [DATAMONGO-830] - NPE during cache warmup in CustomConversions
|
||||
* [DATAMONGO-842] - Documentation error in GRIDFS section
|
||||
** Improvement
|
||||
* [DATAMONGO-813] - GridFsTemplate.getResource(location) throws NPE if it doesn't find the file
|
||||
** Task
|
||||
* [DATAMONGO-824] - Add contribution guidelines
|
||||
* [DATAMONGO-846] - Release 1.3.4
|
||||
|
||||
Changes in version 1.4.0.RC1 (2014-01-29)
|
||||
-----------------------------------------
|
||||
** Bug
|
||||
* [DATAMONGO-407] - Collection with generics losing element type after $set update
|
||||
* [DATAMONGO-686] - ClassCastException while reusing Query object
|
||||
* [DATAMONGO-726] - References to non existing classes in namespace XSD
|
||||
* [DATAMONGO-804] - EnableMongoRepositories repositoryImplementationPostfix() default is empty String instead of "Impl"
|
||||
* [DATAMONGO-805] - Excluding DBRef field in a query causes a MappingException
|
||||
* [DATAMONGO-806] - Spring Data MongoDB - Aggregation Framework - No property _id found for type com.entity.User
|
||||
* [DATAMONGO-807] - using findAndModify removes the _class field of encapsulated classes, causing MappingInstantiationException
|
||||
* [DATAMONGO-808] - Spring data mongoDB not working with IPv6 address directly
|
||||
* [DATAMONGO-811] - updateFirst methods do not increment @Version field
|
||||
* [DATAMONGO-816] - Unable to execute query with DocumentCallbackHandler when query contains Criteria with enums.
|
||||
** Improvement
|
||||
* [DATAMONGO-778] - Create geospatial index of type other than 2d with @GeoSpatialIndexed
|
||||
* [DATAMONGO-785] - Add support for geospatial 2Dsphere and geohaystack index types
|
||||
* [DATAMONGO-787] - Guard against SpEL issue in Spring 3.2.4
|
||||
* [DATAMONGO-799] - Fix failing test in MongoTemplateTests on Mongo 2.5.x
|
||||
* [DATAMONGO-802] - Change AbstractMongoConfiguration.mongoDbFactory() to return MongoDbFactory
|
||||
* [DATAMONGO-813] - GridFsTemplate.getResource(location) throws NPE if it doesn't find the file
|
||||
* [DATAMONGO-822] - Add support for eager CDI repository instantiation
|
||||
* [DATAMONGO-823] - Add bucket attribute to <mongo:gridFsTemplate />
|
||||
* [DATAMONGO-837] - Upgrade mongodb java driver to 2.11.4
|
||||
** Task
|
||||
* [DATAMONGO-790] - Ensure compatibility with Spring Framework 4.0
|
||||
* [DATAMONGO-824] - Add contribution guidelines
|
||||
* [DATAMONGO-826] - Release Spring Data MongoDB 1.4.0.RC1
|
||||
* [DATAMONGO-835] - Code cleanups
|
||||
|
||||
Changes in version 1.3.3.RELEASE (2013-12-11)
|
||||
---------------------------------------------
|
||||
** Bug
|
||||
@@ -19,7 +98,7 @@ Changes in version 1.3.3.RELEASE (2013-12-11)
|
||||
* [DATAMONGO-810] - Release 1.3.3
|
||||
|
||||
Changes in version 1.4.0.M1 (2013-11-19)
|
||||
---------------------------------------------
|
||||
----------------------------------------
|
||||
** Bug
|
||||
* [DATAMONGO-534] - The GridFs query execution does not return sorted resources, when the sorting fields are defined in the query definition
|
||||
* [DATAMONGO-630] - Add support of $setOnInsert modifier for upsert
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
Spring Data Document 1.3.3.RELEASE
|
||||
Copyright (c) [2010-2013] Pivotal Inc.
|
||||
Spring Data MongoDB 1.3.5.RELEASE
|
||||
Copyright (c) [2010-2014] Pivotal Software, Inc.
|
||||
|
||||
This product is licensed to you under the Apache License, Version 2.0 (the "License").
|
||||
You may not use this product except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
SPRING DATA MongoDB 1.3.3.RELEASE
|
||||
-----------------------------
|
||||
Spring Data MongoDB 1.3.5.RELEASE
|
||||
---------------------------------
|
||||
|
||||
Spring Data MongoDB is released under the terms of the Apache Software License Version 2.0 (see license.txt).
|
||||
|
||||
@@ -13,5 +13,5 @@ The reference manual and javadoc are located in the 'docs' directory.
|
||||
|
||||
ADDITIONAL RESOURCES:
|
||||
|
||||
Spring Data Homepage: http://www.springsource.org/spring-data
|
||||
Spring Data Forum: http://forum.springsource.org/forumdisplay.php?f=80
|
||||
Spring Data Homepage: http://projects.spring.io/spring-data
|
||||
Spring Data Forum: http://forum.spring.io/forum/spring-projects/data/nosql
|
||||
|
||||