Compare commits
53 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f27f42976b | ||
|
|
752ea95fde | ||
|
|
3ea9dd9e73 | ||
|
|
c077ae8985 | ||
|
|
19e29c7e1d | ||
|
|
4f0b2d66a5 | ||
|
|
4b5c53e959 | ||
|
|
cb071ce05f | ||
|
|
e59911b42b | ||
|
|
61e87be306 | ||
|
|
1637f8d181 | ||
|
|
b9c8b7b234 | ||
|
|
741429a452 | ||
|
|
b75f4795ea | ||
|
|
a97980b04d | ||
|
|
2d8c666802 | ||
|
|
de0c4109d7 | ||
|
|
02abfced9c | ||
|
|
ec696618be | ||
|
|
ee43703100 | ||
|
|
cadc74932e | ||
|
|
fa4b4b97dd | ||
|
|
8392f4275f | ||
|
|
8ff1913ec7 | ||
|
|
57c7524c77 | ||
|
|
f79636240c | ||
|
|
f22bf106db | ||
|
|
ff72150518 | ||
|
|
099689c00d | ||
|
|
1d5df555f0 | ||
|
|
54e6a80ca9 | ||
|
|
f66d773a94 | ||
|
|
899afe1fe7 | ||
|
|
ee9a6993b1 | ||
|
|
5d7e12a4fa | ||
|
|
8b3da2d7f9 | ||
|
|
49af31bb6e | ||
|
|
c2aacc03ff | ||
|
|
1cf544a530 | ||
|
|
bbb097cafc | ||
|
|
feafd50b59 | ||
|
|
b51cf05f90 | ||
|
|
b8196ac9ed | ||
|
|
e643d39fa6 | ||
|
|
6abdb0aa46 | ||
|
|
34063ff647 | ||
|
|
857f366b56 | ||
|
|
f7540d45c6 | ||
|
|
3d2ae8117f | ||
|
|
6b3bd8f621 | ||
|
|
b17ec47003 | ||
|
|
8c7b558d39 | ||
|
|
a3faabf718 |
1
CONTRIBUTING.MD
Normal file
1
CONTRIBUTING.MD
Normal file
@@ -0,0 +1 @@
|
||||
You find the contribution guidelines for Spring Data projects [here](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.md).
|
||||
@@ -26,7 +26,7 @@ Add the Maven dependency:
|
||||
<dependency>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb</artifactId>
|
||||
<version>1.2.3.RELEASE</version>
|
||||
<version>1.3.5.RELEASE</version>
|
||||
</dependency>
|
||||
```
|
||||
|
||||
@@ -36,7 +36,7 @@ If you'd rather like the latest snapshots of the upcoming major version, use our
|
||||
<dependency>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb</artifactId>
|
||||
<version>1.3.0.BUILD-SNAPSHOT</version>
|
||||
<version>1.4.0.BUILD-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<repository>
|
||||
|
||||
65
pom.xml
65
pom.xml
@@ -5,17 +5,17 @@
|
||||
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.3.0.RELEASE</version>
|
||||
<version>1.3.6.BUILD-SNAPSHOT</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<name>Spring Data MongoDB</name>
|
||||
<description>MongoDB support for Spring Data</description>
|
||||
<url>http://www.springsource.org/spring-data/mongodb</url>
|
||||
<url>http://projects.spring.io/spring-data-mongodb</url>
|
||||
|
||||
<parent>
|
||||
<groupId>org.springframework.data.build</groupId>
|
||||
<artifactId>spring-data-parent</artifactId>
|
||||
<version>1.2.0.RELEASE</version>
|
||||
<version>1.2.2.RELEASE</version>
|
||||
<relativePath>../spring-data-build/parent/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
<properties>
|
||||
<project.type>multi</project.type>
|
||||
<dist.id>spring-data-mongodb</dist.id>
|
||||
<springdata.commons>1.6.0.RELEASE</springdata.commons>
|
||||
<springdata.commons>1.6.5.RELEASE</springdata.commons>
|
||||
<mongo>2.10.1</mongo>
|
||||
</properties>
|
||||
|
||||
@@ -37,9 +37,9 @@
|
||||
<developer>
|
||||
<id>ogierke</id>
|
||||
<name>Oliver Gierke</name>
|
||||
<email>ogierke at vmware.com</email>
|
||||
<organization>SpringSource</organization>
|
||||
<organizationUrl>http://www.springsource.com</organizationUrl>
|
||||
<email>ogierke at gopivotal.com</email>
|
||||
<organization>Pivotal Software, Inc.</organization>
|
||||
<organizationUrl>http://www.gopivotal.com</organizationUrl>
|
||||
<roles>
|
||||
<role>Project Lean</role>
|
||||
</roles>
|
||||
@@ -48,9 +48,9 @@
|
||||
<developer>
|
||||
<id>trisberg</id>
|
||||
<name>Thomas Risberg</name>
|
||||
<email>trisberg at vmware.com</email>
|
||||
<organization>SpringSource</organization>
|
||||
<organizationUrl>http://www.springsource.com</organizationUrl>
|
||||
<email>trisberg at gopivotal.com</email>
|
||||
<organization>Pivotal Software, Inc.</organization>
|
||||
<organizationUrl>http://www.gopivotal.com</organizationUrl>
|
||||
<roles>
|
||||
<role>Developer</role>
|
||||
</roles>
|
||||
@@ -59,9 +59,9 @@
|
||||
<developer>
|
||||
<id>mpollack</id>
|
||||
<name>Mark Pollack</name>
|
||||
<email>mpollack at vmware.com</email>
|
||||
<organization>SpringSource</organization>
|
||||
<organizationUrl>http://www.springsource.com</organizationUrl>
|
||||
<email>mpollack at gopivotal.com</email>
|
||||
<organization>Pivotal Software, Inc.</organization>
|
||||
<organizationUrl>http://www.gopivotal.com</organizationUrl>
|
||||
<roles>
|
||||
<role>Developer</role>
|
||||
</roles>
|
||||
@@ -70,14 +70,36 @@
|
||||
<developer>
|
||||
<id>jbrisbin</id>
|
||||
<name>Jon Brisbin</name>
|
||||
<email>jbrisbin at vmware.com</email>
|
||||
<organization>SpringSource</organization>
|
||||
<organizationUrl>http://www.springsource.com</organizationUrl>
|
||||
<email>jbrisbin at gopivotal.com</email>
|
||||
<organization>Pivotal Software, Inc.</organization>
|
||||
<organizationUrl>http://www.gopivotal.com</organizationUrl>
|
||||
<roles>
|
||||
<role>Developer</role>
|
||||
</roles>
|
||||
<timezone>-6</timezone>
|
||||
</developer>
|
||||
<developer>
|
||||
<id>tdarimont</id>
|
||||
<name>Thomas Darimont</name>
|
||||
<email>tdarimont at gopivotal.com</email>
|
||||
<organization>Pivotal Software, Inc.</organization>
|
||||
<organizationUrl>http://www.gopivotal.com</organizationUrl>
|
||||
<roles>
|
||||
<role>Developer</role>
|
||||
</roles>
|
||||
<timezone>+1</timezone>
|
||||
</developer>
|
||||
<developer>
|
||||
<id>cstrobl</id>
|
||||
<name>Christoph Strobl</name>
|
||||
<email>cstrobl at gopivotal.com</email>
|
||||
<organization>Pivotal Software, Inc.</organization>
|
||||
<organizationUrl>http://www.gopivotal.com</organizationUrl>
|
||||
<roles>
|
||||
<role>Developer</role>
|
||||
</roles>
|
||||
<timezone>+1</timezone>
|
||||
</developer>
|
||||
</developers>
|
||||
|
||||
<dependencies>
|
||||
@@ -91,9 +113,16 @@
|
||||
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>spring-lib-release</id>
|
||||
<url>http://repo.springsource.org/libs-release-local</url>
|
||||
<id>spring-lib-snapshot</id>
|
||||
<url>http://repo.spring.io/libs-snapshot</url>
|
||||
</repository>
|
||||
</repositories>
|
||||
|
||||
<pluginRepositories>
|
||||
<pluginRepository>
|
||||
<id>spring-plugins-release</id>
|
||||
<url>http://repo.spring.io/plugins-release</url>
|
||||
</pluginRepository>
|
||||
</pluginRepositories>
|
||||
|
||||
</project>
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.3.0.RELEASE</version>
|
||||
<version>1.3.6.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -52,7 +52,7 @@
|
||||
<dependency>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb</artifactId>
|
||||
<version>1.3.0.RELEASE</version>
|
||||
<version>1.3.6.BUILD-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.3.0.RELEASE</version>
|
||||
<version>1.3.6.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.3.0.RELEASE</version>
|
||||
<version>1.3.6.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.3.0.RELEASE</version>
|
||||
<version>1.3.6.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -134,6 +134,13 @@
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>jul-to-slf4j</artifactId>
|
||||
<version>${slf4j}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
|
||||
@@ -109,7 +109,9 @@ public abstract class AbstractMongoConfiguration {
|
||||
* entities.
|
||||
*/
|
||||
protected String getMappingBasePackage() {
|
||||
return getClass().getPackage().getName();
|
||||
|
||||
Package mappingBasePackage = getClass().getPackage();
|
||||
return mappingBasePackage == null ? null : mappingBasePackage.getName();
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -16,12 +16,14 @@
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
import java.net.InetAddress;
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ServerAddress;
|
||||
@@ -35,6 +37,11 @@ import com.mongodb.ServerAddress;
|
||||
*/
|
||||
public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/**
|
||||
* A port is a number without a leading 0 at the end of the address that is proceeded by just a single :.
|
||||
*/
|
||||
private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";
|
||||
private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address {} '{}'. Check your replica set configuration!";
|
||||
private static final Logger LOG = LoggerFactory.getLogger(ServerAddressPropertyEditor.class);
|
||||
|
||||
/*
|
||||
@@ -77,22 +84,53 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
*/
|
||||
private ServerAddress parseServerAddress(String source) {
|
||||
|
||||
String[] hostAndPort = StringUtils.delimitedListToStringArray(source.trim(), ":");
|
||||
if (!StringUtils.hasText(source)) {
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!StringUtils.hasText(source) || hostAndPort.length > 2) {
|
||||
LOG.warn("Could not parse address source '{}'. Check your replica set configuration!", source);
|
||||
String[] hostAndPort = extractHostAddressAndPort(source.trim());
|
||||
|
||||
if (hostAndPort.length > 2) {
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
return hostAndPort.length == 1 ? new ServerAddress(hostAndPort[0]) : new ServerAddress(hostAndPort[0],
|
||||
Integer.parseInt(hostAndPort[1]));
|
||||
InetAddress hostAddress = InetAddress.getByName(hostAndPort[0]);
|
||||
Integer port = hostAndPort.length == 1 ? null : Integer.parseInt(hostAndPort[1]);
|
||||
|
||||
return port == null ? new ServerAddress(hostAddress) : new ServerAddress(hostAddress, port);
|
||||
} catch (UnknownHostException e) {
|
||||
LOG.warn("Could not parse host '{}'. Check your replica set configuration!", hostAndPort[0]);
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]);
|
||||
} catch (NumberFormatException e) {
|
||||
LOG.warn("Could not parse port '{}'. Check your replica set configuration!", hostAndPort[1]);
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the host and port from the given {@link String}.
|
||||
*
|
||||
* @param addressAndPortSource must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private String[] extractHostAddressAndPort(String addressAndPortSource) {
|
||||
|
||||
Assert.notNull(addressAndPortSource, "Address and port source must not be null!");
|
||||
|
||||
String[] hostAndPort = addressAndPortSource.split(HOST_PORT_SPLIT_PATTERN);
|
||||
String hostAddress = hostAndPort[0];
|
||||
|
||||
if (isHostAddressInIPv6BracketNotation(hostAddress)) {
|
||||
hostAndPort[0] = hostAddress.substring(1, hostAddress.length() - 1);
|
||||
}
|
||||
|
||||
return hostAndPort;
|
||||
}
|
||||
|
||||
private boolean isHostAddressInIPv6BracketNotation(String hostAddress) {
|
||||
return hostAddress.startsWith("[") && hostAddress.endsWith("]");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.springframework.data.domain.Sort.Direction.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
@@ -22,7 +24,6 @@ import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.core.index.IndexDefinition;
|
||||
import org.springframework.data.mongodb.core.index.IndexField;
|
||||
import org.springframework.data.mongodb.core.index.IndexInfo;
|
||||
import org.springframework.data.mongodb.core.query.Order;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBCollection;
|
||||
@@ -34,9 +35,13 @@ import com.mongodb.MongoException;
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Komi Innocent
|
||||
*/
|
||||
public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
private static final Double ONE = Double.valueOf(1);
|
||||
private static final Double MINUS_ONE = Double.valueOf(-1);
|
||||
|
||||
private final MongoOperations mongoOperations;
|
||||
private final String collectionName;
|
||||
|
||||
@@ -135,12 +140,17 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
Object value = keyDbObject.get(key);
|
||||
|
||||
if (Integer.valueOf(1).equals(value)) {
|
||||
indexFields.add(IndexField.create(key, Order.ASCENDING));
|
||||
} else if (Integer.valueOf(-1).equals(value)) {
|
||||
indexFields.add(IndexField.create(key, Order.DESCENDING));
|
||||
} else if ("2d".equals(value)) {
|
||||
if ("2d".equals(value)) {
|
||||
indexFields.add(IndexField.geo(key));
|
||||
} else {
|
||||
|
||||
Double keyValue = new Double(value.toString());
|
||||
|
||||
if (ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, ASC));
|
||||
} else if (MINUS_ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, DESC));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -124,6 +124,7 @@ import com.mongodb.util.JSONParseException;
|
||||
* @author Sebastian Herold
|
||||
* @author Thomas Darimont
|
||||
* @author Chuong Ngo
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
@@ -365,7 +366,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
Assert.notNull(query);
|
||||
|
||||
DBObject queryObject = query.getQueryObject();
|
||||
DBObject queryObject = queryMapper.getMappedObject(query.getQueryObject(), null);
|
||||
DBObject sortObject = query.getSortObject();
|
||||
DBObject fieldsObject = query.getFieldsObject();
|
||||
|
||||
@@ -700,10 +701,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
initializeVersionProperty(objectToSave);
|
||||
|
||||
BasicDBObject dbDoc = new BasicDBObject();
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave));
|
||||
writer.write(objectToSave, dbDoc);
|
||||
|
||||
DBObject dbDoc = toDbObject(objectToSave, writer);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc));
|
||||
Object id = insertDBObject(collectionName, dbDoc, objectToSave.getClass());
|
||||
@@ -712,6 +712,26 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
maybeEmitEvent(new AfterSaveEvent<T>(objectToSave, dbDoc));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param objectToSave
|
||||
* @param writer
|
||||
* @return
|
||||
*/
|
||||
private <T> DBObject toDbObject(T objectToSave, MongoWriter<T> writer) {
|
||||
|
||||
if (!(objectToSave instanceof String)) {
|
||||
DBObject dbDoc = new BasicDBObject();
|
||||
writer.write(objectToSave, dbDoc);
|
||||
return dbDoc;
|
||||
} else {
|
||||
try {
|
||||
return (DBObject) JSON.parse((String) objectToSave);
|
||||
} catch (JSONParseException e) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void initializeVersionProperty(Object entity) {
|
||||
|
||||
MongoPersistentEntity<?> mongoPersistentEntity = getPersistentEntity(entity.getClass());
|
||||
@@ -851,19 +871,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
assertUpdateableIdIfNotSet(objectToSave);
|
||||
|
||||
DBObject dbDoc = new BasicDBObject();
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<T>(objectToSave));
|
||||
|
||||
if (!(objectToSave instanceof String)) {
|
||||
writer.write(objectToSave, dbDoc);
|
||||
} else {
|
||||
try {
|
||||
dbDoc = (DBObject) JSON.parse((String) objectToSave);
|
||||
} catch (JSONParseException e) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", e);
|
||||
}
|
||||
}
|
||||
DBObject dbDoc = toDbObject(objectToSave, writer);
|
||||
|
||||
maybeEmitEvent(new BeforeSaveEvent<T>(objectToSave, dbDoc));
|
||||
Object id = saveDBObject(collectionName, dbDoc, objectToSave.getClass());
|
||||
@@ -983,6 +993,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
MongoPersistentEntity<?> entity = entityClass == null ? null : getPersistentEntity(entityClass);
|
||||
|
||||
increaseVersionForUpdateIfNecessary(entity, update);
|
||||
|
||||
DBObject queryObj = query == null ? new BasicDBObject() : queryMapper.getMappedObject(query.getQueryObject(),
|
||||
entity);
|
||||
DBObject updateObj = update == null ? new BasicDBObject() : updateMapper.getMappedObject(
|
||||
@@ -1000,7 +1012,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
: collection.update(queryObj, updateObj, upsert, multi, writeConcernToUse);
|
||||
|
||||
if (entity != null && entity.hasVersionProperty() && !multi) {
|
||||
if (writeResult.getN() == 0) {
|
||||
if (writeResult.getN() == 0 && dbObjectContainsVersionProperty(queryObj, entity)) {
|
||||
throw new OptimisticLockingFailureException("Optimistic lock exception on saving entity: "
|
||||
+ updateObj.toMap().toString() + " to collection " + collectionName);
|
||||
}
|
||||
@@ -1012,6 +1024,24 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
});
|
||||
}
|
||||
|
||||
private void increaseVersionForUpdateIfNecessary(MongoPersistentEntity<?> persistentEntity, Update update) {
|
||||
|
||||
if (persistentEntity != null && persistentEntity.hasVersionProperty()) {
|
||||
if (!dbObjectContainsVersionProperty(update.getUpdateObject(), persistentEntity)) {
|
||||
update.inc(persistentEntity.getVersionProperty().getFieldName(), 1L);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private boolean dbObjectContainsVersionProperty(DBObject dbObject, MongoPersistentEntity<?> persistentEntity) {
|
||||
|
||||
if (persistentEntity == null || !persistentEntity.hasVersionProperty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return dbObject.containsField(persistentEntity.getVersionProperty().getFieldName());
|
||||
}
|
||||
|
||||
public void remove(Object object) {
|
||||
|
||||
if (object == null) {
|
||||
@@ -1538,8 +1568,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
|
||||
DBObject mappedUpdate = queryMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
DBObject mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
DBObject mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findAndModify using query: " + mappedQuery + " fields: " + fields + " sort: " + sort
|
||||
|
||||
@@ -48,6 +48,15 @@ public class Aggregation {
|
||||
|
||||
private final List<AggregationOperation> operations;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public static Aggregation newAggregation(List<? extends AggregationOperation> operations) {
|
||||
return newAggregation(operations.toArray(new AggregationOperation[operations.size()]));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
@@ -57,6 +66,16 @@ public class Aggregation {
|
||||
return new Aggregation(operations);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypedAggregation} for the given type and {@link AggregationOperation}s.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public static <T> TypedAggregation<T> newAggregation(Class<T> type, List<? extends AggregationOperation> operations) {
|
||||
return newAggregation(type, operations.toArray(new AggregationOperation[operations.size()]));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypedAggregation} for the given type and {@link AggregationOperation}s.
|
||||
*
|
||||
@@ -227,8 +246,9 @@ public class Aggregation {
|
||||
|
||||
operationDocuments.add(operation.toDBObject(context));
|
||||
|
||||
if (operation instanceof AggregationOperationContext) {
|
||||
context = (AggregationOperationContext) operation;
|
||||
if (operation instanceof FieldsExposingAggregationOperation) {
|
||||
FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation;
|
||||
context = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -29,6 +29,7 @@ import org.springframework.util.CompositeIterator;
|
||||
* Value object to capture the fields exposed by an {@link AggregationOperation}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @since 1.3
|
||||
*/
|
||||
public class ExposedFields implements Iterable<ExposedField> {
|
||||
@@ -151,13 +152,47 @@ public class ExposedFields implements Iterable<ExposedField> {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes no non-synthetic fields at all.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean exposesNoNonSyntheticFields() {
|
||||
return originalFields.isEmpty();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes a single non-synthetic field only.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean exposesSingleNonSyntheticFieldOnly() {
|
||||
return originalFields.size() == 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes no fields at all.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean exposesNoFields() {
|
||||
return exposedFieldsCount() == 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link ExposedFields} exposes a single field only.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public boolean exposesSingleFieldOnly() {
|
||||
return originalFields.size() + syntheticFields.size() == 1;
|
||||
boolean exposesSingleFieldOnly() {
|
||||
return exposedFieldsCount() == 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return
|
||||
*/
|
||||
private int exposedFieldsCount() {
|
||||
return originalFields.size() + syntheticFields.size();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -224,6 +259,15 @@ public class ExposedFields implements Iterable<ExposedField> {
|
||||
return field.getTarget();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#isAliased()
|
||||
*/
|
||||
@Override
|
||||
public boolean isAliased() {
|
||||
return field.isAliased();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the field can be referred to using the given name.
|
||||
*
|
||||
@@ -309,6 +353,16 @@ public class ExposedFields implements Iterable<ExposedField> {
|
||||
return field.synthetic ? target : String.format("%s.%s", Fields.UNDERSCORE_ID, target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the referenve value for the given field reference. Will return 1 for a synthetic, unaliased field or the
|
||||
* raw rendering of the reference otherwise.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public Object getReferenceValue() {
|
||||
return field.synthetic && !field.isAliased() ? 1 : toString();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
|
||||
@@ -17,17 +17,32 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Support class to implement {@link AggregationOperation}s that will become an {@link AggregationOperationContext} as
|
||||
* well defining {@link ExposedFields}.
|
||||
* {@link AggregationOperationContext} that combines the available field references from a given
|
||||
* {@code AggregationOperationContext} and an {@link FieldsExposingAggregationOperation}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
* @since 1.4
|
||||
*/
|
||||
public abstract class ExposedFieldsAggregationOperationContext implements AggregationOperationContext {
|
||||
class ExposedFieldsAggregationOperationContext implements AggregationOperationContext {
|
||||
|
||||
private final ExposedFields exposedFields;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExposedFieldsAggregationOperationContext} from the given {@link ExposedFields}.
|
||||
*
|
||||
* @param exposedFields must not be {@literal null}.
|
||||
*/
|
||||
public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields) {
|
||||
|
||||
Assert.notNull(exposedFields, "ExposedFields must not be null!");
|
||||
this.exposedFields = exposedFields;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -54,7 +69,7 @@ public abstract class ExposedFieldsAggregationOperationContext implements Aggreg
|
||||
@Override
|
||||
public FieldReference getReference(String name) {
|
||||
|
||||
ExposedField field = getFields().getField(name);
|
||||
ExposedField field = exposedFields.getField(name);
|
||||
|
||||
if (field != null) {
|
||||
return new FieldReference(field);
|
||||
@@ -62,6 +77,4 @@ public abstract class ExposedFieldsAggregationOperationContext implements Aggreg
|
||||
|
||||
throw new IllegalArgumentException(String.format("Invalid reference '%s'!", name));
|
||||
}
|
||||
|
||||
protected abstract ExposedFields getFields();
|
||||
}
|
||||
|
||||
@@ -36,4 +36,11 @@ public interface Field {
|
||||
* @return must not be {@literal null}.
|
||||
*/
|
||||
String getTarget();
|
||||
|
||||
/**
|
||||
* Returns whether the Field is aliased, which means it has a name set different from the target.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean isAliased();
|
||||
}
|
||||
|
||||
@@ -224,6 +224,15 @@ public class Fields implements Iterable<Field> {
|
||||
return StringUtils.hasText(this.target) ? this.target : this.name;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Field#isAliased()
|
||||
*/
|
||||
@Override
|
||||
public boolean isAliased() {
|
||||
return !getName().equals(getTarget());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
|
||||
@@ -0,0 +1,32 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
/**
|
||||
* {@link AggregationOperation} that exposes new {@link ExposedFields} that can be used for later aggregation pipeline
|
||||
* {@code AggregationOperation}s.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public interface FieldsExposingAggregationOperation extends AggregationOperation {
|
||||
|
||||
/**
|
||||
* Returns the fields exposed by the {@link AggregationOperation}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
ExposedFields getFields();
|
||||
}
|
||||
@@ -38,9 +38,13 @@ import com.mongodb.DBObject;
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class GroupOperation extends ExposedFieldsAggregationOperationContext implements AggregationOperation {
|
||||
public class GroupOperation implements FieldsExposingAggregationOperation {
|
||||
|
||||
/**
|
||||
* Holds the non-synthetic fields which are the fields of the group-id structure.
|
||||
*/
|
||||
private final ExposedFields idFields;
|
||||
|
||||
private final ExposedFields nonSynthecticFields;
|
||||
private final List<Operation> operations;
|
||||
|
||||
/**
|
||||
@@ -50,7 +54,7 @@ public class GroupOperation extends ExposedFieldsAggregationOperationContext imp
|
||||
*/
|
||||
public GroupOperation(Fields fields) {
|
||||
|
||||
this.nonSynthecticFields = ExposedFields.nonSynthetic(fields);
|
||||
this.idFields = ExposedFields.nonSynthetic(fields);
|
||||
this.operations = new ArrayList<Operation>();
|
||||
}
|
||||
|
||||
@@ -74,7 +78,7 @@ public class GroupOperation extends ExposedFieldsAggregationOperationContext imp
|
||||
Assert.notNull(groupOperation, "GroupOperation must not be null!");
|
||||
Assert.notNull(nextOperations, "NextOperations must not be null!");
|
||||
|
||||
this.nonSynthecticFields = groupOperation.nonSynthecticFields;
|
||||
this.idFields = groupOperation.idFields;
|
||||
this.operations = new ArrayList<Operation>(nextOperations.size() + 1);
|
||||
this.operations.addAll(groupOperation.operations);
|
||||
this.operations.addAll(nextOperations);
|
||||
@@ -261,7 +265,7 @@ public class GroupOperation extends ExposedFieldsAggregationOperationContext imp
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
|
||||
ExposedFields fields = this.nonSynthecticFields.and(new ExposedField(Fields.UNDERSCORE_ID, true));
|
||||
ExposedFields fields = this.idFields.and(new ExposedField(Fields.UNDERSCORE_ID, true));
|
||||
|
||||
for (Operation operation : operations) {
|
||||
fields = fields.and(operation.asField());
|
||||
@@ -279,16 +283,20 @@ public class GroupOperation extends ExposedFieldsAggregationOperationContext imp
|
||||
|
||||
BasicDBObject operationObject = new BasicDBObject();
|
||||
|
||||
if (nonSynthecticFields.exposesSingleFieldOnly()) {
|
||||
if (idFields.exposesNoNonSyntheticFields()) {
|
||||
|
||||
FieldReference reference = context.getReference(nonSynthecticFields.iterator().next());
|
||||
operationObject.put(Fields.UNDERSCORE_ID, null);
|
||||
|
||||
} else if (idFields.exposesSingleNonSyntheticFieldOnly()) {
|
||||
|
||||
FieldReference reference = context.getReference(idFields.iterator().next());
|
||||
operationObject.put(Fields.UNDERSCORE_ID, reference.toString());
|
||||
|
||||
} else {
|
||||
|
||||
BasicDBObject inner = new BasicDBObject();
|
||||
|
||||
for (ExposedField field : nonSynthecticFields) {
|
||||
for (ExposedField field : idFields) {
|
||||
FieldReference reference = context.getReference(field);
|
||||
inner.put(field.getName(), reference.toString());
|
||||
}
|
||||
|
||||
@@ -21,7 +21,6 @@ import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.FieldProjection;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -41,9 +40,11 @@ import com.mongodb.DBObject;
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class ProjectionOperation extends ExposedFieldsAggregationOperationContext implements AggregationOperation {
|
||||
public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
|
||||
private static final List<Projection> NONE = Collections.emptyList();
|
||||
private static final String EXCLUSION_ERROR = "Exclusion of field %s not allowed. Projections by the mongodb "
|
||||
+ "aggregation framework only support the exclusion of the %s field!";
|
||||
|
||||
private final List<Projection> projections;
|
||||
|
||||
@@ -60,7 +61,7 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
* @param fields must not be {@literal null}.
|
||||
*/
|
||||
public ProjectionOperation(Fields fields) {
|
||||
this(NONE, ProjectionOperationBuilder.FieldProjection.from(fields, true));
|
||||
this(NONE, ProjectionOperationBuilder.FieldProjection.from(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -117,23 +118,29 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
/**
|
||||
* Excludes the given fields from the projection.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @param fieldNames must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation andExclude(String... fields) {
|
||||
List<FieldProjection> excludeProjections = FieldProjection.from(Fields.fields(fields), false);
|
||||
public ProjectionOperation andExclude(String... fieldNames) {
|
||||
|
||||
for (String fieldName : fieldNames) {
|
||||
Assert.isTrue(Fields.UNDERSCORE_ID.equals(fieldName),
|
||||
String.format(EXCLUSION_ERROR, fieldName, Fields.UNDERSCORE_ID));
|
||||
}
|
||||
|
||||
List<FieldProjection> excludeProjections = FieldProjection.from(Fields.fields(fieldNames), false);
|
||||
return new ProjectionOperation(this.projections, excludeProjections);
|
||||
}
|
||||
|
||||
/**
|
||||
* Includes the given fields into the projection.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @param fieldNames must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperation andInclude(String... fields) {
|
||||
public ProjectionOperation andInclude(String... fieldNames) {
|
||||
|
||||
List<FieldProjection> projections = FieldProjection.from(Fields.fields(fields), true);
|
||||
List<FieldProjection> projections = FieldProjection.from(Fields.fields(fieldNames), true);
|
||||
return new ProjectionOperation(this.projections, projections);
|
||||
}
|
||||
|
||||
@@ -147,12 +154,12 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
return new ProjectionOperation(this.projections, FieldProjection.from(fields, true));
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ExposedFieldsAggregationOperationContext#getFields()
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
|
||||
*/
|
||||
@Override
|
||||
protected ExposedFields getFields() {
|
||||
public ExposedFields getFields() {
|
||||
|
||||
ExposedFields fields = null;
|
||||
|
||||
@@ -184,6 +191,7 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
* Builder for {@link ProjectionOperation}s on a field.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public static class ProjectionOperationBuilder implements AggregationOperation {
|
||||
|
||||
@@ -258,6 +266,18 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
return project("add", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $add} expression that adds the value of the given field to the previously mentioned field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder plus(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("add", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $subtract} expression that subtracts the given number to the previously mentioned field.
|
||||
*
|
||||
@@ -270,6 +290,19 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
return project("subtract", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $subtract} expression that subtracts the value of the given field to the previously mentioned
|
||||
* field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder minus(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("subtract", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $multiply} expression that multiplies the given number with the previously mentioned field.
|
||||
*
|
||||
@@ -282,6 +315,19 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
return project("multiply", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $multiply} expression that multiplies the value of the given field with the previously
|
||||
* mentioned field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder multiply(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("multiply", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $divide} expression that divides the previously mentioned field by the given number.
|
||||
*
|
||||
@@ -295,6 +341,19 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
return project("divide", number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $divide} expression that divides the value of the given field by the previously mentioned
|
||||
* field.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder divide(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("divide", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an {@code $mod} expression that divides the previously mentioned field by the given number and returns
|
||||
* the remainder.
|
||||
@@ -309,7 +368,21 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
return project("mod", number);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
/**
|
||||
* Generates an {@code $mod} expression that divides the value of the given field by the previously mentioned field
|
||||
* and returns the remainder.
|
||||
*
|
||||
* @param fieldReference
|
||||
* @return
|
||||
*/
|
||||
public ProjectionOperationBuilder mod(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "Field reference must not be null!");
|
||||
return project("mod", Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
@@ -362,6 +435,7 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
* A {@link FieldProjection} to map a result of a previous {@link AggregationOperation} to a new field.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
static class FieldProjection extends Projection {
|
||||
|
||||
@@ -386,20 +460,31 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to easily create {@link FieldProjection}s for the given {@link Fields}. Fields are projected as
|
||||
* references with their given name. A field {@code foo} will be projected as: {@code foo : 1 } .
|
||||
*
|
||||
* @param fields the {@link Fields} to in- or exclude, must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static List<? extends Projection> from(Fields fields) {
|
||||
return from(fields, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to easily create {@link FieldProjection}s for the given {@link Fields}.
|
||||
*
|
||||
* @param fields the {@link Fields} to in- or exclude, must not be {@literal null}.
|
||||
* @param include whether to include or exclude the fields.
|
||||
* @param value to use for the given field.
|
||||
* @return
|
||||
*/
|
||||
public static List<FieldProjection> from(Fields fields, boolean include) {
|
||||
public static List<FieldProjection> from(Fields fields, Object value) {
|
||||
|
||||
Assert.notNull(fields, "Fields must not be null!");
|
||||
List<FieldProjection> projections = new ArrayList<FieldProjection>();
|
||||
|
||||
for (Field field : fields) {
|
||||
projections.add(new FieldProjection(field, include ? null : 0));
|
||||
projections.add(new FieldProjection(field, value));
|
||||
}
|
||||
|
||||
return projections;
|
||||
@@ -411,13 +496,24 @@ public class ProjectionOperation extends ExposedFieldsAggregationOperationContex
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject(field.getName(), renderFieldValue(context));
|
||||
}
|
||||
|
||||
if (value != null) {
|
||||
return new BasicDBObject(field.getName(), value);
|
||||
private Object renderFieldValue(AggregationOperationContext context) {
|
||||
|
||||
// implicit reference or explicit include?
|
||||
if (value == null || Boolean.TRUE.equals(value)) {
|
||||
|
||||
// check whether referenced field exists in the context
|
||||
return context.getReference(field).getReferenceValue();
|
||||
|
||||
} else if (Boolean.FALSE.equals(value)) {
|
||||
|
||||
// render field as excluded
|
||||
return 0;
|
||||
}
|
||||
|
||||
FieldReference reference = context.getReference(field.getTarget());
|
||||
return new BasicDBObject(field.getName(), reference.toString());
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -71,9 +71,9 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
|
||||
return mapper.getMappedObject(dbObject, mappingContext.getPersistentEntity(type));
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.ExposedFields.AvailableField)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.Field)
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
@@ -88,15 +88,17 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(String name) {
|
||||
PropertyPath path = PropertyPath.from(name, type);
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(path);
|
||||
|
||||
return getReferenceFor(field(path.getLeafProperty().getSegment(),
|
||||
propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)));
|
||||
return getReferenceFor(field(name));
|
||||
}
|
||||
|
||||
private FieldReference getReferenceFor(Field field) {
|
||||
return new FieldReference(new ExposedField(field, true));
|
||||
|
||||
PropertyPath path = PropertyPath.from(field.getTarget(), type);
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(path);
|
||||
Field mappedField = field(propertyPath.getLeafProperty().getName(),
|
||||
propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE));
|
||||
|
||||
return new FieldReference(new ExposedField(mappedField, true));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,7 +29,7 @@ import com.mongodb.DBObject;
|
||||
* @author Oliver Gierke
|
||||
* @since 1.3
|
||||
*/
|
||||
public class UnwindOperation extends ExposedFieldsAggregationOperationContext implements AggregationOperation {
|
||||
public class UnwindOperation implements AggregationOperation {
|
||||
|
||||
private final ExposedField field;
|
||||
|
||||
@@ -44,15 +44,6 @@ public class UnwindOperation extends ExposedFieldsAggregationOperationContext im
|
||||
this.field = new ExposedField(field, true);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ExposedFieldsAggregationOperationContext#getFields()
|
||||
*/
|
||||
@Override
|
||||
protected ExposedFields getFields() {
|
||||
return ExposedFields.from(field);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,12 +17,13 @@ package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -55,6 +56,7 @@ import org.springframework.util.Assert;
|
||||
* .
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class CustomConversions {
|
||||
|
||||
@@ -66,7 +68,7 @@ public class CustomConversions {
|
||||
private final Set<ConvertiblePair> writingPairs;
|
||||
private final Set<Class<?>> customSimpleTypes;
|
||||
private final SimpleTypeHolder simpleTypeHolder;
|
||||
private final Map<Class<?>, HashMap<Class<?>, CacheValue>> cache;
|
||||
private final ConcurrentMap<ConvertiblePair, CacheValue> customReadTargetTypes;
|
||||
|
||||
private final List<Object> converters;
|
||||
|
||||
@@ -86,12 +88,13 @@ public class CustomConversions {
|
||||
|
||||
Assert.notNull(converters);
|
||||
|
||||
this.readingPairs = new HashSet<ConvertiblePair>();
|
||||
this.writingPairs = new HashSet<ConvertiblePair>();
|
||||
this.readingPairs = new LinkedHashSet<ConvertiblePair>();
|
||||
this.writingPairs = new LinkedHashSet<ConvertiblePair>();
|
||||
this.customSimpleTypes = new HashSet<Class<?>>();
|
||||
this.cache = new HashMap<Class<?>, HashMap<Class<?>, CacheValue>>();
|
||||
this.customReadTargetTypes = new ConcurrentHashMap<GenericConverter.ConvertiblePair, CacheValue>();
|
||||
|
||||
this.converters = new ArrayList<Object>();
|
||||
this.converters.addAll(converters);
|
||||
this.converters.add(CustomToStringConverter.INSTANCE);
|
||||
this.converters.add(BigDecimalToStringConverter.INSTANCE);
|
||||
this.converters.add(StringToBigDecimalConverter.INSTANCE);
|
||||
@@ -101,7 +104,6 @@ public class CustomConversions {
|
||||
this.converters.add(StringToURLConverter.INSTANCE);
|
||||
this.converters.add(DBObjectToStringConverter.INSTANCE);
|
||||
this.converters.addAll(JodaTimeConverters.getConvertersToRegister());
|
||||
this.converters.addAll(converters);
|
||||
|
||||
for (Object c : this.converters) {
|
||||
registerConversion(c);
|
||||
@@ -194,25 +196,25 @@ public class CustomConversions {
|
||||
*
|
||||
* @param pair
|
||||
*/
|
||||
private void register(ConverterRegistration context) {
|
||||
private void register(ConverterRegistration converterRegistration) {
|
||||
|
||||
ConvertiblePair pair = context.getConvertiblePair();
|
||||
ConvertiblePair pair = converterRegistration.getConvertiblePair();
|
||||
|
||||
if (context.isReading()) {
|
||||
if (converterRegistration.isReading()) {
|
||||
|
||||
readingPairs.add(pair);
|
||||
|
||||
if (LOG.isWarnEnabled() && !context.isSimpleSourceType()) {
|
||||
if (LOG.isWarnEnabled() && !converterRegistration.isSimpleSourceType()) {
|
||||
LOG.warn(String.format(READ_CONVERTER_NOT_SIMPLE, pair.getSourceType(), pair.getTargetType()));
|
||||
}
|
||||
}
|
||||
|
||||
if (context.isWriting()) {
|
||||
if (converterRegistration.isWriting()) {
|
||||
|
||||
writingPairs.add(pair);
|
||||
customSimpleTypes.add(pair.getSourceType());
|
||||
|
||||
if (LOG.isWarnEnabled() && !context.isSimpleTargetType()) {
|
||||
if (LOG.isWarnEnabled() && !converterRegistration.isSimpleTargetType()) {
|
||||
LOG.warn(String.format(WRITE_CONVERTER_NOT_SIMPLE, pair.getSourceType(), pair.getTargetType()));
|
||||
}
|
||||
}
|
||||
@@ -222,11 +224,11 @@ public class CustomConversions {
|
||||
* Returns the target type to convert to in case we have a custom conversion registered to convert the given source
|
||||
* type into a Mongo native one.
|
||||
*
|
||||
* @param source must not be {@literal null}
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public Class<?> getCustomWriteTarget(Class<?> source) {
|
||||
return getCustomWriteTarget(source, null);
|
||||
public Class<?> getCustomWriteTarget(Class<?> sourceType) {
|
||||
return getCustomWriteTarget(sourceType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -234,71 +236,78 @@ public class CustomConversions {
|
||||
* oth the given expected type though. If {@code expectedTargetType} is {@literal null} we will simply return the
|
||||
* first target type matching or {@literal null} if no conversion can be found.
|
||||
*
|
||||
* @param source must not be {@literal null}
|
||||
* @param expectedTargetType
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @param requestedTargetType
|
||||
* @return
|
||||
*/
|
||||
public Class<?> getCustomWriteTarget(Class<?> source, Class<?> expectedTargetType) {
|
||||
Assert.notNull(source);
|
||||
return getCustomTarget(source, expectedTargetType, writingPairs);
|
||||
public Class<?> getCustomWriteTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
|
||||
return getCustomTarget(sourceType, requestedTargetType, writingPairs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether we have a custom conversion registered to write into a Mongo native type. The returned type might
|
||||
* be a subclass oth the given expected type though.
|
||||
* be a subclass of the given expected type though.
|
||||
*
|
||||
* @param source must not be {@literal null}
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public boolean hasCustomWriteTarget(Class<?> source) {
|
||||
return hasCustomWriteTarget(source, null);
|
||||
public boolean hasCustomWriteTarget(Class<?> sourceType) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
return hasCustomWriteTarget(sourceType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether we have a custom conversion registered to write an object of the given source type into an object
|
||||
* of the given Mongo native target type.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param expectedTargetType
|
||||
* @param sourceType must not be {@literal null}.
|
||||
* @param requestedTargetType
|
||||
* @return
|
||||
*/
|
||||
public boolean hasCustomWriteTarget(Class<?> source, Class<?> expectedTargetType) {
|
||||
return getCustomWriteTarget(source, expectedTargetType) != null;
|
||||
public boolean hasCustomWriteTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
return getCustomWriteTarget(sourceType, requestedTargetType) != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether we have a custom conversion registered to read the given source into the given target type.
|
||||
*
|
||||
* @param source must not be {@literal null}
|
||||
* @param expectedTargetType must not be {@literal null}
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @param requestedTargetType must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public boolean hasCustomReadTarget(Class<?> source, Class<?> expectedTargetType) {
|
||||
public boolean hasCustomReadTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
|
||||
Assert.notNull(source);
|
||||
Assert.notNull(expectedTargetType);
|
||||
Assert.notNull(sourceType);
|
||||
Assert.notNull(requestedTargetType);
|
||||
|
||||
return getCustomReadTarget(source, expectedTargetType) != null;
|
||||
return getCustomReadTarget(sourceType, requestedTargetType) != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Inspects the given {@link ConvertiblePair} for ones that have a source compatible type as source. Additionally
|
||||
* checks assignabilty of the target type if one is given.
|
||||
* checks assignability of the target type if one is given.
|
||||
*
|
||||
* @param source must not be {@literal null}
|
||||
* @param expectedTargetType
|
||||
* @param pairs must not be {@literal null}
|
||||
* @param sourceType must not be {@literal null}.
|
||||
* @param requestedTargetType can be {@literal null}.
|
||||
* @param pairs must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private static Class<?> getCustomTarget(Class<?> source, Class<?> expectedTargetType, Iterable<ConvertiblePair> pairs) {
|
||||
private static Class<?> getCustomTarget(Class<?> sourceType, Class<?> requestedTargetType,
|
||||
Iterable<ConvertiblePair> pairs) {
|
||||
|
||||
Assert.notNull(source);
|
||||
Assert.notNull(sourceType);
|
||||
Assert.notNull(pairs);
|
||||
|
||||
for (ConvertiblePair typePair : pairs) {
|
||||
if (typePair.getSourceType().isAssignableFrom(source)) {
|
||||
if (typePair.getSourceType().isAssignableFrom(sourceType)) {
|
||||
Class<?> targetType = typePair.getTargetType();
|
||||
if (expectedTargetType == null || targetType.isAssignableFrom(expectedTargetType)) {
|
||||
if (requestedTargetType == null || targetType.isAssignableFrom(requestedTargetType)) {
|
||||
return targetType;
|
||||
}
|
||||
}
|
||||
@@ -307,27 +316,33 @@ public class CustomConversions {
|
||||
return null;
|
||||
}
|
||||
|
||||
private Class<?> getCustomReadTarget(Class<?> source, Class<?> expectedTargetType) {
|
||||
/**
|
||||
* Returns the actual target type for the given {@code sourceType} and {@code requestedTargetType}. Note that the
|
||||
* returned {@link Class} could be an assignable type to the given {@code requestedTargetType}.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}.
|
||||
* @param requestedTargetType can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private Class<?> getCustomReadTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
|
||||
Class<?> type = expectedTargetType == null ? PlaceholderType.class : expectedTargetType;
|
||||
Assert.notNull(sourceType);
|
||||
|
||||
Map<Class<?>, CacheValue> map;
|
||||
CacheValue toReturn;
|
||||
|
||||
if ((map = cache.get(source)) == null || (toReturn = map.get(type)) == null) {
|
||||
|
||||
Class<?> target = getCustomTarget(source, type, readingPairs);
|
||||
|
||||
if (cache.get(source) == null) {
|
||||
cache.put(source, new HashMap<Class<?>, CacheValue>());
|
||||
}
|
||||
|
||||
Map<Class<?>, CacheValue> value = cache.get(source);
|
||||
toReturn = target == null ? CacheValue.NULL : new CacheValue(target);
|
||||
value.put(type, toReturn);
|
||||
if (requestedTargetType == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return toReturn.clazz;
|
||||
ConvertiblePair lookupKey = new ConvertiblePair(sourceType, requestedTargetType);
|
||||
CacheValue readTargetTypeValue = customReadTargetTypes.get(lookupKey);
|
||||
|
||||
if (readTargetTypeValue != null) {
|
||||
return readTargetTypeValue.getType();
|
||||
}
|
||||
|
||||
readTargetTypeValue = CacheValue.of(getCustomTarget(sourceType, requestedTargetType, readingPairs));
|
||||
CacheValue cacheValue = customReadTargetTypes.putIfAbsent(lookupKey, readTargetTypeValue);
|
||||
|
||||
return cacheValue != null ? cacheValue.getType() : readTargetTypeValue.getType();
|
||||
}
|
||||
|
||||
@WritingConverter
|
||||
@@ -336,8 +351,10 @@ public class CustomConversions {
|
||||
INSTANCE;
|
||||
|
||||
public Set<ConvertiblePair> getConvertibleTypes() {
|
||||
|
||||
ConvertiblePair localeToString = new ConvertiblePair(Locale.class, String.class);
|
||||
ConvertiblePair booleanToString = new ConvertiblePair(Character.class, String.class);
|
||||
|
||||
return new HashSet<ConvertiblePair>(Arrays.asList(localeToString, booleanToString));
|
||||
}
|
||||
|
||||
@@ -346,29 +363,29 @@ public class CustomConversions {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Placeholder type to allow registering not-found values in the converter cache.
|
||||
*
|
||||
* @author Patryk Wasik
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class PlaceholderType {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper to safely store {@literal null} values in the type cache.
|
||||
*
|
||||
* @author Patryk Wasik
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class CacheValue {
|
||||
|
||||
public static final CacheValue NULL = new CacheValue(null);
|
||||
private final Class<?> clazz;
|
||||
private static final CacheValue ABSENT = new CacheValue(null);
|
||||
|
||||
public CacheValue(Class<?> clazz) {
|
||||
this.clazz = clazz;
|
||||
private final Class<?> type;
|
||||
|
||||
public CacheValue(Class<?> type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public Class<?> getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
static CacheValue of(Class<?> type) {
|
||||
return type == null ? ABSENT : new CacheValue(type);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -23,6 +23,7 @@ import java.util.Set;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionException;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
@@ -30,6 +31,7 @@ import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.PersistentPropertyPath;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -44,11 +46,11 @@ import com.mongodb.DBRef;
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class QueryMapper {
|
||||
|
||||
private static final List<String> DEFAULT_ID_NAMES = Arrays.asList("id", "_id");
|
||||
private static final String N_OR_PATTERN = "\\$.*or";
|
||||
|
||||
private final ConversionService conversionService;
|
||||
private final MongoConverter converter;
|
||||
@@ -79,7 +81,7 @@ public class QueryMapper {
|
||||
@SuppressWarnings("deprecation")
|
||||
public DBObject getMappedObject(DBObject query, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (Keyword.isKeyword(query)) {
|
||||
if (isNestedKeyword(query)) {
|
||||
return getMappedKeyword(new Keyword(query), entity);
|
||||
}
|
||||
|
||||
@@ -97,17 +99,17 @@ public class QueryMapper {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (Keyword.isKeyword(key)) {
|
||||
if (isKeyword(key)) {
|
||||
result.putAll(getMappedKeyword(new Keyword(query, key), entity));
|
||||
continue;
|
||||
}
|
||||
|
||||
Field field = entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext);
|
||||
Field field = createPropertyField(entity, key, mappingContext);
|
||||
|
||||
Object rawValue = query.get(key);
|
||||
String newKey = field.getMappedKey();
|
||||
|
||||
if (Keyword.isKeyword(rawValue) && !field.isIdField()) {
|
||||
if (isNestedKeyword(rawValue) && !field.isIdField()) {
|
||||
Keyword keyword = new Keyword((DBObject) rawValue);
|
||||
result.put(newKey, getMappedKeyword(field, keyword));
|
||||
} else {
|
||||
@@ -118,19 +120,30 @@ public class QueryMapper {
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param entity
|
||||
* @param key
|
||||
* @param mappingContext
|
||||
* @return
|
||||
*/
|
||||
protected Field createPropertyField(MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
return entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the given {@link DBObject} representing a keyword by mapping the keyword's value.
|
||||
*
|
||||
* @param query the {@link DBObject} representing a keyword (e.g. {@code $ne : … } )
|
||||
* @param keyword the {@link DBObject} representing a keyword (e.g. {@code $ne : … } )
|
||||
* @param entity
|
||||
* @return
|
||||
*/
|
||||
private DBObject getMappedKeyword(Keyword query, MongoPersistentEntity<?> entity) {
|
||||
private DBObject getMappedKeyword(Keyword keyword, MongoPersistentEntity<?> entity) {
|
||||
|
||||
// $or/$nor
|
||||
if (query.key.matches(N_OR_PATTERN) || query.value instanceof Iterable) {
|
||||
if (keyword.isOrOrNor() || keyword.hasIterableValue()) {
|
||||
|
||||
Iterable<?> conditions = (Iterable<?>) query.value;
|
||||
Iterable<?> conditions = keyword.getValue();
|
||||
BasicDBList newConditions = new BasicDBList();
|
||||
|
||||
for (Object condition : conditions) {
|
||||
@@ -138,10 +151,10 @@ public class QueryMapper {
|
||||
: convertSimpleOrDBObject(condition, entity));
|
||||
}
|
||||
|
||||
return new BasicDBObject(query.key, newConditions);
|
||||
return new BasicDBObject(keyword.getKey(), newConditions);
|
||||
}
|
||||
|
||||
return new BasicDBObject(query.key, convertSimpleOrDBObject(query.value, entity));
|
||||
return new BasicDBObject(keyword.getKey(), convertSimpleOrDBObject(keyword.getValue(), entity));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -154,10 +167,12 @@ public class QueryMapper {
|
||||
private DBObject getMappedKeyword(Field property, Keyword keyword) {
|
||||
|
||||
boolean needsAssociationConversion = property.isAssociation() && !keyword.isExists();
|
||||
Object value = needsAssociationConversion ? convertAssociation(keyword.value, property.getProperty())
|
||||
: getMappedValue(property.with(keyword.key), keyword.value);
|
||||
Object value = keyword.getValue();
|
||||
|
||||
return new BasicDBObject(keyword.key, value);
|
||||
Object convertedValue = needsAssociationConversion ? convertAssociation(value, property.getProperty())
|
||||
: getMappedValue(property.with(keyword.getKey()), value);
|
||||
|
||||
return new BasicDBObject(keyword.key, convertedValue);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -175,37 +190,54 @@ public class QueryMapper {
|
||||
|
||||
if (value instanceof DBObject) {
|
||||
DBObject valueDbo = (DBObject) value;
|
||||
DBObject resultDbo = new BasicDBObject(valueDbo.toMap());
|
||||
|
||||
if (valueDbo.containsField("$in") || valueDbo.containsField("$nin")) {
|
||||
String inKey = valueDbo.containsField("$in") ? "$in" : "$nin";
|
||||
List<Object> ids = new ArrayList<Object>();
|
||||
for (Object id : (Iterable<?>) valueDbo.get(inKey)) {
|
||||
ids.add(convertId(id));
|
||||
}
|
||||
valueDbo.put(inKey, ids.toArray(new Object[ids.size()]));
|
||||
resultDbo.put(inKey, ids.toArray(new Object[ids.size()]));
|
||||
} else if (valueDbo.containsField("$ne")) {
|
||||
valueDbo.put("$ne", convertId(valueDbo.get("$ne")));
|
||||
resultDbo.put("$ne", convertId(valueDbo.get("$ne")));
|
||||
} else {
|
||||
return getMappedObject((DBObject) value, null);
|
||||
return getMappedObject(resultDbo, null);
|
||||
}
|
||||
|
||||
return valueDbo;
|
||||
return resultDbo;
|
||||
|
||||
} else {
|
||||
return convertId(value);
|
||||
}
|
||||
}
|
||||
|
||||
if (Keyword.isKeyword(value)) {
|
||||
if (isNestedKeyword(value)) {
|
||||
return getMappedKeyword(new Keyword((DBObject) value), null);
|
||||
}
|
||||
|
||||
if (documentField.isAssociation()) {
|
||||
if (isAssociationConversionNecessary(documentField, value)) {
|
||||
return convertAssociation(value, documentField.getProperty());
|
||||
}
|
||||
|
||||
return convertSimpleOrDBObject(value, documentField.getPropertyEntity());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link Field} represents an association reference that together with the given value
|
||||
* requires conversion to a {@link org.springframework.data.mongodb.core.mapping.DBRef} object. We check whether the
|
||||
* type of the given value is compatible with the type of the given document field in order to deal with potential
|
||||
* query field exclusions, since MongoDB uses the {@code int} {@literal 0} as an indicator for an excluded field.
|
||||
*
|
||||
* @param documentField
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
private boolean isAssociationConversionNecessary(Field documentField, Object value) {
|
||||
return documentField.isAssociation() && value != null
|
||||
&& documentField.getProperty().getActualType().isAssignableFrom(value.getClass());
|
||||
}
|
||||
|
||||
/**
|
||||
* Retriggers mapping if the given source is a {@link DBObject} or simply invokes the
|
||||
*
|
||||
@@ -247,7 +279,8 @@ public class QueryMapper {
|
||||
*/
|
||||
private Object convertAssociation(Object source, MongoPersistentProperty property) {
|
||||
|
||||
if (property == null || !property.isAssociation()) {
|
||||
if (property == null || !property.isAssociation() || source == null || source instanceof DBRef
|
||||
|| !property.isEntity()) {
|
||||
return source;
|
||||
}
|
||||
|
||||
@@ -269,7 +302,7 @@ public class QueryMapper {
|
||||
return result;
|
||||
}
|
||||
|
||||
return source == null || source instanceof DBRef ? source : converter.toDBRef(source, property);
|
||||
return converter.toDBRef(source, property);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -289,6 +322,39 @@ public class QueryMapper {
|
||||
return delegateConvertToMongoType(id, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link Object} is a keyword, i.e. if it's a {@link DBObject} with a keyword key.
|
||||
*
|
||||
* @param candidate
|
||||
* @return
|
||||
*/
|
||||
protected boolean isNestedKeyword(Object candidate) {
|
||||
|
||||
if (!(candidate instanceof BasicDBObject)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BasicDBObject dbObject = (BasicDBObject) candidate;
|
||||
Set<String> keys = dbObject.keySet();
|
||||
|
||||
if (keys.size() != 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return isKeyword(keys.iterator().next().toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link String} is a MongoDB keyword. The default implementation will check against the
|
||||
* set of registered keywords returned by {@link #getKeywords()}.
|
||||
*
|
||||
* @param candidate
|
||||
* @return
|
||||
*/
|
||||
protected boolean isKeyword(String candidate) {
|
||||
return candidate.startsWith("$");
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object to capture a query keyword representation.
|
||||
*
|
||||
@@ -296,8 +362,10 @@ public class QueryMapper {
|
||||
*/
|
||||
private static class Keyword {
|
||||
|
||||
String key;
|
||||
Object value;
|
||||
private static final String N_OR_PATTERN = "\\$.*or";
|
||||
|
||||
private final String key;
|
||||
private final Object value;
|
||||
|
||||
public Keyword(DBObject source, String key) {
|
||||
this.key = key;
|
||||
@@ -322,25 +390,21 @@ public class QueryMapper {
|
||||
return "$exists".equalsIgnoreCase(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given value actually represents a keyword. If this returns {@literal true} it's safe to call
|
||||
* the constructor.
|
||||
*
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
public static boolean isKeyword(Object value) {
|
||||
public boolean isOrOrNor() {
|
||||
return key.matches(N_OR_PATTERN);
|
||||
}
|
||||
|
||||
if (value instanceof String) {
|
||||
return ((String) value).startsWith("$");
|
||||
}
|
||||
public boolean hasIterableValue() {
|
||||
return value instanceof Iterable;
|
||||
}
|
||||
|
||||
if (!(value instanceof DBObject)) {
|
||||
return false;
|
||||
}
|
||||
public String getKey() {
|
||||
return key;
|
||||
}
|
||||
|
||||
DBObject dbObject = (DBObject) value;
|
||||
return dbObject.keySet().size() == 1 && dbObject.keySet().iterator().next().startsWith("$");
|
||||
@SuppressWarnings("unchecked")
|
||||
public <T> T getValue() {
|
||||
return (T) value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -349,7 +413,7 @@ public class QueryMapper {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class Field {
|
||||
protected static class Field {
|
||||
|
||||
private static final String ID_KEY = "_id";
|
||||
|
||||
@@ -426,12 +490,14 @@ public class QueryMapper {
|
||||
* Extension of {@link DocumentField} to be backed with mapping metadata.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
private static class MetadataBackedField extends Field {
|
||||
protected static class MetadataBackedField extends Field {
|
||||
|
||||
private final MongoPersistentEntity<?> entity;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final MongoPersistentProperty property;
|
||||
private final PersistentPropertyPath<MongoPersistentProperty> path;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MetadataBackedField} with the given name, {@link MongoPersistentEntity} and
|
||||
@@ -451,7 +517,7 @@ public class QueryMapper {
|
||||
this.entity = entity;
|
||||
this.mappingContext = context;
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> path = getPath(name);
|
||||
this.path = getPath(name);
|
||||
this.property = path == null ? null : path.getLeafProperty();
|
||||
}
|
||||
|
||||
@@ -516,19 +582,33 @@ public class QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
public String getMappedKey() {
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> path = getPath(name);
|
||||
return path == null ? name : path.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE);
|
||||
return path == null ? name : path.toDotPath(getPropertyConverter());
|
||||
}
|
||||
|
||||
private PersistentPropertyPath<MongoPersistentProperty> getPath(String name) {
|
||||
/**
|
||||
* Returns the {@link PersistentPropertyPath} for the given <code>pathExpression</code>.
|
||||
*
|
||||
* @param pathExpression
|
||||
* @return
|
||||
*/
|
||||
private PersistentPropertyPath<MongoPersistentProperty> getPath(String pathExpression) {
|
||||
|
||||
try {
|
||||
PropertyPath path = PropertyPath.from(name, entity.getTypeInformation());
|
||||
PropertyPath path = PropertyPath.from(pathExpression, entity.getTypeInformation());
|
||||
return mappingContext.getPersistentPropertyPath(path);
|
||||
} catch (PropertyReferenceException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the {@link Converter} to be used to created the mapped key. Default implementation will use
|
||||
* {@link PropertyToFieldNameConverter}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return PropertyToFieldNameConverter.INSTANCE;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,12 +15,21 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* A subclass of {@link QueryMapper} that retains type information on the mongo types.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class UpdateMapper extends QueryMapper {
|
||||
|
||||
@@ -49,4 +58,90 @@ public class UpdateMapper extends QueryMapper {
|
||||
return entity == null ? super.delegateConvertToMongoType(source, null) : converter.convertToMongoType(source,
|
||||
entity.getTypeInformation());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper#createPropertyField(org.springframework.data.mongodb.core.mapping.MongoPersistentEntity, java.lang.String, org.springframework.data.mapping.context.MappingContext)
|
||||
*/
|
||||
@Override
|
||||
protected Field createPropertyField(MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
return entity == null ? super.createPropertyField(entity, key, mappingContext) : //
|
||||
new MetadataBackedUpdateField(entity, key, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link MetadataBackedField} that handles {@literal $} paths inside a field key. We clean up an update key
|
||||
* containing a {@literal $} before handing it to the super class to make sure property lookups and transformations
|
||||
* continue to work as expected. We provide a custom property converter to re-applied the cleaned up {@literal $}s
|
||||
* when constructing the mapped key.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class MetadataBackedUpdateField extends MetadataBackedField {
|
||||
|
||||
private final String key;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MetadataBackedField} with the given {@link MongoPersistentEntity}, key and
|
||||
* {@link MappingContext}. We clean up the key before handing it up to the super class to make sure it continues to
|
||||
* work as expected.
|
||||
*
|
||||
* @param entity must not be {@literal null}.
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
*/
|
||||
public MetadataBackedUpdateField(MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
super(key.replaceAll("\\.\\$", ""), entity, mappingContext);
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getPropertyConverter()
|
||||
*/
|
||||
@Override
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return new UpdatePropertyConverter(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Special {@link Converter} for {@link MongoPersistentProperty} instances that will concatenate the {@literal $}
|
||||
* contained in the source update key.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
private static class UpdatePropertyConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final Iterator<String> iterator;
|
||||
|
||||
/**
|
||||
* Creates a new {@link UpdatePropertyConverter} with the given update key.
|
||||
*
|
||||
* @param updateKey must not be {@literal null} or empty.
|
||||
*/
|
||||
public UpdatePropertyConverter(String updateKey) {
|
||||
|
||||
Assert.hasText(updateKey, "Update key must not be null or empty!");
|
||||
|
||||
this.iterator = Arrays.asList(updateKey.split("\\.")).iterator();
|
||||
this.iterator.next();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty property) {
|
||||
|
||||
String mappedName = PropertyToFieldNameConverter.INSTANCE.convert(property);
|
||||
return iterator.hasNext() && iterator.next().equals("$") ? String.format("%s.$", mappedName) : mappedName;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,6 +31,7 @@ import com.mongodb.DBObject;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class NearQuery {
|
||||
|
||||
@@ -143,10 +144,12 @@ public class NearQuery {
|
||||
/**
|
||||
* Configures the {@link Pageable} to use.
|
||||
*
|
||||
* @param pageable
|
||||
* @param pageable must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public NearQuery with(Pageable pageable) {
|
||||
|
||||
Assert.notNull(pageable, "Pageable must not be 'null'.");
|
||||
this.num = pageable.getOffset() + pageable.getPageSize();
|
||||
this.skip = pageable.getOffset();
|
||||
return this;
|
||||
@@ -311,13 +314,18 @@ public class NearQuery {
|
||||
/**
|
||||
* Adds an actual query to the {@link NearQuery} to restrict the objects considered for the actual near operation.
|
||||
*
|
||||
* @param query
|
||||
* @param query must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public NearQuery query(Query query) {
|
||||
|
||||
Assert.notNull(query, "Cannot apply 'null' query on NearQuery.");
|
||||
this.query = query;
|
||||
this.skip = query.getSkip();
|
||||
this.num = query.getLimit();
|
||||
|
||||
if (query.getLimit() != 0) {
|
||||
this.num = query.getLimit();
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -31,6 +31,7 @@ import com.mongodb.gridfs.GridFSFile;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public interface GridFsOperations extends ResourcePatternResolver {
|
||||
|
||||
@@ -126,7 +127,7 @@ public interface GridFsOperations extends ResourcePatternResolver {
|
||||
* Returns all {@link GridFsResource} with the given file name.
|
||||
*
|
||||
* @param filename
|
||||
* @return
|
||||
* @return the resource if it exists or {@literal null}.
|
||||
* @see ResourcePatternResolver#getResource(String)
|
||||
*/
|
||||
GridFsResource getResource(String filename);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -43,6 +43,7 @@ import com.mongodb.gridfs.GridFSInputFile;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver {
|
||||
|
||||
@@ -190,7 +191,9 @@ public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver
|
||||
* @see org.springframework.core.io.ResourceLoader#getResource(java.lang.String)
|
||||
*/
|
||||
public GridFsResource getResource(String location) {
|
||||
return new GridFsResource(findOne(query(whereFilename().is(location))));
|
||||
|
||||
GridFSDBFile file = findOne(query(whereFilename().is(location)));
|
||||
return file != null ? new GridFsResource(file) : null;
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -80,7 +80,7 @@ public @interface EnableMongoRepositories {
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String repositoryImplementationPostfix() default "";
|
||||
String repositoryImplementationPostfix() default "Impl";
|
||||
|
||||
/**
|
||||
* Configures the location of where to find the Spring Data named queries properties file. Will default to
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -79,22 +79,24 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(method, parameters);
|
||||
Query query = createQuery(new ConvertingParameterAccessor(operations.getConverter(), accessor));
|
||||
|
||||
Object result = null;
|
||||
|
||||
if (method.isGeoNearQuery() && method.isPageQuery()) {
|
||||
|
||||
MongoParameterAccessor countAccessor = new MongoParametersParameterAccessor(method, parameters);
|
||||
Query countQuery = createCountQuery(new ConvertingParameterAccessor(operations.getConverter(), countAccessor));
|
||||
|
||||
return new GeoNearExecution(accessor).execute(query, countQuery);
|
||||
result = new GeoNearExecution(accessor).execute(query, countQuery);
|
||||
} else if (method.isGeoNearQuery()) {
|
||||
return new GeoNearExecution(accessor).execute(query);
|
||||
} else if (method.isCollectionQuery()) {
|
||||
return new CollectionExecution(accessor.getPageable()).execute(query);
|
||||
result = new CollectionExecution(accessor.getPageable()).execute(query);
|
||||
} else if (method.isPageQuery()) {
|
||||
return new PagedExecution(accessor.getPageable()).execute(query);
|
||||
result = new PagedExecution(accessor.getPageable()).execute(query);
|
||||
} else {
|
||||
result = new SingleEntityExecution(isCountQuery()).execute(query);
|
||||
}
|
||||
|
||||
Object result = new SingleEntityExecution(isCountQuery()).execute(query);
|
||||
|
||||
if (result == null) {
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -28,6 +28,7 @@ import com.mongodb.DBObject;
|
||||
import com.mysema.query.mongodb.MongodbSerializer;
|
||||
import com.mysema.query.types.Path;
|
||||
import com.mysema.query.types.PathMetadata;
|
||||
import com.mysema.query.types.PathType;
|
||||
|
||||
/**
|
||||
* Custom {@link MongodbSerializer} to take mapping information into account when building keys for constraints.
|
||||
@@ -61,6 +62,10 @@ class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
@Override
|
||||
protected String getKeyForPath(Path<?> expr, PathMetadata<?> metadata) {
|
||||
|
||||
if (!metadata.getPathType().equals(PathType.PROPERTY)) {
|
||||
return super.getKeyForPath(expr, metadata);
|
||||
}
|
||||
|
||||
Path<?> parent = metadata.getParent();
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(parent.getType());
|
||||
MongoPersistentProperty property = entity.getPersistentProperty(metadata.getName());
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoFactoryBean"><![CDATA[
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB'.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
@@ -209,7 +209,7 @@ The base package in which to scan for entities annotated with @Document
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoTemplate">
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
@@ -248,7 +248,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoTemplate"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
@@ -259,7 +259,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoFactoryBean"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoFactoryBean"><![CDATA[
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB'.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
@@ -197,7 +197,7 @@ The base package in which to scan for entities annotated with @Document
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoTemplate">
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
@@ -246,7 +246,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoTemplate"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
@@ -257,7 +257,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoFactoryBean"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoFactoryBean"><![CDATA[
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB'.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
@@ -197,7 +197,7 @@ The base package in which to scan for entities annotated with @Document
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoTemplate">
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
@@ -261,7 +261,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoTemplate"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
@@ -272,7 +272,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoFactoryBean"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoFactoryBean"><![CDATA[
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB'.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
@@ -276,7 +276,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoTemplate"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
@@ -287,7 +287,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoFactoryBean"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
|
||||
@@ -0,0 +1,48 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* Sample configuration class in default package.
|
||||
*
|
||||
* @see DATAMONGO-877
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@Configuration
|
||||
public class ConfigClassInDefaultPackage extends AbstractMongoConfiguration {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.config.AbstractMongoConfiguration#getDatabaseName()
|
||||
*/
|
||||
@Override
|
||||
protected String getDatabaseName() {
|
||||
return "default";
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.config.AbstractMongoConfiguration#mongo()
|
||||
*/
|
||||
@Override
|
||||
public Mongo mongo() throws Exception {
|
||||
return new MongoClient();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
import org.junit.Test;
|
||||
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
|
||||
|
||||
/**
|
||||
* Unit test for {@link ConfigClassInDefaultPackage}.
|
||||
*
|
||||
* @see DATAMONGO-877
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class ConfigClassInDefaultPackageUnitTests {
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-877
|
||||
*/
|
||||
@Test
|
||||
public void loadsConfigClassFromDefaultPackage() {
|
||||
new AnnotationConfigApplicationContext(ConfigClassInDefaultPackage.class).close();
|
||||
}
|
||||
}
|
||||
@@ -35,7 +35,7 @@ import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
|
||||
public class AuditingIntegrationTests {
|
||||
|
||||
@Test
|
||||
public void enablesAuditingAndSetsPropertiesAccordingly() {
|
||||
public void enablesAuditingAndSetsPropertiesAccordingly() throws Exception {
|
||||
|
||||
ApplicationContext context = new ClassPathXmlApplicationContext("auditing.xml", getClass());
|
||||
|
||||
@@ -46,6 +46,7 @@ public class AuditingIntegrationTests {
|
||||
assertThat(entity.created, is(notNullValue()));
|
||||
assertThat(entity.modified, is(entity.created));
|
||||
|
||||
Thread.sleep(10);
|
||||
entity.id = 1L;
|
||||
event = new BeforeConvertEvent<Entity>(entity);
|
||||
context.publishEvent(event);
|
||||
|
||||
@@ -19,6 +19,7 @@ package org.springframework.data.mongodb.config;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.net.InetAddress;
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Ignore;
|
||||
@@ -40,8 +41,7 @@ import com.mongodb.ServerAddress;
|
||||
@ContextConfiguration
|
||||
public class MongoNamespaceReplicaSetTests {
|
||||
|
||||
@Autowired
|
||||
private ApplicationContext ctx;
|
||||
@Autowired private ApplicationContext ctx;
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
@@ -53,7 +53,10 @@ public class MongoNamespaceReplicaSetTests {
|
||||
List<ServerAddress> replicaSetSeeds = (List<ServerAddress>) ReflectionTestUtils.getField(mfb, "replicaSetSeeds");
|
||||
|
||||
assertThat(replicaSetSeeds, is(notNullValue()));
|
||||
assertThat(replicaSetSeeds, hasItems(new ServerAddress("127.0.0.1", 10001), new ServerAddress("localhost", 10002)));
|
||||
assertThat(
|
||||
replicaSetSeeds,
|
||||
hasItems(new ServerAddress(InetAddress.getByName("127.0.0.1"), 10001),
|
||||
new ServerAddress(InetAddress.getByName("localhost"), 10002)));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
||||
@@ -18,12 +18,15 @@ package org.springframework.data.mongodb.config;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.net.InetAddress;
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
|
||||
import com.mongodb.ServerAddress;
|
||||
|
||||
@@ -35,6 +38,8 @@ import com.mongodb.ServerAddress;
|
||||
*/
|
||||
public class ServerAddressPropertyEditorUnitTests {
|
||||
|
||||
@Rule public ExpectedException expectedException = ExpectedException.none();
|
||||
|
||||
ServerAddressPropertyEditor editor;
|
||||
|
||||
@Before
|
||||
@@ -81,11 +86,111 @@ public class ServerAddressPropertyEditorUnitTests {
|
||||
assertNull(editor.getValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-808
|
||||
*/
|
||||
@Test
|
||||
public void handleIPv6HostaddressLoopbackShort() throws UnknownHostException {
|
||||
|
||||
String hostAddress = "::1";
|
||||
editor.setAsText(hostAddress);
|
||||
|
||||
assertSingleAddressWithPort(hostAddress, null, editor.getValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-808
|
||||
*/
|
||||
@Test
|
||||
public void handleIPv6HostaddressLoopbackShortWithPort() throws UnknownHostException {
|
||||
|
||||
String hostAddress = "::1";
|
||||
int port = 27017;
|
||||
editor.setAsText(hostAddress + ":" + port);
|
||||
|
||||
assertSingleAddressWithPort(hostAddress, port, editor.getValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* Here we detect no port since the last segment of the address contains leading zeros.
|
||||
*
|
||||
* @see DATAMONGO-808
|
||||
*/
|
||||
@Test
|
||||
public void handleIPv6HostaddressLoopbackLong() throws UnknownHostException {
|
||||
|
||||
String hostAddress = "0000:0000:0000:0000:0000:0000:0000:0001";
|
||||
editor.setAsText(hostAddress);
|
||||
|
||||
assertSingleAddressWithPort(hostAddress, null, editor.getValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-808
|
||||
*/
|
||||
@Test
|
||||
public void handleIPv6HostaddressLoopbackLongWithBrackets() throws UnknownHostException {
|
||||
|
||||
String hostAddress = "[0000:0000:0000:0000:0000:0000:0000:0001]";
|
||||
editor.setAsText(hostAddress);
|
||||
|
||||
assertSingleAddressWithPort(hostAddress, null, editor.getValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* We can't tell whether the last part of the hostAddress represents a port or not.
|
||||
*
|
||||
* @see DATAMONGO-808
|
||||
*/
|
||||
@Test
|
||||
public void shouldFailToHandleAmbiguousIPv6HostaddressLongWithoutPortAndWithoutBrackets() throws UnknownHostException {
|
||||
|
||||
expectedException.expect(IllegalArgumentException.class);
|
||||
|
||||
String hostAddress = "0000:0000:0000:0000:0000:0000:0000:128";
|
||||
editor.setAsText(hostAddress);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-808
|
||||
*/
|
||||
@Test
|
||||
public void handleIPv6HostaddressExampleAddressWithPort() throws UnknownHostException {
|
||||
|
||||
String hostAddress = "0000:0000:0000:0000:0000:0000:0000:0001";
|
||||
int port = 27017;
|
||||
editor.setAsText(hostAddress + ":" + port);
|
||||
|
||||
assertSingleAddressWithPort(hostAddress, port, editor.getValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-808
|
||||
*/
|
||||
@Test
|
||||
public void handleIPv6HostaddressExampleAddressInBracketsWithPort() throws UnknownHostException {
|
||||
|
||||
String hostAddress = "[0000:0000:0000:0000:0000:0000:0000:0001]";
|
||||
int port = 27017;
|
||||
editor.setAsText(hostAddress + ":" + port);
|
||||
|
||||
assertSingleAddressWithPort(hostAddress, port, editor.getValue());
|
||||
}
|
||||
|
||||
private static void assertSingleAddressOfLocalhost(Object result) throws UnknownHostException {
|
||||
assertSingleAddressWithPort("localhost", null, result);
|
||||
}
|
||||
|
||||
private static void assertSingleAddressWithPort(String hostAddress, Integer port, Object result)
|
||||
throws UnknownHostException {
|
||||
|
||||
assertThat(result, is(instanceOf(ServerAddress[].class)));
|
||||
Collection<ServerAddress> addresses = Arrays.asList((ServerAddress[]) result);
|
||||
assertThat(addresses, hasSize(1));
|
||||
assertThat(addresses, hasItem(new ServerAddress("localhost")));
|
||||
if (port == null) {
|
||||
assertThat(addresses, hasItem(new ServerAddress(InetAddress.getByName(hostAddress))));
|
||||
} else {
|
||||
assertThat(addresses, hasItem(new ServerAddress(InetAddress.getByName(hostAddress), port)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -70,7 +70,7 @@ public abstract class DBObjectUtils {
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static <T> T getTypedValue(DBObject source, String key, Class<T> type) {
|
||||
public static <T> T getTypedValue(DBObject source, String key, Class<T> type) {
|
||||
|
||||
Object value = source.get(key);
|
||||
assertThat(value, is(notNullValue()));
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -51,6 +51,7 @@ import org.springframework.dao.OptimisticLockingFailureException;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.annotation.PersistenceConstructor;
|
||||
import org.springframework.data.annotation.Version;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
@@ -70,6 +71,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBCollection;
|
||||
@@ -90,6 +92,8 @@ import com.mongodb.WriteResult;
|
||||
* @author Amol Nayak
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
* @author Komi Innocent
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:infrastructure.xml")
|
||||
@@ -159,6 +163,8 @@ public class MongoTemplateTests {
|
||||
template.dropCollection(ObjectWith3AliasedFields.class);
|
||||
template.dropCollection(ObjectWith3AliasedFieldsAndNestedAddress.class);
|
||||
template.dropCollection(BaseDoc.class);
|
||||
template.dropCollection(ObjectWithEnumValue.class);
|
||||
template.dropCollection(DocumentWithCollection.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -212,6 +218,7 @@ public class MongoTemplateTests {
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-480
|
||||
* @see DATAMONGO-799
|
||||
*/
|
||||
@Test
|
||||
public void throwsExceptionForUpdateWithInvalidPushOperator() {
|
||||
@@ -227,8 +234,10 @@ public class MongoTemplateTests {
|
||||
|
||||
thrown.expect(DataIntegrityViolationException.class);
|
||||
thrown.expectMessage("Execution");
|
||||
thrown.expectMessage("$push");
|
||||
thrown.expectMessage("UPDATE");
|
||||
thrown.expectMessage("array");
|
||||
thrown.expectMessage("firstName");
|
||||
thrown.expectMessage("failed");
|
||||
|
||||
Query query = new Query(Criteria.where("firstName").is("Amol"));
|
||||
Update upd = new Update().push("age", 29);
|
||||
@@ -333,6 +342,43 @@ public class MongoTemplateTests {
|
||||
assertThat(field, is(IndexField.create("age", Direction.DESC)));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-746
|
||||
*/
|
||||
@Test
|
||||
public void testReadIndexInfoForIndicesCreatedViaMongoShellCommands() throws Exception {
|
||||
|
||||
String command = "db." + template.getCollectionName(Person.class)
|
||||
+ ".ensureIndex({'age':-1}, {'unique':true, 'sparse':true})";
|
||||
template.indexOps(Person.class).dropAllIndexes();
|
||||
|
||||
assertThat(template.indexOps(Person.class).getIndexInfo().isEmpty(), is(true));
|
||||
factory.getDb().eval(command);
|
||||
|
||||
List<DBObject> indexInfo = template.getCollection(template.getCollectionName(Person.class)).getIndexInfo();
|
||||
String indexKey = null;
|
||||
boolean unique = false;
|
||||
|
||||
for (DBObject ix : indexInfo) {
|
||||
if ("age_-1".equals(ix.get("name"))) {
|
||||
indexKey = ix.get("key").toString();
|
||||
unique = (Boolean) ix.get("unique");
|
||||
}
|
||||
}
|
||||
|
||||
assertThat(indexKey, is("{ \"age\" : -1.0}"));
|
||||
assertThat(unique, is(true));
|
||||
|
||||
IndexInfo info = template.indexOps(Person.class).getIndexInfo().get(1);
|
||||
assertThat(info.isUnique(), is(true));
|
||||
assertThat(info.isSparse(), is(true));
|
||||
|
||||
List<IndexField> indexFields = info.getIndexFields();
|
||||
IndexField field = indexFields.get(0);
|
||||
|
||||
assertThat(field, is(IndexField.create("age", Direction.DESC)));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testProperHandlingOfDifferentIdTypesWithMappingMongoConverter() throws Exception {
|
||||
testProperHandlingOfDifferentIdTypes(this.mappingTemplate);
|
||||
@@ -1755,12 +1801,12 @@ public class MongoTemplateTests {
|
||||
|
||||
Document doc = new Document();
|
||||
doc.id = "4711";
|
||||
doc.model = new ModelA().withValue("foo");
|
||||
doc.model = new ModelA("foo");
|
||||
template.insert(doc);
|
||||
|
||||
Query query = new Query(Criteria.where("id").is(doc.id));
|
||||
String newModelValue = "bar";
|
||||
Update update = Update.update("model", new ModelA().withValue(newModelValue));
|
||||
Update update = Update.update("model", new ModelA(newModelValue));
|
||||
template.updateFirst(query, update, Document.class);
|
||||
|
||||
Document result = template.findOne(query, Document.class);
|
||||
@@ -1996,25 +2042,266 @@ public class MongoTemplateTests {
|
||||
assertThat(result.get(2).getClass(), is((Object) VerySpecialDoc.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-771
|
||||
*/
|
||||
@Test
|
||||
public void allowInsertWithPlainJsonString() {
|
||||
|
||||
String id = "4711";
|
||||
String value = "bubu";
|
||||
String json = String.format("{_id:%s, field: '%s'}", id, value);
|
||||
|
||||
template.insert(json, "sample");
|
||||
List<Sample> result = template.findAll(Sample.class);
|
||||
|
||||
assertThat(result.size(), is(1));
|
||||
assertThat(result.get(0).id, is(id));
|
||||
assertThat(result.get(0).field, is(value));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-816
|
||||
*/
|
||||
@Test
|
||||
public void shouldExecuteQueryShouldMapQueryBeforeQueryExecution() {
|
||||
|
||||
ObjectWithEnumValue o = new ObjectWithEnumValue();
|
||||
o.value = EnumValue.VALUE2;
|
||||
template.save(o);
|
||||
|
||||
Query q = Query.query(Criteria.where("value").in(EnumValue.VALUE2));
|
||||
|
||||
template.executeQuery(q, StringUtils.uncapitalize(ObjectWithEnumValue.class.getSimpleName()),
|
||||
new DocumentCallbackHandler() {
|
||||
|
||||
@Override
|
||||
public void processDocument(DBObject dbObject) throws MongoException, DataAccessException {
|
||||
|
||||
assertThat(dbObject, is(notNullValue()));
|
||||
|
||||
ObjectWithEnumValue result = template.getConverter().read(ObjectWithEnumValue.class, dbObject);
|
||||
|
||||
assertThat(result.value, is(EnumValue.VALUE2));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-811
|
||||
*/
|
||||
@Test
|
||||
public void updateFirstShouldIncreaseVersionForVersionedEntity() {
|
||||
|
||||
VersionedPerson person = new VersionedPerson();
|
||||
person.firstname = "Dave";
|
||||
person.lastname = "Matthews";
|
||||
template.save(person);
|
||||
assertThat(person.id, is(notNullValue()));
|
||||
|
||||
Query qry = query(where("id").is(person.id));
|
||||
VersionedPerson personAfterFirstSave = template.findOne(qry, VersionedPerson.class);
|
||||
assertThat(personAfterFirstSave.version, is(0L));
|
||||
|
||||
template.updateFirst(qry, Update.update("lastname", "Bubu"), VersionedPerson.class);
|
||||
|
||||
VersionedPerson personAfterUpdateFirst = template.findOne(qry, VersionedPerson.class);
|
||||
assertThat(personAfterUpdateFirst.version, is(1L));
|
||||
assertThat(personAfterUpdateFirst.lastname, is("Bubu"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-811
|
||||
*/
|
||||
@Test
|
||||
public void updateFirstShouldIncreaseVersionOnlyForFirstMatchingEntity() {
|
||||
|
||||
VersionedPerson person1 = new VersionedPerson();
|
||||
person1.firstname = "Dave";
|
||||
|
||||
VersionedPerson person2 = new VersionedPerson();
|
||||
person2.firstname = "Dave";
|
||||
|
||||
template.save(person1);
|
||||
template.save(person2);
|
||||
Query q = query(where("id").in(person1.id, person2.id));
|
||||
|
||||
template.updateFirst(q, Update.update("lastname", "Metthews"), VersionedPerson.class);
|
||||
|
||||
for (VersionedPerson p : template.find(q, VersionedPerson.class)) {
|
||||
if ("Metthews".equals(p.lastname)) {
|
||||
assertThat(p.version, equalTo(Long.valueOf(1)));
|
||||
} else {
|
||||
assertThat(p.version, equalTo(Long.valueOf(0)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-811
|
||||
*/
|
||||
@Test
|
||||
public void updateMultiShouldIncreaseVersionOfAllUpdatedEntities() {
|
||||
|
||||
VersionedPerson person1 = new VersionedPerson();
|
||||
person1.firstname = "Dave";
|
||||
|
||||
VersionedPerson person2 = new VersionedPerson();
|
||||
person2.firstname = "Dave";
|
||||
|
||||
template.save(person1);
|
||||
template.save(person2);
|
||||
|
||||
Query q = query(where("id").in(person1.id, person2.id));
|
||||
template.updateMulti(q, Update.update("lastname", "Metthews"), VersionedPerson.class);
|
||||
|
||||
for (VersionedPerson p : template.find(q, VersionedPerson.class)) {
|
||||
assertThat(p.version, equalTo(Long.valueOf(1)));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-686
|
||||
*/
|
||||
@Test
|
||||
public void itShouldBePossibleToReuseAnExistingQuery() {
|
||||
|
||||
Sample sample = new Sample();
|
||||
sample.id = "42";
|
||||
sample.field = "A";
|
||||
|
||||
template.save(sample);
|
||||
|
||||
Query query = new Query();
|
||||
query.addCriteria(where("_id").in("42", "43"));
|
||||
|
||||
assertThat(template.count(query, Sample.class), is(1L));
|
||||
|
||||
query.with(new PageRequest(0, 10));
|
||||
query.with(new Sort("field"));
|
||||
|
||||
assertThat(template.find(query, Sample.class), is(not(empty())));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-807
|
||||
*/
|
||||
@Test
|
||||
public void findAndModifyShouldRetrainTypeInformationWithinUpdatedType() {
|
||||
|
||||
Document document = new Document();
|
||||
document.model = new ModelA("value1");
|
||||
|
||||
template.save(document);
|
||||
|
||||
Query query = query(where("id").is(document.id));
|
||||
Update update = Update.update("model", new ModelA("value2"));
|
||||
template.findAndModify(query, update, Document.class);
|
||||
|
||||
Document retrieved = template.findOne(query, Document.class);
|
||||
Assert.assertThat(retrieved.model, instanceOf(ModelA.class));
|
||||
Assert.assertThat(retrieved.model.value(), equalTo("value2"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-407
|
||||
*/
|
||||
@Test
|
||||
public void updatesShouldRetainTypeInformationEvenForCollections() {
|
||||
|
||||
DocumentWithCollection doc = new DocumentWithCollection();
|
||||
doc.id = "4711";
|
||||
doc.model = new ArrayList<Model>();
|
||||
doc.model.add(new ModelA("foo"));
|
||||
template.insert(doc);
|
||||
|
||||
Query query = new Query(Criteria.where("id").is(doc.id));
|
||||
query.addCriteria(where("model.value").is("foo"));
|
||||
String newModelValue = "bar";
|
||||
Update update = Update.update("model.$", new ModelA(newModelValue));
|
||||
template.updateFirst(query, update, DocumentWithCollection.class);
|
||||
|
||||
Query findQuery = new Query(Criteria.where("id").is(doc.id));
|
||||
DocumentWithCollection result = template.findOne(findQuery, DocumentWithCollection.class);
|
||||
|
||||
assertThat(result, is(notNullValue()));
|
||||
assertThat(result.id, is(doc.id));
|
||||
assertThat(result.model, is(notNullValue()));
|
||||
assertThat(result.model, hasSize(1));
|
||||
assertThat(result.model.get(0).value(), is(newModelValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONOGO-828
|
||||
*/
|
||||
@Test
|
||||
public void updateFirstShouldDoNothingWhenCalledForEntitiesThatDoNotExist() {
|
||||
|
||||
Query q = query(where("id").is(Long.MIN_VALUE));
|
||||
|
||||
template.updateFirst(q, Update.update("lastname", "supercalifragilisticexpialidocious"), VersionedPerson.class);
|
||||
assertThat(template.findOne(q, VersionedPerson.class), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-773
|
||||
*/
|
||||
@Test
|
||||
public void testShouldSupportQueryWithIncludedDbRefField() {
|
||||
|
||||
Sample sample = new Sample("47111", "foo");
|
||||
template.save(sample);
|
||||
|
||||
DocumentWithDBRefCollection doc = new DocumentWithDBRefCollection();
|
||||
doc.id = "4711";
|
||||
doc.dbRefProperty = sample;
|
||||
|
||||
template.save(doc);
|
||||
|
||||
Query qry = query(where("id").is(doc.id));
|
||||
qry.fields().include("dbRefProperty");
|
||||
|
||||
List<DocumentWithDBRefCollection> result = template.find(qry, DocumentWithDBRefCollection.class);
|
||||
|
||||
assertThat(result, is(notNullValue()));
|
||||
assertThat(result, hasSize(1));
|
||||
assertThat(result.get(0), is(notNullValue()));
|
||||
assertThat(result.get(0).dbRefProperty, is(notNullValue()));
|
||||
assertThat(result.get(0).dbRefProperty.field, is(sample.field));
|
||||
}
|
||||
|
||||
static class DocumentWithDBRefCollection {
|
||||
|
||||
@Id public String id;
|
||||
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef//
|
||||
public List<Sample> dbRefAnnotatedList;
|
||||
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef//
|
||||
public Sample dbRefProperty;
|
||||
}
|
||||
|
||||
static class DocumentWithCollection {
|
||||
|
||||
@Id public String id;
|
||||
public List<Model> model;
|
||||
}
|
||||
|
||||
static interface Model {
|
||||
String value();
|
||||
|
||||
Model withValue(String value);
|
||||
}
|
||||
|
||||
static class ModelA implements Model {
|
||||
|
||||
private String value;
|
||||
|
||||
@Override
|
||||
public String value() {
|
||||
return this.value;
|
||||
ModelA(String value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Model withValue(String value) {
|
||||
this.value = value;
|
||||
return this;
|
||||
public String value() {
|
||||
return this.value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2039,6 +2326,13 @@ public class MongoTemplateTests {
|
||||
|
||||
@Id String id;
|
||||
String field;
|
||||
|
||||
public Sample() {}
|
||||
|
||||
public Sample(String id, String field) {
|
||||
this.id = id;
|
||||
this.field = field;
|
||||
}
|
||||
}
|
||||
|
||||
static class TestClass {
|
||||
@@ -2121,4 +2415,14 @@ public class MongoTemplateTests {
|
||||
static class ObjectWith3AliasedFieldsAndNestedAddress extends ObjectWith3AliasedFields {
|
||||
@Field("adr") Address address;
|
||||
}
|
||||
|
||||
static enum EnumValue {
|
||||
VALUE1, VALUE2, VALUE3
|
||||
}
|
||||
|
||||
static class ObjectWithEnumValue {
|
||||
|
||||
@Id String id;
|
||||
EnumValue value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.domain.Sort.Direction.*;
|
||||
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
|
||||
@@ -23,10 +23,12 @@ import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Scanner;
|
||||
|
||||
import org.joda.time.LocalDateTime;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
@@ -36,6 +38,7 @@ import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.core.io.ClassPathResource;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mongodb.core.CollectionCallback;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
@@ -84,6 +87,8 @@ public class AggregationTests {
|
||||
mongoTemplate.dropCollection(INPUT_COLLECTION);
|
||||
mongoTemplate.dropCollection(Product.class);
|
||||
mongoTemplate.dropCollection(UserWithLikes.class);
|
||||
mongoTemplate.dropCollection(DATAMONGO753.class);
|
||||
mongoTemplate.dropCollection(DATAMONGO788.class);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -439,6 +444,11 @@ public class AggregationTests {
|
||||
.and("netPrice").multiply(2).as("netPriceMul2") //
|
||||
.and("netPrice").divide(1.19).as("netPriceDiv119") //
|
||||
.and("spaceUnits").mod(2).as("spaceUnitsMod2") //
|
||||
.and("spaceUnits").plus("spaceUnits").as("spaceUnitsPlusSpaceUnits") //
|
||||
.and("spaceUnits").minus("spaceUnits").as("spaceUnitsMinusSpaceUnits") //
|
||||
.and("spaceUnits").multiply("spaceUnits").as("spaceUnitsMultiplySpaceUnits") //
|
||||
.and("spaceUnits").divide("spaceUnits").as("spaceUnitsDivideSpaceUnits") //
|
||||
.and("spaceUnits").mod("spaceUnits").as("spaceUnitsModSpaceUnits") //
|
||||
);
|
||||
|
||||
AggregationResults<DBObject> result = mongoTemplate.aggregate(agg, DBObject.class);
|
||||
@@ -452,7 +462,130 @@ public class AggregationTests {
|
||||
assertThat((Double) resultList.get(0).get("netPriceMul2"), is(netPrice * 2));
|
||||
assertThat((Double) resultList.get(0).get("netPriceDiv119"), is(netPrice / 1.19));
|
||||
assertThat((Integer) resultList.get(0).get("spaceUnitsMod2"), is(spaceUnits % 2));
|
||||
assertThat((Integer) resultList.get(0).get("spaceUnitsPlusSpaceUnits"), is(spaceUnits + spaceUnits));
|
||||
assertThat((Integer) resultList.get(0).get("spaceUnitsMinusSpaceUnits"), is(spaceUnits - spaceUnits));
|
||||
assertThat((Integer) resultList.get(0).get("spaceUnitsMultiplySpaceUnits"), is(spaceUnits * spaceUnits));
|
||||
assertThat((Double) resultList.get(0).get("spaceUnitsDivideSpaceUnits"), is((double) (spaceUnits / spaceUnits)));
|
||||
assertThat((Integer) resultList.get(0).get("spaceUnitsModSpaceUnits"), is(spaceUnits % spaceUnits));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-753
|
||||
* @see http
|
||||
* ://stackoverflow.com/questions/18653574/spring-data-mongodb-aggregation-framework-invalid-reference-in-group
|
||||
* -operati
|
||||
*/
|
||||
@Test
|
||||
public void allowsNestedFieldReferencesAsGroupIdsInGroupExpressions() {
|
||||
|
||||
mongoTemplate.insert(new DATAMONGO753().withPDs(new PD("A", 1), new PD("B", 1), new PD("C", 1)));
|
||||
mongoTemplate.insert(new DATAMONGO753().withPDs(new PD("B", 1), new PD("B", 1), new PD("C", 1)));
|
||||
|
||||
TypedAggregation<DATAMONGO753> agg = newAggregation(DATAMONGO753.class, //
|
||||
unwind("pd"), //
|
||||
group("pd.pDch") // the nested field expression
|
||||
.sum("pd.up").as("uplift"), //
|
||||
project("_id", "uplift"));
|
||||
|
||||
AggregationResults<DBObject> result = mongoTemplate.aggregate(agg, DBObject.class);
|
||||
List<DBObject> stats = result.getMappedResults();
|
||||
|
||||
assertThat(stats.size(), is(3));
|
||||
assertThat(stats.get(0).get("_id").toString(), is("C"));
|
||||
assertThat((Integer) stats.get(0).get("uplift"), is(2));
|
||||
assertThat(stats.get(1).get("_id").toString(), is("B"));
|
||||
assertThat((Integer) stats.get(1).get("uplift"), is(3));
|
||||
assertThat(stats.get(2).get("_id").toString(), is("A"));
|
||||
assertThat((Integer) stats.get(2).get("uplift"), is(1));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-753
|
||||
* @see http
|
||||
* ://stackoverflow.com/questions/18653574/spring-data-mongodb-aggregation-framework-invalid-reference-in-group
|
||||
* -operati
|
||||
*/
|
||||
@Test
|
||||
public void aliasesNestedFieldInProjectionImmediately() {
|
||||
|
||||
mongoTemplate.insert(new DATAMONGO753().withPDs(new PD("A", 1), new PD("B", 1), new PD("C", 1)));
|
||||
mongoTemplate.insert(new DATAMONGO753().withPDs(new PD("B", 1), new PD("B", 1), new PD("C", 1)));
|
||||
|
||||
TypedAggregation<DATAMONGO753> agg = newAggregation(DATAMONGO753.class, //
|
||||
unwind("pd"), //
|
||||
project().and("pd.up").as("up"));
|
||||
|
||||
AggregationResults<DBObject> results = mongoTemplate.aggregate(agg, DBObject.class);
|
||||
List<DBObject> mappedResults = results.getMappedResults();
|
||||
|
||||
assertThat(mappedResults, hasSize(6));
|
||||
for (DBObject element : mappedResults) {
|
||||
assertThat(element.get("up"), is((Object) 1));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-788
|
||||
*/
|
||||
@Test
|
||||
public void referencesToGroupIdsShouldBeRenderedProperly() {
|
||||
|
||||
mongoTemplate.insert(new DATAMONGO788(1, 1));
|
||||
mongoTemplate.insert(new DATAMONGO788(1, 1));
|
||||
mongoTemplate.insert(new DATAMONGO788(1, 1));
|
||||
mongoTemplate.insert(new DATAMONGO788(2, 1));
|
||||
mongoTemplate.insert(new DATAMONGO788(2, 1));
|
||||
|
||||
AggregationOperation projectFirst = Aggregation.project("x", "y").and("xField").as("x").and("yField").as("y");
|
||||
AggregationOperation group = Aggregation.group("x", "y").count().as("xPerY");
|
||||
AggregationOperation project = Aggregation.project("xPerY", "x", "y").andExclude("_id");
|
||||
|
||||
TypedAggregation<DATAMONGO788> aggregation = Aggregation.newAggregation(DATAMONGO788.class, projectFirst, group,
|
||||
project);
|
||||
AggregationResults<DBObject> aggResults = mongoTemplate.aggregate(aggregation, DBObject.class);
|
||||
List<DBObject> items = aggResults.getMappedResults();
|
||||
|
||||
assertThat(items.size(), is(2));
|
||||
assertThat((Integer) items.get(0).get("xPerY"), is(2));
|
||||
assertThat((Integer) items.get(0).get("x"), is(2));
|
||||
assertThat((Integer) items.get(0).get("y"), is(1));
|
||||
assertThat((Integer) items.get(1).get("xPerY"), is(3));
|
||||
assertThat((Integer) items.get(1).get("x"), is(1));
|
||||
assertThat((Integer) items.get(1).get("y"), is(1));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-806
|
||||
*/
|
||||
@Test
|
||||
public void shouldAllowGroupByIdFields() {
|
||||
|
||||
mongoTemplate.dropCollection(User.class);
|
||||
|
||||
LocalDateTime now = new LocalDateTime();
|
||||
|
||||
User user1 = new User("u1", new PushMessage("1", "aaa", now.toDate()));
|
||||
User user2 = new User("u2", new PushMessage("2", "bbb", now.minusDays(2).toDate()));
|
||||
User user3 = new User("u3", new PushMessage("3", "ccc", now.minusDays(1).toDate()));
|
||||
|
||||
mongoTemplate.save(user1);
|
||||
mongoTemplate.save(user2);
|
||||
mongoTemplate.save(user3);
|
||||
|
||||
Aggregation agg = newAggregation( //
|
||||
project("id", "msgs"), //
|
||||
unwind("msgs"), //
|
||||
match(where("msgs.createDate").gt(now.minusDays(1).toDate())), //
|
||||
group("id").push("msgs").as("msgs") //
|
||||
);
|
||||
|
||||
AggregationResults<DBObject> results = mongoTemplate.aggregate(agg, User.class, DBObject.class);
|
||||
|
||||
List<DBObject> mappedResults = results.getMappedResults();
|
||||
|
||||
DBObject firstItem = mappedResults.get(0);
|
||||
assertThat(firstItem.get("_id"), is(notNullValue()));
|
||||
assertThat(String.valueOf(firstItem.get("_id")), is("u1"));
|
||||
}
|
||||
|
||||
private void assertLikeStats(LikeStats like, String id, long count) {
|
||||
@@ -502,4 +635,73 @@ public class AggregationTests {
|
||||
assertThat(tagCount.getN(), is(n));
|
||||
}
|
||||
|
||||
static class DATAMONGO753 {
|
||||
PD[] pd;
|
||||
|
||||
DATAMONGO753 withPDs(PD... pds) {
|
||||
this.pd = pds;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
static class PD {
|
||||
String pDch;
|
||||
@org.springframework.data.mongodb.core.mapping.Field("alias") int up;
|
||||
|
||||
public PD(String pDch, int up) {
|
||||
this.pDch = pDch;
|
||||
this.up = up;
|
||||
}
|
||||
}
|
||||
|
||||
static class DATAMONGO788 {
|
||||
|
||||
int x;
|
||||
int y;
|
||||
int xField;
|
||||
int yField;
|
||||
|
||||
public DATAMONGO788() {}
|
||||
|
||||
public DATAMONGO788(int x, int y) {
|
||||
this.x = x;
|
||||
this.xField = x;
|
||||
this.y = y;
|
||||
this.yField = y;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-806
|
||||
*/
|
||||
static class User {
|
||||
|
||||
@Id String id;
|
||||
List<PushMessage> msgs;
|
||||
|
||||
public User() {}
|
||||
|
||||
public User(String id, PushMessage... msgs) {
|
||||
this.id = id;
|
||||
this.msgs = Arrays.asList(msgs);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-806
|
||||
*/
|
||||
static class PushMessage {
|
||||
|
||||
@Id String id;
|
||||
String content;
|
||||
Date createDate;
|
||||
|
||||
public PushMessage() {}
|
||||
|
||||
public PushMessage(String id, String content, Date createDate) {
|
||||
this.id = id;
|
||||
this.content = content;
|
||||
this.createDate = createDate;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,32 +15,150 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.core.DBObjectUtils.*;
|
||||
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link Aggregation}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class AggregationUnitTests {
|
||||
|
||||
public @Rule ExpectedException exception = ExpectedException.none();
|
||||
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void rejectsNullAggregationOperation() {
|
||||
Aggregation.newAggregation((AggregationOperation[]) null);
|
||||
newAggregation((AggregationOperation[]) null);
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void rejectsNullTypedAggregationOperation() {
|
||||
Aggregation.newAggregation(String.class, (AggregationOperation[]) null);
|
||||
newAggregation(String.class, (AggregationOperation[]) null);
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void rejectsNoAggregationOperation() {
|
||||
Aggregation.newAggregation(new AggregationOperation[0]);
|
||||
newAggregation(new AggregationOperation[0]);
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void rejectsNoTypedAggregationOperation() {
|
||||
Aggregation.newAggregation(String.class, new AggregationOperation[0]);
|
||||
newAggregation(String.class, new AggregationOperation[0]);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-753
|
||||
*/
|
||||
@Test
|
||||
public void checkForCorrectFieldScopeTransfer() {
|
||||
|
||||
exception.expect(IllegalArgumentException.class);
|
||||
exception.expectMessage("Invalid reference");
|
||||
exception.expectMessage("'b'");
|
||||
|
||||
newAggregation( //
|
||||
project("a", "b"), //
|
||||
group("a").count().as("cnt"), // a was introduced to the context by the project operation
|
||||
project("cnt", "b") // b was removed from the context by the group operation
|
||||
).toDbObject("foo", Aggregation.DEFAULT_CONTEXT); // -> triggers IllegalArgumentException
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-753
|
||||
*/
|
||||
@Test
|
||||
public void unwindOperationShouldNotChangeAvailableFields() {
|
||||
|
||||
newAggregation( //
|
||||
project("a", "b"), //
|
||||
unwind("a"), //
|
||||
project("a", "b") // b should still be available
|
||||
).toDbObject("foo", Aggregation.DEFAULT_CONTEXT);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-753
|
||||
*/
|
||||
@Test
|
||||
public void matchOperationShouldNotChangeAvailableFields() {
|
||||
|
||||
newAggregation( //
|
||||
project("a", "b"), //
|
||||
match(where("a").gte(1)), //
|
||||
project("a", "b") // b should still be available
|
||||
).toDbObject("foo", Aggregation.DEFAULT_CONTEXT);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-788
|
||||
*/
|
||||
@Test
|
||||
public void referencesToGroupIdsShouldBeRenderedAsReferences() {
|
||||
|
||||
DBObject agg = newAggregation( //
|
||||
project("a"), //
|
||||
group("a").count().as("aCnt"), //
|
||||
project("aCnt", "a") //
|
||||
).toDbObject("foo", Aggregation.DEFAULT_CONTEXT);
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
DBObject secondProjection = ((List<DBObject>) agg.get("pipeline")).get(2);
|
||||
DBObject fields = getAsDBObject(secondProjection, "$project");
|
||||
assertThat(fields.get("aCnt"), is((Object) 1));
|
||||
assertThat(fields.get("a"), is((Object) "$_id.a"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-791
|
||||
*/
|
||||
@Test
|
||||
public void allowAggregationOperationsToBePassedAsIterable() {
|
||||
|
||||
List<AggregationOperation> ops = new ArrayList<AggregationOperation>();
|
||||
ops.add(project("a"));
|
||||
ops.add(group("a").count().as("aCnt"));
|
||||
ops.add(project("aCnt", "a"));
|
||||
|
||||
DBObject agg = newAggregation(ops).toDbObject("foo", Aggregation.DEFAULT_CONTEXT);
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
DBObject secondProjection = ((List<DBObject>) agg.get("pipeline")).get(2);
|
||||
DBObject fields = getAsDBObject(secondProjection, "$project");
|
||||
assertThat(fields.get("aCnt"), is((Object) 1));
|
||||
assertThat(fields.get("a"), is((Object) "$_id.a"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-791
|
||||
*/
|
||||
@Test
|
||||
public void allowTypedAggregationOperationsToBePassedAsIterable() {
|
||||
|
||||
List<AggregationOperation> ops = new ArrayList<AggregationOperation>();
|
||||
ops.add(project("a"));
|
||||
ops.add(group("a").count().as("aCnt"));
|
||||
ops.add(project("aCnt", "a"));
|
||||
|
||||
DBObject agg = newAggregation(DBObject.class, ops).toDbObject("foo", Aggregation.DEFAULT_CONTEXT);
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
DBObject secondProjection = ((List<DBObject>) agg.get("pipeline")).get(2);
|
||||
DBObject fields = getAsDBObject(secondProjection, "$project");
|
||||
assertThat(fields.get("aCnt"), is((Object) 1));
|
||||
assertThat(fields.get("a"), is((Object) "$_id.a"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -37,6 +37,38 @@ public class GroupOperationUnitTests {
|
||||
new GroupOperation((Fields) null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-759
|
||||
*/
|
||||
@Test
|
||||
public void groupOperationWithNoGroupIdFieldsShouldGenerateNullAsGroupId() {
|
||||
|
||||
GroupOperation operation = new GroupOperation(Fields.from());
|
||||
ExposedFields fields = operation.getFields();
|
||||
DBObject groupClause = extractDbObjectFromGroupOperation(operation);
|
||||
|
||||
assertThat(fields.exposesSingleFieldOnly(), is(true));
|
||||
assertThat(fields.exposesNoFields(), is(false));
|
||||
assertThat(groupClause.get(UNDERSCORE_ID), is(nullValue()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-759
|
||||
*/
|
||||
@Test
|
||||
public void groupOperationWithNoGroupIdFieldsButAdditionalFieldsShouldGenerateNullAsGroupId() {
|
||||
|
||||
GroupOperation operation = new GroupOperation(Fields.from()).count().as("cnt").last("foo").as("foo");
|
||||
ExposedFields fields = operation.getFields();
|
||||
DBObject groupClause = extractDbObjectFromGroupOperation(operation);
|
||||
|
||||
assertThat(fields.exposesSingleFieldOnly(), is(false));
|
||||
assertThat(fields.exposesNoFields(), is(false));
|
||||
assertThat(groupClause.get(UNDERSCORE_ID), is(nullValue()));
|
||||
assertThat((BasicDBObject) groupClause.get("cnt"), is(new BasicDBObject("$sum", 1)));
|
||||
assertThat((BasicDBObject) groupClause.get("foo"), is(new BasicDBObject("$last", "$foo")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void createsGroupOperationWithSingleField() {
|
||||
|
||||
|
||||
@@ -25,12 +25,14 @@ import org.springframework.data.mongodb.core.DBObjectUtils;
|
||||
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link ProjectionOperation}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class ProjectionOperationUnitTests {
|
||||
|
||||
@@ -65,7 +67,7 @@ public class ProjectionOperationUnitTests {
|
||||
DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT);
|
||||
DBObject projectClause = DBObjectUtils.getAsDBObject(dbObject, PROJECT);
|
||||
|
||||
assertThat(projectClause.get("foo"), is((Object) "$foo"));
|
||||
assertThat(projectClause.get("foo"), is((Object) 1));
|
||||
assertThat(projectClause.get("bar"), is((Object) "$foobar"));
|
||||
}
|
||||
|
||||
@@ -183,12 +185,89 @@ public class ProjectionOperationUnitTests {
|
||||
assertThat(oper.get(MOD), is((Object) Arrays.<Object> asList("$a", 3)));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-758
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void excludeShouldThrowExceptionForFieldsOtherThanUnderscoreId() {
|
||||
|
||||
new ProjectionOperation().andExclude("foo");
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-758
|
||||
*/
|
||||
@Test
|
||||
public void excludeShouldAllowExclusionOfUnderscoreId() {
|
||||
|
||||
ProjectionOperation projectionOp = new ProjectionOperation().andExclude(Fields.UNDERSCORE_ID);
|
||||
DBObject dbObject = projectionOp.toDBObject(Aggregation.DEFAULT_CONTEXT);
|
||||
DBObject projectClause = DBObjectUtils.getAsDBObject(dbObject, PROJECT);
|
||||
assertThat((Integer) projectClause.get(Fields.UNDERSCORE_ID), is(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-757
|
||||
*/
|
||||
@Test
|
||||
public void usesImplictAndExplicitFieldAliasAndIncludeExclude() {
|
||||
|
||||
ProjectionOperation operation = Aggregation.project("foo").and("foobar").as("bar").andInclude("inc1", "inc2")
|
||||
.andExclude("_id");
|
||||
|
||||
DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT);
|
||||
DBObject projectClause = DBObjectUtils.getAsDBObject(dbObject, PROJECT);
|
||||
|
||||
assertThat(projectClause.get("foo"), is((Object) 1)); // implicit
|
||||
assertThat(projectClause.get("bar"), is((Object) "$foobar")); // explicit
|
||||
assertThat(projectClause.get("inc1"), is((Object) 1)); // include shortcut
|
||||
assertThat(projectClause.get("inc2"), is((Object) 1));
|
||||
assertThat(projectClause.get("_id"), is((Object) 0));
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void arithmenticProjectionOperationModByZeroException() {
|
||||
|
||||
new ProjectionOperation().and("a").mod(0);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-769
|
||||
*/
|
||||
@Test
|
||||
public void allowArithmeticOperationsWithFieldReferences() {
|
||||
|
||||
ProjectionOperation operation = Aggregation.project() //
|
||||
.and("foo").plus("bar").as("fooPlusBar") //
|
||||
.and("foo").minus("bar").as("fooMinusBar") //
|
||||
.and("foo").multiply("bar").as("fooMultiplyBar") //
|
||||
.and("foo").divide("bar").as("fooDivideBar") //
|
||||
.and("foo").mod("bar").as("fooModBar");
|
||||
|
||||
DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT);
|
||||
DBObject projectClause = DBObjectUtils.getAsDBObject(dbObject, PROJECT);
|
||||
|
||||
assertThat((BasicDBObject) projectClause.get("fooPlusBar"), //
|
||||
is(new BasicDBObject("$add", dbList("$foo", "$bar"))));
|
||||
assertThat((BasicDBObject) projectClause.get("fooMinusBar"), //
|
||||
is(new BasicDBObject("$subtract", dbList("$foo", "$bar"))));
|
||||
assertThat((BasicDBObject) projectClause.get("fooMultiplyBar"), //
|
||||
is(new BasicDBObject("$multiply", dbList("$foo", "$bar"))));
|
||||
assertThat((BasicDBObject) projectClause.get("fooDivideBar"), //
|
||||
is(new BasicDBObject("$divide", dbList("$foo", "$bar"))));
|
||||
assertThat((BasicDBObject) projectClause.get("fooModBar"), //
|
||||
is(new BasicDBObject("$mod", dbList("$foo", "$bar"))));
|
||||
}
|
||||
|
||||
public static BasicDBList dbList(Object... items) {
|
||||
|
||||
BasicDBList list = new BasicDBList();
|
||||
for (Object item : items) {
|
||||
list.add(item);
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
private static DBObject exctractOperation(String field, DBObject fromProjectClause) {
|
||||
return (DBObject) fromProjectClause.get(field);
|
||||
}
|
||||
|
||||
@@ -12,6 +12,7 @@ import java.util.UUID;
|
||||
|
||||
import org.bson.types.Binary;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.Test;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
@@ -172,6 +173,17 @@ public class CustomConversionsUnitTests {
|
||||
assertThat(conversions.hasCustomReadTarget(String.class, URL.class), is(true));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-795
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("rawtypes")
|
||||
public void favorsCustomConverterForIndeterminedTargetType() {
|
||||
|
||||
CustomConversions conversions = new CustomConversions(Arrays.asList(DateTimeToStringConverter.INSTANCE));
|
||||
assertThat(conversions.getCustomWriteTarget(DateTime.class, null), is(equalTo((Class) String.class)));
|
||||
}
|
||||
|
||||
enum FormatToStringConverter implements Converter<Format, String> {
|
||||
INSTANCE;
|
||||
|
||||
@@ -207,4 +219,13 @@ public class CustomConversionsUnitTests {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
enum DateTimeToStringConverter implements Converter<DateTime, String> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public String convert(DateTime source) {
|
||||
return "";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -56,6 +56,7 @@ import com.mongodb.QueryBuilder;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class QueryMapperUnitTests {
|
||||
@@ -438,6 +439,83 @@ public class QueryMapperUnitTests {
|
||||
assertThat(inClause.get(0), is(instanceOf(com.mongodb.DBRef.class)));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-752
|
||||
*/
|
||||
@Test
|
||||
public void mapsSimpleValuesStartingWith$Correctly() {
|
||||
|
||||
Query query = query(where("myvalue").is("$334"));
|
||||
|
||||
DBObject result = mapper.getMappedObject(query.getQueryObject(), null);
|
||||
|
||||
assertThat(result.keySet(), hasSize(1));
|
||||
assertThat(result.get("myvalue"), is((Object) "$334"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-752
|
||||
*/
|
||||
@Test
|
||||
public void mapsKeywordAsSimpleValuesCorrectly() {
|
||||
|
||||
Query query = query(where("myvalue").is("$center"));
|
||||
|
||||
DBObject result = mapper.getMappedObject(query.getQueryObject(), null);
|
||||
|
||||
assertThat(result.keySet(), hasSize(1));
|
||||
assertThat(result.get("myvalue"), is((Object) "$center"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @DATAMONGO-805
|
||||
*/
|
||||
@Test
|
||||
public void shouldExcludeDBRefAssociation() {
|
||||
|
||||
Query query = query(where("someString").is("foo"));
|
||||
query.fields().exclude("reference");
|
||||
|
||||
BasicMongoPersistentEntity<?> entity = context.getPersistentEntity(WithDBRef.class);
|
||||
DBObject queryResult = mapper.getMappedObject(query.getQueryObject(), entity);
|
||||
DBObject fieldsResult = mapper.getMappedObject(query.getFieldsObject(), entity);
|
||||
|
||||
assertThat(queryResult.get("someString"), is((Object) "foo"));
|
||||
assertThat(fieldsResult.get("reference"), is((Object) 0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-686
|
||||
*/
|
||||
@Test
|
||||
public void queryMapperShouldNotChangeStateInGivenQueryObjectWhenIdConstrainedByInList() {
|
||||
|
||||
BasicMongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(Sample.class);
|
||||
String idPropertyName = persistentEntity.getIdProperty().getName();
|
||||
DBObject queryObject = query(where(idPropertyName).in("42")).getQueryObject();
|
||||
|
||||
Object idValuesBefore = getAsDBObject(queryObject, idPropertyName).get("$in");
|
||||
mapper.getMappedObject(queryObject, persistentEntity);
|
||||
Object idValuesAfter = getAsDBObject(queryObject, idPropertyName).get("$in");
|
||||
|
||||
assertThat(idValuesAfter, is(idValuesBefore));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-773
|
||||
*/
|
||||
@Test
|
||||
public void queryMapperShouldBeAbleToProcessQueriesThatIncludeDbRefFields() {
|
||||
|
||||
BasicMongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(WithDBRef.class);
|
||||
|
||||
Query qry = query(where("someString").is("abc"));
|
||||
qry.fields().include("reference");
|
||||
|
||||
DBObject mappedFields = mapper.getMappedObject(qry.getFieldsObject(), persistentEntity);
|
||||
assertThat(mappedFields, is(notNullValue()));
|
||||
}
|
||||
|
||||
class IdWrapper {
|
||||
Object id;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,25 +17,33 @@ package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.springframework.data.mongodb.core.DBObjectUtils.*;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.DBObjectUtils;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link UpdateMapper}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class UpdateMapperUnitTests {
|
||||
@@ -43,11 +51,26 @@ public class UpdateMapperUnitTests {
|
||||
@Mock MongoDbFactory factory;
|
||||
MappingMongoConverter converter;
|
||||
MongoMappingContext context;
|
||||
UpdateMapper mapper;
|
||||
|
||||
private Converter<NestedEntity, DBObject> writingConverterSpy;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Before
|
||||
public void setUp() {
|
||||
context = new MongoMappingContext();
|
||||
converter = new MappingMongoConverter(factory, context);
|
||||
|
||||
this.writingConverterSpy = Mockito.spy(new NestedEntityWriteConverter());
|
||||
CustomConversions conversions = new CustomConversions(Arrays.asList(writingConverterSpy));
|
||||
|
||||
this.context = new MongoMappingContext();
|
||||
this.context.setSimpleTypeHolder(conversions.getSimpleTypeHolder());
|
||||
this.context.initialize();
|
||||
|
||||
this.converter = new MappingMongoConverter(factory, context);
|
||||
this.converter.setCustomConversions(conversions);
|
||||
this.converter.afterPropertiesSet();
|
||||
|
||||
this.mapper = new UpdateMapper(converter);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -62,32 +85,170 @@ public class UpdateMapperUnitTests {
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ParentClass.class));
|
||||
|
||||
DBObject push = DBObjectUtils.getAsDBObject(mappedObject, "$push");
|
||||
DBObject list = DBObjectUtils.getAsDBObject(push, "list");
|
||||
DBObject push = getAsDBObject(mappedObject, "$push");
|
||||
DBObject list = getAsDBObject(push, "aliased");
|
||||
|
||||
assertThat(list.get("_class"), is((Object) ConcreteChildClass.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-807
|
||||
*/
|
||||
@Test
|
||||
public void updateMapperShouldRetainTypeInformationForNestedEntities() {
|
||||
|
||||
Update update = Update.update("model", new ModelImpl(1));
|
||||
UpdateMapper mapper = new UpdateMapper(converter);
|
||||
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ModelWrapper.class));
|
||||
|
||||
DBObject set = getAsDBObject(mappedObject, "$set");
|
||||
DBObject modelDbObject = (DBObject) set.get("model");
|
||||
assertThat(modelDbObject.get("_class"), not(nullValue()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-807
|
||||
*/
|
||||
@Test
|
||||
public void updateMapperShouldNotPersistTypeInformationForKnownSimpleTypes() {
|
||||
|
||||
Update update = Update.update("model.value", 1);
|
||||
UpdateMapper mapper = new UpdateMapper(converter);
|
||||
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ModelWrapper.class));
|
||||
|
||||
DBObject set = getAsDBObject(mappedObject, "$set");
|
||||
assertThat(set.get("_class"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-807
|
||||
*/
|
||||
@Test
|
||||
public void updateMapperShouldNotPersistTypeInformationForNullValues() {
|
||||
|
||||
Update update = Update.update("model", null);
|
||||
UpdateMapper mapper = new UpdateMapper(converter);
|
||||
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ModelWrapper.class));
|
||||
|
||||
DBObject set = getAsDBObject(mappedObject, "$set");
|
||||
assertThat(set.get("_class"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-407
|
||||
*/
|
||||
@Test
|
||||
public void updateMapperShouldRetainTypeInformationForNestedCollectionElements() {
|
||||
|
||||
Update update = Update.update("list.$", new ConcreteChildClass("42", "bubu"));
|
||||
|
||||
UpdateMapper mapper = new UpdateMapper(converter);
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ParentClass.class));
|
||||
|
||||
DBObject set = getAsDBObject(mappedObject, "$set");
|
||||
DBObject modelDbObject = getAsDBObject(set, "aliased.$");
|
||||
assertThat(modelDbObject.get("_class"), is((Object) ConcreteChildClass.class.getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-407
|
||||
*/
|
||||
@Test
|
||||
public void updateMapperShouldSupportNestedCollectionElementUpdates() {
|
||||
|
||||
Update update = Update.update("list.$.value", "foo").set("list.$.otherValue", "bar");
|
||||
|
||||
UpdateMapper mapper = new UpdateMapper(converter);
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ParentClass.class));
|
||||
|
||||
DBObject set = getAsDBObject(mappedObject, "$set");
|
||||
assertThat(set.get("aliased.$.value"), is((Object) "foo"));
|
||||
assertThat(set.get("aliased.$.otherValue"), is((Object) "bar"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-407
|
||||
*/
|
||||
@Test
|
||||
public void updateMapperShouldWriteTypeInformationForComplexNestedCollectionElementUpdates() {
|
||||
|
||||
Update update = Update.update("list.$.value", "foo").set("list.$.someObject", new ConcreteChildClass("42", "bubu"));
|
||||
|
||||
UpdateMapper mapper = new UpdateMapper(converter);
|
||||
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ParentClass.class));
|
||||
|
||||
DBObject dbo = getAsDBObject(mappedObject, "$set");
|
||||
assertThat(dbo.get("aliased.$.value"), is((Object) "foo"));
|
||||
|
||||
DBObject someObject = getAsDBObject(dbo, "aliased.$.someObject");
|
||||
assertThat(someObject, is(notNullValue()));
|
||||
assertThat(someObject.get("_class"), is((Object) ConcreteChildClass.class.getName()));
|
||||
assertThat(someObject.get("value"), is((Object) "bubu"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-410
|
||||
*/
|
||||
@Test
|
||||
public void testUpdateMapperShouldConsiderCustomWriteTarget() {
|
||||
|
||||
List<NestedEntity> someValues = Arrays.asList(new NestedEntity("spring"), new NestedEntity("data"),
|
||||
new NestedEntity("mongodb"));
|
||||
NestedEntity[] array = new NestedEntity[someValues.size()];
|
||||
|
||||
Update update = new Update().pushAll("collectionOfNestedEntities", someValues.toArray(array));
|
||||
mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(DomainEntity.class));
|
||||
|
||||
verify(writingConverterSpy, times(3)).convert(Mockito.any(NestedEntity.class));
|
||||
}
|
||||
|
||||
static interface Model {}
|
||||
|
||||
static class ModelImpl implements Model {
|
||||
public int value;
|
||||
|
||||
public ModelImpl(int value) {
|
||||
this.value = value;
|
||||
}
|
||||
}
|
||||
|
||||
public class ModelWrapper {
|
||||
Model model;
|
||||
}
|
||||
|
||||
static class ParentClass {
|
||||
|
||||
String id;
|
||||
|
||||
@Field("aliased")//
|
||||
List<? extends AbstractChildClass> list;
|
||||
|
||||
public ParentClass(String id, List<? extends AbstractChildClass> list) {
|
||||
this.id = id;
|
||||
this.list = list;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static abstract class AbstractChildClass {
|
||||
|
||||
String id;
|
||||
String value;
|
||||
String otherValue;
|
||||
AbstractChildClass someObject;
|
||||
|
||||
public AbstractChildClass(String id, String value) {
|
||||
this.id = id;
|
||||
this.value = value;
|
||||
this.otherValue = "other_" + value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -97,4 +258,27 @@ public class UpdateMapperUnitTests {
|
||||
super(id, value);
|
||||
}
|
||||
}
|
||||
|
||||
static class DomainEntity {
|
||||
List<NestedEntity> collectionOfNestedEntities;
|
||||
}
|
||||
|
||||
static class NestedEntity {
|
||||
String name;
|
||||
|
||||
public NestedEntity(String name) {
|
||||
super();
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@WritingConverter
|
||||
static class NestedEntityWriteConverter implements Converter<NestedEntity, DBObject> {
|
||||
|
||||
@Override
|
||||
public DBObject convert(NestedEntity source) {
|
||||
return new BasicDBObject();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,6 +24,7 @@ import static org.springframework.data.mongodb.core.query.Update.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@@ -55,6 +56,7 @@ import com.mongodb.MongoException;
|
||||
/**
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class MappingTests {
|
||||
|
||||
@@ -512,28 +514,96 @@ public class MappingTests {
|
||||
assertThat(result.items.get(0).id, is(items.id));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-805
|
||||
*/
|
||||
@Test
|
||||
public void supportExcludeDbRefAssociation() {
|
||||
|
||||
template.dropCollection(Item.class);
|
||||
template.dropCollection(Container.class);
|
||||
|
||||
Item item = new Item();
|
||||
template.insert(item);
|
||||
|
||||
Container container = new Container("foo");
|
||||
container.item = item;
|
||||
|
||||
template.insert(container);
|
||||
|
||||
Query query = new Query(Criteria.where("id").is("foo"));
|
||||
query.fields().exclude("item");
|
||||
Container result = template.findOne(query, Container.class);
|
||||
|
||||
assertThat(result, is(notNullValue()));
|
||||
assertThat(result.item, is(nullValue()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-805
|
||||
*/
|
||||
@Test
|
||||
public void shouldMapFieldsOfIterableEntity() {
|
||||
|
||||
template.dropCollection(IterableItem.class);
|
||||
template.dropCollection(Container.class);
|
||||
|
||||
Item item = new IterableItem();
|
||||
item.value = "bar";
|
||||
template.insert(item);
|
||||
|
||||
Container container = new Container("foo");
|
||||
container.item = item;
|
||||
|
||||
template.insert(container);
|
||||
|
||||
Query query = new Query(Criteria.where("id").is("foo"));
|
||||
Container result = template.findOne(query, Container.class);
|
||||
|
||||
assertThat(result, is(notNullValue()));
|
||||
assertThat(result.item, is(notNullValue()));
|
||||
assertThat(result.item.value, is("bar"));
|
||||
}
|
||||
|
||||
static class Container {
|
||||
|
||||
@Id
|
||||
final String id;
|
||||
@Id final String id;
|
||||
|
||||
public Container() {
|
||||
id = new ObjectId().toString();
|
||||
}
|
||||
|
||||
@DBRef
|
||||
Item item;
|
||||
@DBRef
|
||||
List<Item> items;
|
||||
public Container(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
@DBRef Item item;
|
||||
@DBRef List<Item> items;
|
||||
}
|
||||
|
||||
static class Item {
|
||||
|
||||
@Id
|
||||
final String id;
|
||||
@Id final String id;
|
||||
String value;
|
||||
|
||||
public Item() {
|
||||
this.id = new ObjectId().toString();
|
||||
}
|
||||
}
|
||||
|
||||
static class IterableItem extends Item implements Iterable<ItemData> {
|
||||
|
||||
List<ItemData> data = new ArrayList<MappingTests.ItemData>();
|
||||
|
||||
@Override
|
||||
public Iterator<ItemData> iterator() {
|
||||
return data.iterator();
|
||||
}
|
||||
}
|
||||
|
||||
static class ItemData {
|
||||
|
||||
String id;
|
||||
String value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -21,6 +21,7 @@ import static org.junit.Assert.*;
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.mongodb.core.DBObjectUtils;
|
||||
import org.springframework.data.mongodb.core.geo.Distance;
|
||||
import org.springframework.data.mongodb.core.geo.Metric;
|
||||
import org.springframework.data.mongodb.core.geo.Metrics;
|
||||
@@ -31,6 +32,7 @@ import org.springframework.data.mongodb.core.geo.Point;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class NearQueryUnitTests {
|
||||
|
||||
@@ -123,4 +125,36 @@ public class NearQueryUnitTests {
|
||||
assertThat(query.getSkip(), is(pageable.getPageNumber() * pageable.getPageSize()));
|
||||
assertThat((Integer) query.toDBObject().get("num"), is((pageable.getPageNumber() + 1) * pageable.getPageSize()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-829
|
||||
*/
|
||||
@Test
|
||||
public void nearQueryShouldInoreZeroLimitFromQuery() {
|
||||
|
||||
NearQuery query = NearQuery.near(new Point(1, 2)).query(Query.query(Criteria.where("foo").is("bar")));
|
||||
assertThat(query.toDBObject().get("num"), nullValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONOGO-829
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void nearQueryShouldThrowExceptionWhenGivenANullQuery() {
|
||||
NearQuery.near(new Point(1, 2)).query(null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-829
|
||||
*/
|
||||
@Test
|
||||
public void numShouldNotBeAlteredByQueryWithoutPageable() {
|
||||
|
||||
int num = 100;
|
||||
NearQuery query = NearQuery.near(new Point(1, 2));
|
||||
query.num(num);
|
||||
query.query(Query.query(Criteria.where("foo").is("bar")));
|
||||
|
||||
assertThat(DBObjectUtils.getTypedValue(query.toDBObject(), "num", Integer.class), is(num));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -42,15 +42,15 @@ import com.mongodb.gridfs.GridFSFile;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:gridfs/gridfs.xml")
|
||||
public class GridFsTemplateIIntegrationTests {
|
||||
public class GridFsTemplateIntegrationTests {
|
||||
|
||||
Resource resource = new ClassPathResource("gridfs/gridfs.xml");
|
||||
|
||||
@Autowired
|
||||
GridFsOperations operations;
|
||||
@Autowired GridFsOperations operations;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
@@ -127,6 +127,14 @@ public class GridFsTemplateIIntegrationTests {
|
||||
assertThat(resources[0].getContentType(), is(reference.getContentType()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-813
|
||||
*/
|
||||
@Test
|
||||
public void getResourceShouldReturnNullForNonExistingResource() {
|
||||
assertThat(operations.getResource("doesnotexist"), is(nullValue()));
|
||||
}
|
||||
|
||||
private static void assertSame(GridFSFile left, GridFSFile right) {
|
||||
|
||||
assertThat(left.getId(), is(right.getId()));
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -680,4 +680,16 @@ public abstract class AbstractPersonRepositoryIntegrationTests {
|
||||
assertThat(results.isLastPage(), is(true));
|
||||
assertThat(results.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-871
|
||||
*/
|
||||
@Test
|
||||
public void findsPersonsByFirstnameAsArray() {
|
||||
|
||||
Person[] result = repository.findByThePersonsFirstnameAsArray("Leroi");
|
||||
|
||||
assertThat(result, is(arrayWithSize(1)));
|
||||
assertThat(result, is(arrayContaining(leroi)));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -69,6 +69,12 @@ public interface PersonRepository extends MongoRepository<Person, String>, Query
|
||||
@Query(value = "{ 'firstname' : ?0 }", fields = "{ 'firstname': 1, 'lastname': 1}")
|
||||
List<Person> findByThePersonsFirstname(String firstname);
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-871
|
||||
*/
|
||||
@Query(value = "{ 'firstname' : ?0 }")
|
||||
Person[] findByThePersonsFirstnameAsArray(String firstname);
|
||||
|
||||
/**
|
||||
* Returns all {@link Person}s with a firstname matching the given one (*-wildcard supported).
|
||||
*
|
||||
|
||||
@@ -21,7 +21,14 @@ import org.springframework.data.mongodb.core.mapping.Document;
|
||||
@Document
|
||||
public class User {
|
||||
|
||||
@Id
|
||||
String id;
|
||||
@Id String id;
|
||||
String username;
|
||||
|
||||
public String getUsername() {
|
||||
return username;
|
||||
}
|
||||
|
||||
public void setUsername(String username) {
|
||||
this.username = username;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,29 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.custom;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.repository.User;
|
||||
import org.springframework.data.repository.Repository;
|
||||
|
||||
/**
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public interface CustomMongoRepository extends Repository<User, String> {
|
||||
|
||||
List<User> findByUsernameCustom(String username);
|
||||
}
|
||||
@@ -0,0 +1,39 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.custom;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.repository.User;
|
||||
|
||||
/**
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class CustomMongoRepositoryImpl implements CustomMongoRepository {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.custom.CustomMongoRepository#findByFullName()
|
||||
*/
|
||||
@Override
|
||||
public List<User> findByUsernameCustom(String username) {
|
||||
|
||||
User user = new User();
|
||||
user.setUsername(username);
|
||||
return Arrays.asList(user);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,62 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.custom;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.ImportResource;
|
||||
import org.springframework.data.mongodb.repository.User;
|
||||
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
/**
|
||||
* Integration tests for custom Repository implementations.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration
|
||||
public class CustomRepositoryImplementationTests {
|
||||
|
||||
@Configuration
|
||||
@EnableMongoRepositories
|
||||
@ImportResource("classpath:infrastructure.xml")
|
||||
static class Config {}
|
||||
|
||||
@Autowired CustomMongoRepository customMongoRepository;
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-804
|
||||
*/
|
||||
@Test
|
||||
public void shouldExecuteMethodOnCustomRepositoryImplementation() {
|
||||
|
||||
String username = "bubu";
|
||||
List<User> users = customMongoRepository.findByUsernameCustom(username);
|
||||
|
||||
assertThat(users.size(), is(1));
|
||||
assertThat(users.get(0), is(notNullValue()));
|
||||
assertThat(users.get(0).getUsername(), is(username));
|
||||
}
|
||||
}
|
||||
@@ -28,6 +28,7 @@ import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.repository.PersonRepository;
|
||||
import org.springframework.data.repository.core.support.RepositoryFactorySupport;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
|
||||
@@ -39,15 +40,11 @@ import org.springframework.test.util.ReflectionTestUtils;
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class MongoRepositoryFactoryBeanUnitTests {
|
||||
|
||||
@Mock
|
||||
MongoOperations operations;
|
||||
@Mock MongoOperations operations;
|
||||
|
||||
@Mock
|
||||
MongoConverter converter;
|
||||
@Mock MongoConverter converter;
|
||||
|
||||
@Mock
|
||||
@SuppressWarnings("rawtypes")
|
||||
MappingContext context;
|
||||
@Mock @SuppressWarnings("rawtypes") MappingContext context;
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("rawtypes")
|
||||
@@ -75,6 +72,7 @@ public class MongoRepositoryFactoryBeanUnitTests {
|
||||
when(operations.getConverter()).thenReturn(converter);
|
||||
when(converter.getMappingContext()).thenReturn(context);
|
||||
|
||||
factoryBean.setRepositoryInterface(PersonRepository.class);
|
||||
factoryBean.setMongoOperations(operations);
|
||||
factoryBean.afterPropertiesSet();
|
||||
|
||||
|
||||
@@ -36,6 +36,7 @@ import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mysema.query.types.expr.BooleanOperation;
|
||||
import com.mysema.query.types.path.PathBuilder;
|
||||
import com.mysema.query.types.path.SimplePath;
|
||||
import com.mysema.query.types.path.StringPath;
|
||||
|
||||
/**
|
||||
@@ -46,8 +47,7 @@ import com.mysema.query.types.path.StringPath;
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class SpringDataMongodbSerializerUnitTests {
|
||||
|
||||
@Mock
|
||||
MongoDbFactory dbFactory;
|
||||
@Mock MongoDbFactory dbFactory;
|
||||
MongoConverter converter;
|
||||
SpringDataMongodbSerializer serializer;
|
||||
|
||||
@@ -117,10 +117,23 @@ public class SpringDataMongodbSerializerUnitTests {
|
||||
assertThat(result.get("_id"), is((Object) id));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-761
|
||||
*/
|
||||
@Test
|
||||
public void looksUpKeyForNonPropertyPath() {
|
||||
|
||||
PathBuilder<Address> builder = new PathBuilder<Address>(Address.class, "address");
|
||||
SimplePath<Object> firstElementPath = builder.getArray("foo", String[].class).get(0);
|
||||
String path = serializer.getKeyForPath(firstElementPath, firstElementPath.getMetadata());
|
||||
|
||||
assertThat(path, is("0"));
|
||||
}
|
||||
|
||||
class Address {
|
||||
String id;
|
||||
String street;
|
||||
@Field("zip_code")
|
||||
String zipCode;
|
||||
@Field("zip_code") String zipCode;
|
||||
@Field("bar") String[] foo;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -56,7 +56,7 @@
|
||||
<xi:include href="introduction/why-sd-doc.xml"/>
|
||||
<xi:include href="introduction/requirements.xml"/>
|
||||
<xi:include href="introduction/getting-started.xml"/>
|
||||
<xi:include href="https://raw.github.com/SpringSource/spring-data-commons/1.6.0.RELEASE/src/docbkx/repositories.xml">
|
||||
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.6.5.RELEASE/src/docbkx/repositories.xml">
|
||||
<xi:fallback href="../../../spring-data-commons/src/docbkx/repositories.xml" />
|
||||
</xi:include>
|
||||
</part>
|
||||
@@ -76,10 +76,10 @@
|
||||
<part id="appendix">
|
||||
<title>Appendix</title>
|
||||
|
||||
<xi:include href="https://raw.github.com/SpringSource/spring-data-commons/1.6.0.RELEASE/src/docbkx/repository-namespace-reference.xml">
|
||||
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.6.5.RELEASE/src/docbkx/repository-namespace-reference.xml">
|
||||
<xi:fallback href="../../../spring-data-commons/src/docbkx/repository-namespace-reference.xml" />
|
||||
</xi:include>
|
||||
<xi:include href="https://raw.github.com/SpringSource/spring-data-commons/1.6.0.RELEASE/src/docbkx/repository-query-keywords-reference.xml">
|
||||
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.6.5.RELEASE/src/docbkx/repository-query-keywords-reference.xml">
|
||||
<xi:fallback href="../../../spring-data-commons/src/docbkx/repository-query-keywords-reference.xml" />
|
||||
</xi:include>
|
||||
</part>
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
<title>Community Forum</title>
|
||||
|
||||
<para>The Spring Data <ulink
|
||||
url="http://forum.springframework.org/forumdisplay.php?f=80">forum
|
||||
url="http://forum.spring.io/forum/spring-projects/data/nosql">forum
|
||||
</ulink> is a message board for all Spring Data (not just Document)
|
||||
users to share information and help each other. Note that registration
|
||||
is needed <emphasis>only</emphasis> for posting.</para>
|
||||
@@ -30,7 +30,7 @@
|
||||
|
||||
<para>Professional, from-the-source support, with guaranteed response
|
||||
time, is available from <ulink
|
||||
url="http://www.springsource.com">SpringSource</ulink>, the company
|
||||
url="http://www.gopivotal.com/">Pivotal Software, Inc.</ulink>, the company
|
||||
behind Spring Data and Spring.</para>
|
||||
</section>
|
||||
</section>
|
||||
@@ -40,12 +40,12 @@
|
||||
|
||||
<para>For information on the Spring Data Mongo source code repository,
|
||||
nightly builds and snapshot artifacts please see the <ulink
|
||||
url="http://www.springsource.org/spring-data/mongodb">Spring Data Mongo
|
||||
url="http://projects.spring.io/spring-data-mongodb">Spring Data Mongo
|
||||
homepage</ulink>.</para>
|
||||
|
||||
<para>You can help make Spring Data best serve the needs of the Spring
|
||||
community by interacting with developers through the Spring Community
|
||||
<ulink url="http://forum.springsource.org">forums</ulink>. To follow
|
||||
<ulink url="http://forum.spring.io">forums</ulink>. To follow
|
||||
developer activity look for the mailing list information on the Spring
|
||||
Data Mongo homepage.</para>
|
||||
|
||||
@@ -55,10 +55,10 @@
|
||||
|
||||
<para>To stay up to date with the latest news and announcements in the
|
||||
Spring eco system, subscribe to the Spring Community <ulink
|
||||
url="http://www.springframework.org/">Portal</ulink>.</para>
|
||||
url="http://spring.io">Portal</ulink>.</para>
|
||||
|
||||
<para>Lastly, you can follow the SpringSource Data <ulink
|
||||
url="http://blog.springsource.com/category/data-access/">blog </ulink>or
|
||||
url="http://spring.io/blog/">blog </ulink>or
|
||||
the project team on Twitter (<ulink
|
||||
url="http://twitter.com/SpringData">SpringData</ulink>)</para>
|
||||
</section>
|
||||
|
||||
@@ -12,17 +12,17 @@
|
||||
<title>Knowing Spring</title>
|
||||
|
||||
<para>Spring Data uses Spring framework's <ulink
|
||||
url="http://static.springframework.org/spring/docs/3.0.x/reference/html/spring-core.html">core</ulink>
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/spring-core.html">core</ulink>
|
||||
functionality, such as the <ulink
|
||||
url="http://static.springframework.org/spring/docs/3.0.x/reference/html/beans.html">IoC</ulink>
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/beans.html">IoC</ulink>
|
||||
container, <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/reference/html/validation.html#core-convert">type
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/validation.html#core-convert">type
|
||||
conversion system</ulink>, <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/reference/html/expressions.html">expression
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/expressions.html">expression
|
||||
language</ulink>, <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/reference/html/jmx.html">JMX
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/jmx.html">JMX
|
||||
integration</ulink>, and portable <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/reference/html/dao.html#dao-exceptions">DAO
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/dao.html#dao-exceptions">DAO
|
||||
exception hierarchy</ulink>. While it is not important to know the
|
||||
Spring APIs, understanding the concepts behind them is. At a minimum,
|
||||
the idea behind IoC should be familiar for whatever IoC container you
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
<para>Spring Data Document 1.x binaries requires JDK level 6.0 and above,
|
||||
and
|
||||
<ulink url="http://www.springsource.org/documentation">Spring Framework</ulink>
|
||||
<ulink url="http://spring.io/docs">Spring Framework</ulink>
|
||||
3.0.x and above.
|
||||
</para>
|
||||
<para>
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
you perform administrative operations such as drop or create a database. The
|
||||
JMX features build upon the JMX feature set available in the Spring
|
||||
Framework. See <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/reference/jmx.html">here
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/jmx.html">here
|
||||
</ulink> for more details.</para>
|
||||
|
||||
<section id="mongodb:jmx-configuration">
|
||||
|
||||
@@ -369,7 +369,11 @@ public class Person {
|
||||
Spring Framework . Within the mapping framework it can be applied to
|
||||
constructor arguments. This lets you use a Spring Expression
|
||||
Language statement to transform a key's value retrieved in the
|
||||
database before it is used to construct a domain object.</para>
|
||||
database before it is used to construct a domain object. In order to
|
||||
reference a property of a given document one has to use expressions
|
||||
like: <code>@Value("#root.myProperty")</code> where
|
||||
<literal>root</literal> refers to the root of the given
|
||||
document.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
@@ -444,7 +448,75 @@ public class Person<T extends Address> {
|
||||
// other getters/setters ommitted
|
||||
</programlisting>
|
||||
|
||||
<para></para>
|
||||
<para/>
|
||||
</section>
|
||||
|
||||
<section id="mapping-custom-object-construction">
|
||||
<title>Customized Object Construction</title>
|
||||
|
||||
<para>The Mapping Subsystem allows the customization of the object
|
||||
construction by annotating a constructor with the
|
||||
<literal>@PersistenceConstructor</literal> annotation. The values to be
|
||||
used for the constructor parameters are resolved in the following
|
||||
way:</para>
|
||||
|
||||
<itemizedlist>
|
||||
<listitem>
|
||||
<para>If a parameter is annotated with the <code>@Value</code>
|
||||
annotation, the given expression is evaluated and the result is used
|
||||
as the parameter value.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>If the Java type has a property whose name matches the given
|
||||
field of the input document, then it's property information is used
|
||||
to select the appropriate constructor parameter to pass the input
|
||||
field value to. This works only if the parameter name information is
|
||||
present in the java .class files which can be achieved by compiling
|
||||
the source with debug information or using the new
|
||||
<literal>-parameters</literal> command-line switch for javac in Java
|
||||
8.</para>
|
||||
</listitem>
|
||||
|
||||
<listitem>
|
||||
<para>Otherwise an <classname>MappingException</classname> will be
|
||||
thrown indicating that the given constructor parameter could not be
|
||||
bound.</para>
|
||||
</listitem>
|
||||
</itemizedlist>
|
||||
|
||||
<programlisting language="java">class OrderItem {
|
||||
|
||||
@Id String id;
|
||||
int quantity;
|
||||
double unitPrice;
|
||||
|
||||
OrderItem(String id, @Value("#root.qty ?: 0") int quantity, double unitPrice) {
|
||||
this.id = id;
|
||||
this.quantity = quantity;
|
||||
this.unitPrice = unitPrice;
|
||||
}
|
||||
|
||||
// getters/setters ommitted
|
||||
}
|
||||
|
||||
DBObject input = new BasicDBObject("id", "4711");
|
||||
input.put("unitPrice", 2.5);
|
||||
input.put("qty",5);
|
||||
OrderItem item = converter.read(OrderItem.class, input);</programlisting>
|
||||
|
||||
<note>
|
||||
<para>The SpEL expression in the <literal>@Value</literal> annotation
|
||||
of the <literal>quantity</literal> parameter falls back to the value
|
||||
<literal>0</literal> if the given property path cannot be
|
||||
resolved.</para>
|
||||
</note>
|
||||
|
||||
<para>Additional examples for using the
|
||||
<classname>@PersistenceConstructor</classname> annotation can be found
|
||||
in the <ulink
|
||||
url="https://github.com/spring-projects/spring-data-mongodb/blob/master/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java">MappingMongoConverterUnitTests</ulink>
|
||||
test suite.</para>
|
||||
</section>
|
||||
|
||||
<section id="mapping-usage-indexes">
|
||||
@@ -566,7 +638,7 @@ public class Person {
|
||||
<para>Spring 3.0 introduced a core.convert package that provides a
|
||||
general type conversion system. This is described in detail in the
|
||||
Spring reference documentation section entitled <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/validation.html#core-convert">Spring
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/validation.html#core-convert">Spring
|
||||
3 Type Conversion</ulink>.</para>
|
||||
</note>
|
||||
|
||||
@@ -608,7 +680,7 @@ public class Person {
|
||||
|
||||
}</programlisting>
|
||||
|
||||
<para></para>
|
||||
<para/>
|
||||
</section>
|
||||
</section>
|
||||
</chapter>
|
||||
|
||||
@@ -84,7 +84,7 @@
|
||||
or higher. The latest production release (2.0.x as of this writing) is
|
||||
recommended. An easy way to bootstrap setting up a working environment is
|
||||
to create a Spring based project in <ulink
|
||||
url="http://www.springsource.com/developer/sts">STS</ulink>.</para>
|
||||
url="http://spring.io/tools/sts">STS</ulink>.</para>
|
||||
|
||||
<para>First you need to set up a running Mongodb server. Refer to the
|
||||
<ulink url="http://www.mongodb.org/display/DOCS/Quickstart">Mongodb Quick
|
||||
@@ -264,9 +264,9 @@ public class MongoApp {
|
||||
<para>For those not familiar with how to configure the Spring
|
||||
container using Java based bean metadata instead of XML based metadata
|
||||
see the high level introduction in the reference docs <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/new-in-3.html#new-java-configuration"
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/new-in-3.html#new-java-configuration"
|
||||
userlevel="">here </ulink> as well as the detailed documentation<ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/beans.html#beans-java-instantiating-container">
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/beans.html#beans-java-instantiating-container">
|
||||
here</ulink>.</para>
|
||||
</note></para>
|
||||
|
||||
@@ -310,7 +310,7 @@ public class AppConfig {
|
||||
classes annoated with the <literal>@Repository</literal> annotation.
|
||||
This hierarchy and use of <literal>@Repository</literal> is described in
|
||||
<ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/dao.html">Spring's
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/dao.html">Spring's
|
||||
DAO support features</ulink>.</para>
|
||||
|
||||
<para>An example of a Java based bean metadata that supports exception
|
||||
@@ -1986,7 +1986,7 @@ GeoResults<Restaurant> = operations.geoNear(query, Restaurant.class);</pro
|
||||
methods on MongoOperations to simplify the creation and execution of
|
||||
Map-Reduce operations. It can convert the results of a Map-Reduce
|
||||
operation to a POJO also integrates with Spring's <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/resources.html">Resource
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/resources.html">Resource
|
||||
abstraction</ulink> abstraction. This will let you place your JavaScript
|
||||
files on the file system, classpath, http server or any other Spring
|
||||
Resource implementation and then reference the JavaScript resources via an
|
||||
@@ -2100,7 +2100,7 @@ MapReduceResults<ValueObject> results = mongoOperations.mapReduce(query, "
|
||||
providing methods on MongoOperations to simplify the creation and
|
||||
execution of group operations. It can convert the results of the group
|
||||
operation to a POJO and also integrates with Spring's <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/resources.html">Resource
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/resources.html">Resource
|
||||
abstraction</ulink> abstraction. This will let you place your JavaScript
|
||||
files on the file system, classpath, http server or any other Spring
|
||||
Resource implementation and then reference the JavaScript resources via an
|
||||
@@ -2710,7 +2710,7 @@ List<DBObject> resultList = result.getMappedResults();</programlisting>
|
||||
<note>
|
||||
<para>For more information on the Spring type conversion service see the
|
||||
reference docs <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/reference/validation.html#core-convert">here</ulink>.</para>
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/validation.html#core-convert">here</ulink>.</para>
|
||||
</note>
|
||||
|
||||
<section id="mongo.custom-converters.writer">
|
||||
@@ -3069,7 +3069,7 @@ mongoTemplate.dropCollection("MyNewCollection"); </programlisting>
|
||||
interface.</para>
|
||||
|
||||
<para>The motivation behind mapping to Spring's <ulink
|
||||
url="http://static.springsource.org/spring/docs/3.0.x/reference/dao.html#dao-exceptions">consistent
|
||||
url="http://docs.spring.io/spring/docs/3.0.x/reference/dao.html#dao-exceptions">consistent
|
||||
data access exception hierarchy</ulink> is that you are then able to write
|
||||
portable and descriptive exception handling code without resorting to
|
||||
coding against <ulink
|
||||
@@ -3215,8 +3215,8 @@ mongoTemplate.dropCollection("MyNewCollection"); </programlisting>
|
||||
</beans></programlisting>
|
||||
</example>
|
||||
|
||||
<para>You can no get the template injected and perform storing and
|
||||
retrieving operations to it.</para>
|
||||
<para>The template can now be injected and used to perform storage and
|
||||
retrieval operations.</para>
|
||||
|
||||
<example>
|
||||
<title>Using GridFsTemplate to store files</title>
|
||||
|
||||
@@ -1,6 +1,164 @@
|
||||
Spring Data MongoDB Changelog
|
||||
=============================
|
||||
|
||||
Changes in version 1.3.5.RELEASE (2014-03-10)
|
||||
---------------------------------------------
|
||||
** Bug
|
||||
* [DATAMONGO-829] - NearQuery, when used in conjunction with a Query, no longer sets num=0, unless Query specifies otherwise.
|
||||
* [DATAMONGO-871] - Repository queries support array return type.
|
||||
** Improvement
|
||||
* [DATAMONGO-865] - Avoid ClassNotFoundException during test runs.
|
||||
|
||||
Changes in version 1.4.0.RELEASE (2014-02-24)
|
||||
---------------------------------------------
|
||||
** Bug
|
||||
* [DATAMONGO-354] - MongoTemplate should support multiple $pushAll in one update.
|
||||
* [DATAMONGO-404] - Removing a DBRef using pull does not work.
|
||||
* [DATAMONGO-410] - Update with pushAll should recognize defined Converter.
|
||||
* [DATAMONGO-812] - $pushAll is deprecated since mongodb 2.4 move to $push $each.
|
||||
* [DATAMONGO-830] - Fix NPE during cache warmup in CustomConversions.
|
||||
* [DATAMONGO-838] - Support for referring to expression-based field in group operation.
|
||||
* [DATAMONGO-840] - Support for nested MongoDB field references in SpEL expressions within Projections.
|
||||
* [DATAMONGO-842] - Fix documentation error in GRIDFS section.
|
||||
* [DATAMONGO-852] - Increase version for update should traverse DBObject correctly in order to find version property.
|
||||
|
||||
** Improvement
|
||||
* [DATAMONGO-468] - Simplification for updates of DBRef fields with mongoTemplate.
|
||||
* [DATAMONGO-849] - Documentation on github should not reference invalid class.
|
||||
|
||||
** Task
|
||||
* [DATAMONGO-848] - Ensure compatibility with Mongo Java driver 2.12.
|
||||
* [DATAMONGO-853] - Update no longer allows null keys.
|
||||
* [DATAMONGO-856] - Update documentation.
|
||||
|
||||
Changes in version 1.3.4.RELEASE (2014-02-17)
|
||||
---------------------------------------------
|
||||
** Bug
|
||||
* [DATAMONGO-407] - Collection with generics losing element type after $set update
|
||||
* [DATAMONGO-410] - Update with pushAll doesn't recognize defined Converter
|
||||
* [DATAMONGO-686] - ClassCastException while reusing Query object
|
||||
* [DATAMONGO-805] - Excluding DBRef field in a query causes a MappingException
|
||||
* [DATAMONGO-807] - using findAndModify removes the _class field of encapsulated classes, causing MappingInstantiationException
|
||||
* [DATAMONGO-808] - Spring data mongoDB not working with IPv6 address directly
|
||||
* [DATAMONGO-811] - updateFirst methods do not increment @Version field
|
||||
* [DATAMONGO-816] - Unable to execute query with DocumentCallbackHandler when query contains Criteria with enums.
|
||||
* [DATAMONGO-828] - UpdateFirst throws OptimisticLockingFailureException when updating document that does not exist
|
||||
* [DATAMONGO-830] - NPE during cache warmup in CustomConversions
|
||||
* [DATAMONGO-842] - Documentation error in GRIDFS section
|
||||
** Improvement
|
||||
* [DATAMONGO-813] - GridFsTemplate.getResource(location) throws NPE if it doesn't find the file
|
||||
** Task
|
||||
* [DATAMONGO-824] - Add contribution guidelines
|
||||
* [DATAMONGO-846] - Release 1.3.4
|
||||
|
||||
Changes in version 1.4.0.RC1 (2014-01-29)
|
||||
-----------------------------------------
|
||||
** Bug
|
||||
* [DATAMONGO-407] - Collection with generics losing element type after $set update
|
||||
* [DATAMONGO-686] - ClassCastException while reusing Query object
|
||||
* [DATAMONGO-726] - References to non existing classes in namespace XSD
|
||||
* [DATAMONGO-804] - EnableMongoRepositories repositoryImplementationPostfix() default is empty String instead of "Impl"
|
||||
* [DATAMONGO-805] - Excluding DBRef field in a query causes a MappingException
|
||||
* [DATAMONGO-806] - Spring Data MongoDB - Aggregation Framework - No property _id found for type com.entity.User
|
||||
* [DATAMONGO-807] - using findAndModify removes the _class field of encapsulated classes, causing MappingInstantiationException
|
||||
* [DATAMONGO-808] - Spring data mongoDB not working with IPv6 address directly
|
||||
* [DATAMONGO-811] - updateFirst methods do not increment @Version field
|
||||
* [DATAMONGO-816] - Unable to execute query with DocumentCallbackHandler when query contains Criteria with enums.
|
||||
** Improvement
|
||||
* [DATAMONGO-778] - Create geospatial index of type other than 2d with @GeoSpatialIndexed
|
||||
* [DATAMONGO-785] - Add support for geospatial 2Dsphere and geohaystack index types
|
||||
* [DATAMONGO-787] - Guard against SpEL issue in Spring 3.2.4
|
||||
* [DATAMONGO-799] - Fix failing test in MongoTemplateTests on Mongo 2.5.x
|
||||
* [DATAMONGO-802] - Change AbstractMongoConfiguration.mongoDbFactory() to return MongoDbFactory
|
||||
* [DATAMONGO-813] - GridFsTemplate.getResource(location) throws NPE if it doesn't find the file
|
||||
* [DATAMONGO-822] - Add support for eager CDI repository instantiation
|
||||
* [DATAMONGO-823] - Add bucket attribute to <mongo:gridFsTemplate />
|
||||
* [DATAMONGO-837] - Upgrade mongodb java driver to 2.11.4
|
||||
** Task
|
||||
* [DATAMONGO-790] - Ensure compatibility with Spring Framework 4.0
|
||||
* [DATAMONGO-824] - Add contribution guidelines
|
||||
* [DATAMONGO-826] - Release Spring Data MongoDB 1.4.0.RC1
|
||||
* [DATAMONGO-835] - Code cleanups
|
||||
|
||||
Changes in version 1.3.3.RELEASE (2013-12-11)
|
||||
---------------------------------------------
|
||||
** Bug
|
||||
* [DATAMONGO-726] - Fixed classname references in namespace XSDs.
|
||||
* [DATAMONGO-788] - Projection operations do not render synthetic fields properly.
|
||||
* [DATAMONGO-795] - When adding custom converters to the mongo template it is possible to get unpredictable behaviour
|
||||
* [DATAMONGO-804] - Fix default annotation attribute value for repositoryImplementationPostfix().
|
||||
* [DATAMONGO-806] - Fixed invalid rendering of id field references.
|
||||
* [DATAMONGO-768] - Improve documentation of how to use @PersistenceConstructor
|
||||
|
||||
** Improvement
|
||||
* [DATAMONGO-791] - Added newAggregation(…) overloads to accept a List.
|
||||
* [DATAMONGO-799] - Fix failing test in MongoTemplateTests on Mongo 2.5.x
|
||||
* [DATAMONGO-800] - Improved AuditingIntegrationTests.
|
||||
** Task
|
||||
* [DATAMONGO-810] - Release 1.3.3
|
||||
|
||||
Changes in version 1.4.0.M1 (2013-11-19)
|
||||
----------------------------------------
|
||||
** Bug
|
||||
* [DATAMONGO-534] - The GridFs query execution does not return sorted resources, when the sorting fields are defined in the query definition
|
||||
* [DATAMONGO-630] - Add support of $setOnInsert modifier for upsert
|
||||
* [DATAMONGO-746] - IndexInfo cannot be read for indices created via mongo shell
|
||||
* [DATAMONGO-752] - QueryMapper prevents searching for values that start with a $ [dollarsign]
|
||||
* [DATAMONGO-753] - Add support for nested field references in group operations
|
||||
* [DATAMONGO-758] - Reject excludes other than _id in projection operations
|
||||
* [DATAMONGO-759] - Render group operation without non synthetic fields correctly.
|
||||
* [DATAMONGO-761] - ClassCastException in SpringDataMongodbSerializer.getKeyForPath
|
||||
* [DATAMONGO-768] - Improve documentation of how to use @PersistenceConstructor
|
||||
* [DATAMONGO-788] - Projection operations do not render synthetic fields properly.
|
||||
* [DATAMONGO-789] - Support login via different (e.g. admin) authentication database
|
||||
* [DATAMONGO-795] - When adding custom converters to the mongo template it is possible to get unpredictable behaviour
|
||||
** Improvement
|
||||
* [DATAMONGO-757] - Projections should follow mongodb conventions more precisely.
|
||||
* [DATAMONGO-764] - Add support for SSL connections to Mongo
|
||||
* [DATAMONGO-766] - Allow nested field references on properties through e.g. @Field("a.b")
|
||||
* [DATAMONGO-769] - Support arithmetic operators for properties
|
||||
* [DATAMONGO-770] - Repository - findBy&lt;Field&gt;IgnoreCase doesn't work
|
||||
* [DATAMONGO-771] - Saving raw JSON through MongoTemplate.insert(…) fails
|
||||
* [DATAMONGO-774] - Support SpEL expressions to define arithmetical projection operations in the aggregation framework
|
||||
* [DATAMONGO-776] - TypeBasedAggregationOperationContext should use MappingContext.getPersistentPropertyPath(String, Class<?>)
|
||||
* [DATAMONGO-780] - Add support for nested repositories
|
||||
* [DATAMONGO-782] - Typo in reference documentation
|
||||
* [DATAMONGO-785] - Add support for geospatial 2Dsphere and geohaystack index types
|
||||
* [DATAMONGO-787] - Upgrade to Spring 3.2.4
|
||||
* [DATAMONGO-791] - make newAggregation() method to accept list
|
||||
* [DATAMONGO-793] - Adapt to changes in Spring Data Commons triggered by repository initialization changes
|
||||
* [DATAMONGO-800] - AuditingIntegrationTests fail on fast machines
|
||||
** New Feature
|
||||
* [DATAMONGO-348] - Lazy Load for DbRef
|
||||
* [DATAMONGO-653] - Support for index operations in GridFsOperations
|
||||
* [DATAMONGO-760] - Add support for custom findAll Queries
|
||||
* [DATAMONGO-792] - Add support to configure Auditing via JavaConfig.
|
||||
** Task
|
||||
* [DATAMONGO-777] - Upgrade to Mongo Java Driver in 2.11
|
||||
|
||||
Changes in version 1.3.2.RELEASE (2013-10-25)
|
||||
---------------------------------------------
|
||||
** Bug
|
||||
* [DATAMONGO-746] IndexInfo cannot be read for indices created via mongo shell
|
||||
* [DATAMONGO-752] QueryMapper prevents searching for values that start with a $ [dollarsign]
|
||||
* [DATAMONGO-753] Add support for nested field references in group operations
|
||||
* [DATAMONGO-758] Reject excludes other than _id in projection operations
|
||||
* [DATAMONGO-759] Render group operation without non synthetic fields correctly.
|
||||
* [DATAMONGO-761] ClassCastException in SpringDataMongodbSerializer.getKeyForPath
|
||||
* [DATAMONGO-768] Improve documentation of how to use @PersistenceConstructor
|
||||
|
||||
** Improvement
|
||||
* [DATAMONGO-757] - Projections should follow mongodb conventions more precisely.
|
||||
* [DATAMONGO-769] - Support arithmetic operators for properties
|
||||
* [DATAMONGO-771] - Saving raw JSON through MongoTemplate.insert(…) fails
|
||||
** Task
|
||||
* [DATAMONGO-772] - Release 1.3.2
|
||||
|
||||
Changes in version 1.3.1.RELEASE (2013-09-09)
|
||||
---------------------------------------------
|
||||
** Task
|
||||
* [DATAMONGO-751] Upgraded to Spring Data Commons 1.6.1.
|
||||
|
||||
Changes in version 1.3.0.RELEASE (2013-09-09)
|
||||
---------------------------------------------
|
||||
** Bug
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
Spring Data Document 1.3.0 RELEASE
|
||||
Copyright (c) [2010-2013] Pivotal Inc.
|
||||
Spring Data MongoDB 1.3.5.RELEASE
|
||||
Copyright (c) [2010-2014] Pivotal Software, Inc.
|
||||
|
||||
This product is licensed to you under the Apache License, Version 2.0 (the "License").
|
||||
You may not use this product except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
SPRING DATA MongoDB 1.3.0.RELEASE
|
||||
-----------------------------
|
||||
Spring Data MongoDB 1.3.5.RELEASE
|
||||
---------------------------------
|
||||
|
||||
Spring Data MongoDB is released under the terms of the Apache Software License Version 2.0 (see license.txt).
|
||||
|
||||
@@ -13,5 +13,5 @@ The reference manual and javadoc are located in the 'docs' directory.
|
||||
|
||||
ADDITIONAL RESOURCES:
|
||||
|
||||
Spring Data Homepage: http://www.springsource.org/spring-data
|
||||
Spring Data Forum: http://forum.springsource.org/forumdisplay.php?f=80
|
||||
Spring Data Homepage: http://projects.spring.io/spring-data
|
||||
Spring Data Forum: http://forum.spring.io/forum/spring-projects/data/nosql
|
||||
|
||||
Reference in New Issue
Block a user