Compare commits
27 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | f9e20d12b2 |  |
|  | 4d6152c65e |  |
|  | d81cc53c12 |  |
|  | af4b84ea43 |  |
|  | f9110828bc |  |
|  | f301837be5 |  |
|  | 4d29d937eb |  |
|  | 86c11bc614 |  |
|  | be34b4e503 |  |
|  | ebfa2c5689 |  |
|  | b245ef2d9e |  |
|  | 5ef40d54bc |  |
|  | c679dba438 |  |
|  | fd6e4000b5 |  |
|  | c12a27a8f8 |  |
|  | df2184f204 |  |
|  | e9c8644d23 |  |
|  | c730b8f479 |  |
|  | f3b31fc467 |  |
|  | f778b2554c |  |
|  | 9d292f64b9 |  |
|  | 8ab038f83c |  |
|  | 689552c28e |  |
|  | 9ea9912b23 |  |
|  | a952ce5d2b |  |
|  | b88d960893 |  |
|  | e44d1f5f9a |  |
CONTRIBUTING.MD — 1 addition (new file)

@@ -0,0 +1 @@
+ You find the contribution guidelines for Spring Data projects [here](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.md).
@@ -26,7 +26,7 @@ Add the Maven dependency:
  <dependency>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-mongodb</artifactId>
-   <version>1.3.2.RELEASE</version>
+   <version>1.3.3.RELEASE</version>
  </dependency>
  ```
pom.xml — 31 changed lines

@@ -5,7 +5,7 @@
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb-parent</artifactId>
- <version>1.4.0.M1</version>
+ <version>1.4.0.RC1</version>
  <packaging>pom</packaging>

  <name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
  <parent>
    <groupId>org.springframework.data.build</groupId>
    <artifactId>spring-data-parent</artifactId>
-   <version>1.3.0.M1</version>
+   <version>1.3.0.RC1</version>
    <relativePath>../spring-data-build/parent/pom.xml</relativePath>
  </parent>

@@ -29,8 +29,8 @@
  <properties>
    <project.type>multi</project.type>
    <dist.id>spring-data-mongodb</dist.id>
-   <springdata.commons>1.7.0.M1</springdata.commons>
-   <mongo>2.11.3</mongo>
+   <springdata.commons>1.7.0.RC1</springdata.commons>
+   <mongo>2.11.4</mongo>
  </properties>

  <developers>
@@ -89,6 +89,17 @@
      </roles>
      <timezone>+1</timezone>
    </developer>
+   <developer>
+     <id>cstrobl</id>
+     <name>Christoph Strobl</name>
+     <email>cstrobl at gopivotal.com</email>
+     <organization>Pivotal</organization>
+     <organizationUrl>http://www.gopivotal.com</organizationUrl>
+     <roles>
+       <role>Developer</role>
+     </roles>
+     <timezone>+1</timezone>
+   </developer>
  </developers>

  <dependencies>
@@ -101,10 +112,22 @@
  </dependencies>

  <repositories>
    <repository>
      <id>spring-libs-snapshot</id>
      <url>http://repo.spring.io/libs-snapshot</url>
    </repository>
+
+   <repository>
+     <id>spring-libs-milestone</id>
+     <url>http://repo.springsource.org/libs-milestone-local</url>
+   </repository>
  </repositories>

+ <pluginRepositories>
+   <pluginRepository>
+     <id>spring-plugins-release</id>
+     <url>http://repo.spring.io/plugins-release</url>
+   </pluginRepository>
+ </pluginRepositories>
+
</project>
@@ -6,12 +6,12 @@
  <parent>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-mongodb-parent</artifactId>
-   <version>1.4.0.M1</version>
+   <version>1.4.0.RC1</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

  <artifactId>spring-data-mongodb-cross-store</artifactId>
- <name>Spring Data MongoDB - Cross-Store Persistence Support</name>
+ <name>Spring Data MongoDB - Cross-Store Support</name>

  <properties>
    <jpa>1.0.0.Final</jpa>
@@ -24,7 +24,6 @@
  <dependency>
    <groupId>org.springframework</groupId>
    <artifactId>spring-beans</artifactId>
-   <version>${spring}</version>
    <exclusions>
      <exclusion>
        <groupId>commons-logging</groupId>
@@ -35,24 +34,21 @@
  <dependency>
    <groupId>org.springframework</groupId>
    <artifactId>spring-tx</artifactId>
-   <version>${spring}</version>
  </dependency>
  <dependency>
    <groupId>org.springframework</groupId>
    <artifactId>spring-aspects</artifactId>
-   <version>${spring}</version>
  </dependency>
  <dependency>
    <groupId>org.springframework</groupId>
    <artifactId>spring-orm</artifactId>
-   <version>${spring}</version>
  </dependency>

  <!-- Spring Data -->
  <dependency>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-mongodb</artifactId>
-   <version>1.4.0.M1</version>
+   <version>1.4.0.RC1</version>
  </dependency>

  <dependency>
@@ -1,5 +1,5 @@
 /*
- * Copyright 2011-2013 the original author or authors.
+ * Copyright 2011-2014 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -41,17 +41,13 @@ import com.mongodb.MongoException;
 public class MongoChangeSetPersister implements ChangeSetPersister<Object> {

  private static final String ENTITY_CLASS = "_entity_class";
  private static final String ENTITY_ID = "_entity_id";
  private static final String ENTITY_FIELD_NAME = "_entity_field_name";
  private static final String ENTITY_FIELD_CLASS = "_entity_field_class";

  protected final Logger log = LoggerFactory.getLogger(getClass());
  private MongoTemplate mongoTemplate;
  private EntityManagerFactory entityManagerFactory;

  public void setMongoTemplate(MongoTemplate mongoTemplate) {
@@ -113,12 +109,14 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
   * @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
   */
  public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {

   log.debug("getPersistentId called on " + entity);

   if (entityManagerFactory == null) {
    throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null");
   }
-  Object o = entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity);
-  return o;
+
+  return entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity);
  }

 /*
@@ -13,7 +13,7 @@
  <parent>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-mongodb-parent</artifactId>
-   <version>1.4.0.M1</version>
+   <version>1.4.0.RC1</version>
    <relativePath>../pom.xml</relativePath>
  </parent>
@@ -5,7 +5,7 @@
  <parent>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-mongodb-parent</artifactId>
-   <version>1.4.0.M1</version>
+   <version>1.4.0.RC1</version>
    <relativePath>../pom.xml</relativePath>
  </parent>
@@ -11,7 +11,7 @@
  <parent>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-mongodb-parent</artifactId>
-   <version>1.4.0.M1</version>
+   <version>1.4.0.RC1</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -26,22 +26,18 @@
  <dependency>
    <groupId>org.springframework</groupId>
    <artifactId>spring-tx</artifactId>
-   <version>${spring}</version>
  </dependency>
  <dependency>
    <groupId>org.springframework</groupId>
    <artifactId>spring-context</artifactId>
-   <version>${spring}</version>
  </dependency>
  <dependency>
    <groupId>org.springframework</groupId>
    <artifactId>spring-beans</artifactId>
-   <version>${spring}</version>
  </dependency>
  <dependency>
    <groupId>org.springframework</groupId>
    <artifactId>spring-core</artifactId>
-   <version>${spring}</version>
    <exclusions>
      <exclusion>
        <groupId>commons-logging</groupId>
@@ -52,7 +48,6 @@
  <dependency>
    <groupId>org.springframework</groupId>
    <artifactId>spring-expression</artifactId>
-   <version>${spring}</version>
  </dependency>

  <!-- Spring Data -->
@@ -28,6 +28,7 @@ import org.springframework.core.type.filter.AnnotationTypeFilter;
 import org.springframework.data.annotation.Persistent;
 import org.springframework.data.authentication.UserCredentials;
 import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
+import org.springframework.data.mongodb.MongoDbFactory;
 import org.springframework.data.mongodb.core.MongoTemplate;
 import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
 import org.springframework.data.mongodb.core.convert.CustomConversions;
@@ -101,7 +102,7 @@ public abstract class AbstractMongoConfiguration {
   * @throws Exception
   */
  @Bean
- public SimpleMongoDbFactory mongoDbFactory() throws Exception {
+ public MongoDbFactory mongoDbFactory() throws Exception {
   return new SimpleMongoDbFactory(mongo(), getDatabaseName(), getUserCredentials(), getAuthenticationDatabaseName());
  }
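The hunk above changes the `mongoDbFactory()` bean to be typed against the `MongoDbFactory` interface instead of the concrete `SimpleMongoDbFactory`. A minimal sketch (not part of the diff) of a configuration class built on that contract — the class name, database name, and host are assumptions:

```java
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;

import com.mongodb.Mongo;
import com.mongodb.MongoClient;

// Hypothetical application configuration; subclasses only supply the database name and the
// driver instance, while the inherited mongoDbFactory() bean now exposes the MongoDbFactory interface.
@Configuration
public class ExampleMongoConfig extends AbstractMongoConfiguration {

	@Override
	public String getDatabaseName() {
		return "example"; // assumed database name
	}

	@Override
	public Mongo mongo() throws Exception {
		return new MongoClient("localhost"); // 2.11.x driver client, default port 27017
	}
}
```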
@@ -1,5 +1,5 @@
 /*
- * Copyright 2013 the original author or authors.
+ * Copyright 2013-2014 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -57,6 +57,7 @@ class GridFsTemplateParser extends AbstractBeanDefinitionParser {

  String converterRef = element.getAttribute("converter-ref");
  String dbFactoryRef = element.getAttribute("db-factory-ref");
+ String bucket = element.getAttribute("bucket");

  BeanDefinitionBuilder gridFsTemplateBuilder = BeanDefinitionBuilder.genericBeanDefinition(GridFsTemplate.class);

@@ -72,6 +73,10 @@ class GridFsTemplateParser extends AbstractBeanDefinitionParser {
   gridFsTemplateBuilder.addConstructorArgReference(BeanNames.DEFAULT_CONVERTER_BEAN_NAME);
  }

+ if (StringUtils.hasText(bucket)) {
+  gridFsTemplateBuilder.addConstructorArgValue(bucket);
+ }
+
  return (AbstractBeanDefinition) helper.getComponentIdButFallback(gridFsTemplateBuilder, BeanNames.GRID_FS_TEMPLATE)
    .getBeanDefinition();
 }
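For orientation, a hedged sketch of the Java-config equivalent of the new `bucket` attribute the parser now forwards as a constructor argument; the factory and converter are assumed to come from surrounding configuration, and "attachments" is an invented bucket name:

```java
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.gridfs.GridFsTemplate;

public class GridFsSetupSketch {

	// dbFactory and converter are assumed to be provided elsewhere (e.g. as Spring beans)
	GridFsTemplate gridFsTemplate(MongoDbFactory dbFactory, MappingMongoConverter converter) {
		// the third constructor argument is the GridFS bucket, mirroring the XML "bucket" attribute
		return new GridFsTemplate(dbFactory, converter, "attachments");
	}
}
```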
@@ -1,5 +1,5 @@
 /*
- * Copyright 2011-2013 the original author or authors.
+ * Copyright 2011-2014 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -75,7 +75,7 @@ import org.w3c.dom.Element;
 public class MappingMongoConverterParser implements BeanDefinitionParser {

  private static final String BASE_PACKAGE = "base-package";
- private static final boolean jsr303Present = ClassUtils.isPresent("javax.validation.Validator",
+ private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("javax.validation.Validator",
    MappingMongoConverterParser.class.getClassLoader());

  /* (non-Javadoc)
@@ -166,7 +166,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {

  private RuntimeBeanReference getValidator(Object source, ParserContext parserContext) {

-  if (!jsr303Present) {
+  if (!JSR_303_PRESENT) {
    return null;
   }

@@ -195,7 +195,8 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
   BeanDefinitionBuilder mappingContextBuilder = BeanDefinitionBuilder
     .genericBeanDefinition(MongoMappingContext.class);

-  Set<String> classesToAdd = getInititalEntityClasses(element, mappingContextBuilder);
+  Set<String> classesToAdd = getInititalEntityClasses(element);

   if (classesToAdd != null) {
    mappingContextBuilder.addPropertyValue("initialEntitySet", classesToAdd);
   }
@@ -262,7 +263,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
   return null;
  }

- private static Set<String> getInititalEntityClasses(Element element, BeanDefinitionBuilder builder) {
+ private static Set<String> getInititalEntityClasses(Element element) {

   String basePackage = element.getAttribute(BASE_PACKAGE);
@@ -16,12 +16,14 @@
 package org.springframework.data.mongodb.config;

 import java.beans.PropertyEditorSupport;
+import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.HashSet;
 import java.util.Set;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.util.Assert;
 import org.springframework.util.StringUtils;

 import com.mongodb.ServerAddress;
@@ -35,6 +37,11 @@ import com.mongodb.ServerAddress;
  */
 public class ServerAddressPropertyEditor extends PropertyEditorSupport {

+ /**
+  * A port is a number without a leading 0 at the end of the address that is proceeded by just a single :.
+  */
+ private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";
+ private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address {} '{}'. Check your replica set configuration!";
  private static final Logger LOG = LoggerFactory.getLogger(ServerAddressPropertyEditor.class);

  /*
@@ -77,22 +84,53 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
   */
  private ServerAddress parseServerAddress(String source) {

- String[] hostAndPort = StringUtils.delimitedListToStringArray(source.trim(), ":");
+ if (!StringUtils.hasText(source)) {
+  LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
+  return null;
+ }

- if (!StringUtils.hasText(source) || hostAndPort.length > 2) {
-  LOG.warn("Could not parse address source '{}'. Check your replica set configuration!", source);
+ String[] hostAndPort = extractHostAddressAndPort(source.trim());
+
+ if (hostAndPort.length > 2) {
+  LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
   return null;
  }

  try {
-  return hostAndPort.length == 1 ? new ServerAddress(hostAndPort[0]) : new ServerAddress(hostAndPort[0],
-    Integer.parseInt(hostAndPort[1]));
+  InetAddress hostAddress = InetAddress.getByName(hostAndPort[0]);
+  Integer port = hostAndPort.length == 1 ? null : Integer.parseInt(hostAndPort[1]);
+
+  return port == null ? new ServerAddress(hostAddress) : new ServerAddress(hostAddress, port);
  } catch (UnknownHostException e) {
-  LOG.warn("Could not parse host '{}'. Check your replica set configuration!", hostAndPort[0]);
+  LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]);
  } catch (NumberFormatException e) {
-  LOG.warn("Could not parse port '{}'. Check your replica set configuration!", hostAndPort[1]);
+  LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]);
  }

  return null;
 }

+ /**
+  * Extract the host and port from the given {@link String}.
+  *
+  * @param addressAndPortSource must not be {@literal null}.
+  * @return
+  */
+ private String[] extractHostAddressAndPort(String addressAndPortSource) {
+
+  Assert.notNull(addressAndPortSource, "Address and port source must not be null!");
+
+  String[] hostAndPort = addressAndPortSource.split(HOST_PORT_SPLIT_PATTERN);
+  String hostAddress = hostAndPort[0];
+
+  if (isHostAddressInIPv6BracketNotation(hostAddress)) {
+   hostAndPort[0] = hostAddress.substring(1, hostAddress.length() - 1);
+  }
+
+  return hostAndPort;
+ }
+
+ private boolean isHostAddressInIPv6BracketNotation(String hostAddress) {
+  return hostAddress.startsWith("[") && hostAddress.endsWith("]");
+ }
}
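A small standalone sketch (not part of the change) showing how the new `HOST_PORT_SPLIT_PATTERN` behaves for plain addresses and for bracketed IPv6 replica-set entries; the sample addresses are made up:

```java
import java.util.Arrays;

public class HostPortSplitSketch {

	// Same pattern as the new HOST_PORT_SPLIT_PATTERN constant: split only on a colon that is not
	// preceded by another colon and that is followed by nothing but a port number.
	private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";

	public static void main(String[] args) {
		split("127.0.0.1:27017");  // -> [127.0.0.1, 27017]
		split("[fe80::1]:27018");  // -> [[fe80::1], 27018]; the brackets are stripped afterwards
		split("fe80::1");          // -> [fe80::1]; the IPv6 colons do not trigger a split
	}

	private static void split(String source) {
		System.out.println(Arrays.toString(source.split(HOST_PORT_SPLIT_PATTERN)));
	}
}
```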
@@ -1,16 +1,34 @@
+/*
+ * Copyright 2011-2014 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.springframework.data.mongodb.core;

 import org.springframework.jmx.export.annotation.ManagedOperation;

 /**
  * @author Mark Pollack
+ * @author Oliver Gierke
  */
 public interface MongoAdminOperations {

  @ManagedOperation
- public abstract void dropDatabase(String databaseName);
+ void dropDatabase(String databaseName);

  @ManagedOperation
- public abstract void createDatabase(String databaseName);
+ void createDatabase(String databaseName);

  @ManagedOperation
- public abstract String getDatabaseStats(String databaseName);
-
-}
+ String getDatabaseStats(String databaseName);
+}
@@ -1,5 +1,5 @@
 /*
- * Copyright 2010-2013 the original author or authors.
+ * Copyright 2010-2014 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -30,27 +30,28 @@ import com.mongodb.MongoOptions;
  * @author Mike Saavedra
  * @author Thomas Darimont
  */
+@SuppressWarnings("deprecation")
 public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, InitializingBean {

- @SuppressWarnings("deprecation")//
- private final MongoOptions MONGO_OPTIONS = new MongoOptions();
+ private static final MongoOptions DEFAULT_MONGO_OPTIONS = new MongoOptions();

- private int connectionsPerHost = MONGO_OPTIONS.connectionsPerHost;
- private int threadsAllowedToBlockForConnectionMultiplier = MONGO_OPTIONS.threadsAllowedToBlockForConnectionMultiplier;
- private int maxWaitTime = MONGO_OPTIONS.maxWaitTime;
- private int connectTimeout = MONGO_OPTIONS.connectTimeout;
- private int socketTimeout = MONGO_OPTIONS.socketTimeout;
- private boolean socketKeepAlive = MONGO_OPTIONS.socketKeepAlive;
- private boolean autoConnectRetry = MONGO_OPTIONS.autoConnectRetry;
- private long maxAutoConnectRetryTime = MONGO_OPTIONS.maxAutoConnectRetryTime;
- private int writeNumber;
- private int writeTimeout;
- private boolean writeFsync;
- @SuppressWarnings("deprecation") private boolean slaveOk = MONGO_OPTIONS.slaveOk;
+ private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.connectionsPerHost;
+ private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS.threadsAllowedToBlockForConnectionMultiplier;
+ private int maxWaitTime = DEFAULT_MONGO_OPTIONS.maxWaitTime;
+ private int connectTimeout = DEFAULT_MONGO_OPTIONS.connectTimeout;
+ private int socketTimeout = DEFAULT_MONGO_OPTIONS.socketTimeout;
+ private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.socketKeepAlive;
+ private boolean autoConnectRetry = DEFAULT_MONGO_OPTIONS.autoConnectRetry;
+ private long maxAutoConnectRetryTime = DEFAULT_MONGO_OPTIONS.maxAutoConnectRetryTime;
+ private int writeNumber = DEFAULT_MONGO_OPTIONS.w;
+ private int writeTimeout = DEFAULT_MONGO_OPTIONS.wtimeout;
+ private boolean writeFsync = DEFAULT_MONGO_OPTIONS.fsync;
+ private boolean slaveOk = DEFAULT_MONGO_OPTIONS.slaveOk;
  private boolean ssl;
  private SSLSocketFactory sslSocketFactory;

+ private MongoOptions options;
+
  /**
   * Configures the maximum number of connections allowed per host until we will block.
   *
@@ -197,24 +198,28 @@ public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, Initi
  * (non-Javadoc)
  * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
  */
 @SuppressWarnings("deprecation")
 public void afterPropertiesSet() {

- MONGO_OPTIONS.connectionsPerHost = connectionsPerHost;
- MONGO_OPTIONS.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier;
- MONGO_OPTIONS.maxWaitTime = maxWaitTime;
- MONGO_OPTIONS.connectTimeout = connectTimeout;
- MONGO_OPTIONS.socketTimeout = socketTimeout;
- MONGO_OPTIONS.socketKeepAlive = socketKeepAlive;
- MONGO_OPTIONS.autoConnectRetry = autoConnectRetry;
- MONGO_OPTIONS.maxAutoConnectRetryTime = maxAutoConnectRetryTime;
- MONGO_OPTIONS.slaveOk = slaveOk;
- MONGO_OPTIONS.w = writeNumber;
- MONGO_OPTIONS.wtimeout = writeTimeout;
- MONGO_OPTIONS.fsync = writeFsync;
+ MongoOptions options = new MongoOptions();
+
+ options.connectionsPerHost = connectionsPerHost;
+ options.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier;
+ options.maxWaitTime = maxWaitTime;
+ options.connectTimeout = connectTimeout;
+ options.socketTimeout = socketTimeout;
+ options.socketKeepAlive = socketKeepAlive;
+ options.autoConnectRetry = autoConnectRetry;
+ options.maxAutoConnectRetryTime = maxAutoConnectRetryTime;
+ options.slaveOk = slaveOk;
+ options.w = writeNumber;
+ options.wtimeout = writeTimeout;
+ options.fsync = writeFsync;

  if (ssl) {
-  MONGO_OPTIONS.setSocketFactory(sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory.getDefault());
+  options.setSocketFactory(sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory.getDefault());
  }
+
+ this.options = options;
 }

 /*
@@ -222,7 +227,7 @@ public class MongoOptionsFactoryBean implements FactoryBean<MongoOptions>, Initi
  * @see org.springframework.beans.factory.FactoryBean#getObject()
  */
 public MongoOptions getObject() {
-  return MONGO_OPTIONS;
+  return this.options;
 }

 /*
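A usage sketch of the reworked factory bean: `afterPropertiesSet()` now assembles a fresh `MongoOptions` instance instead of mutating the shared defaults. The setter names below are assumptions inferred from the property names shown in the diff:

```java
import com.mongodb.MongoOptions;

import org.springframework.data.mongodb.core.MongoOptionsFactoryBean;

public class MongoOptionsSketch {

	@SuppressWarnings("deprecation") // MongoOptions is deprecated in the 2.11.x driver
	MongoOptions buildOptions() {
		MongoOptionsFactoryBean factoryBean = new MongoOptionsFactoryBean();
		factoryBean.setConnectionsPerHost(50); // assumed setter for the connectionsPerHost property
		factoryBean.setSocketKeepAlive(true);  // assumed setter for the socketKeepAlive property
		factoryBean.afterPropertiesSet();      // now populates a new MongoOptions rather than the defaults
		return factoryBean.getObject();
	}
}
```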
@@ -1,5 +1,5 @@
 /*
- * Copyright 2010-2013 the original author or authors.
+ * Copyright 2010-2014 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -127,6 +127,7 @@ import com.mongodb.util.JSONParseException;
  * @author Sebastian Herold
  * @author Thomas Darimont
  * @author Chuong Ngo
+ * @author Christoph Strobl
  */
 public class MongoTemplate implements MongoOperations, ApplicationContextAware {

@@ -369,12 +370,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {

  Assert.notNull(query);

- DBObject queryObject = query.getQueryObject();
+ DBObject queryObject = queryMapper.getMappedObject(query.getQueryObject(), null);
  DBObject sortObject = query.getSortObject();
  DBObject fieldsObject = query.getFieldsObject();

  if (LOGGER.isDebugEnabled()) {
-  LOGGER.debug(String.format("Executing query: %s sort: %s fields: %s in collection: $s",
+  LOGGER.debug(String.format("Executing query: %s sort: %s fields: %s in collection: %s",
    serializeToJsonSafely(queryObject), sortObject, fieldsObject, collectionName));
  }

@@ -996,6 +997,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {

  MongoPersistentEntity<?> entity = entityClass == null ? null : getPersistentEntity(entityClass);

+ increaseVersionForUpdateIfNecessary(entity, update);
+
  DBObject queryObj = query == null ? new BasicDBObject() : queryMapper.getMappedObject(query.getQueryObject(),
    entity);
  DBObject updateObj = update == null ? new BasicDBObject() : updateMapper.getMappedObject(
@@ -1025,6 +1028,17 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
  });
 }

+ private void increaseVersionForUpdateIfNecessary(MongoPersistentEntity<?> persistentEntity, Update update) {
+
+  if (persistentEntity != null && persistentEntity.hasVersionProperty()) {
+
+   String versionPropertyField = persistentEntity.getVersionProperty().getFieldName();
+   if (!update.getUpdateObject().containsField(versionPropertyField)) {
+    update.inc(versionPropertyField, 1L);
+   }
+  }
+ }
+
 public void remove(Object object) {

  if (object == null) {
@@ -1167,6 +1181,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {

 public <T> MapReduceResults<T> mapReduce(Query query, String inputCollectionName, String mapFunction,
   String reduceFunction, MapReduceOptions mapReduceOptions, Class<T> entityClass) {
+
  String mapFunc = replaceWithResourceIfNecessary(mapFunction);
  String reduceFunc = replaceWithResourceIfNecessary(reduceFunction);
  DBCollection inputCollection = getCollection(inputCollectionName);
@@ -1191,12 +1206,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
  MapReduceOutput mapReduceOutput = new MapReduceOutput(inputCollection, commandObject, commandResult);
  List<T> mappedResults = new ArrayList<T>();
  DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass);
+
  for (DBObject dbObject : mapReduceOutput.results()) {
   mappedResults.add(callback.doWith(dbObject));
  }

- MapReduceResults<T> mapReduceResult = new MapReduceResults<T>(mappedResults, commandResult);
- return mapReduceResult;
+ return new MapReduceResults<T>(mappedResults, commandResult);
 }

 public <T> GroupByResults<T> group(String inputCollectionName, GroupBy groupBy, Class<T> entityClass) {
@@ -1250,15 +1265,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {

  @SuppressWarnings("unchecked")
  Iterable<DBObject> resultSet = (Iterable<DBObject>) commandResult.get("retval");
-
  List<T> mappedResults = new ArrayList<T>();
  DbObjectCallback<T> callback = new ReadDbObjectCallback<T>(mongoConverter, entityClass);
+
  for (DBObject dbObject : resultSet) {
   mappedResults.add(callback.doWith(dbObject));
  }
- GroupByResults<T> groupByResult = new GroupByResults<T>(mappedResults, commandResult);
- return groupByResult;
+
+ return new GroupByResults<T>(mappedResults, commandResult);
 }

 @Override
@@ -1551,8 +1565,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {

  MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);

- DBObject mappedUpdate = queryMapper.getMappedObject(update.getUpdateObject(), entity);
  DBObject mappedQuery = queryMapper.getMappedObject(query, entity);
+ DBObject mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity);

  if (LOGGER.isDebugEnabled()) {
   LOGGER.debug("findAndModify using query: " + mappedQuery + " fields: " + fields + " sort: " + sort
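A sketch of what the new `increaseVersionForUpdateIfNecessary` hook means for callers: an update against a versioned entity now receives an implicit `$inc` on the version field unless the update already touches it. The entity and field names below are invented for the example:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Update;

public class VersionedUpdateSketch {

	static class Account {      // hypothetical versioned document
		@Id String id;
		@Version Long version;
		double balance;
	}

	void credit(MongoOperations operations, String accountId, double amount) {
		// The mapped update now also carries {"$inc": {"version": 1}}, because Account has a
		// version property and the update below does not set it explicitly.
		operations.updateFirst(query(where("id").is(accountId)),
				new Update().inc("balance", amount), Account.class);
	}
}
```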
@@ -1,5 +1,5 @@
 /*
- * Copyright 2013 the original author or authors.
+ * Copyright 2013-2014 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -32,7 +32,7 @@ import org.springframework.util.CompositeIterator;
  * @author Thomas Darimont
  * @since 1.3
  */
-public class ExposedFields implements Iterable<ExposedField> {
+public final class ExposedFields implements Iterable<ExposedField> {

 private static final List<ExposedField> NO_FIELDS = Collections.emptyList();
 private static final ExposedFields EMPTY = new ExposedFields(NO_FIELDS, NO_FIELDS);
@@ -343,13 +343,6 @@ public class ExposedFields implements Iterable<ExposedField> {
  this.field = field;
 }

- /**
-  * @return
-  */
- public boolean isSynthetic() {
-  return field.synthetic;
- }
-
 /**
  * Returns the raw, unqualified reference, i.e. the field reference without a {@literal $} prefix.
  *
@@ -361,6 +354,16 @@ public class ExposedFields implements Iterable<ExposedField> {
  return field.synthetic ? target : String.format("%s.%s", Fields.UNDERSCORE_ID, target);
 }

+ /**
+  * Returns the referenve value for the given field reference. Will return 1 for a synthetic, unaliased field or the
+  * raw rendering of the reference otherwise.
+  *
+  * @return
+  */
+ public Object getReferenceValue() {
+  return field.synthetic && !field.isAliased() ? 1 : toString();
+ }
+
 /*
  * (non-Javadoc)
  * @see java.lang.Object#toString()
@@ -1,5 +1,5 @@
 /*
- * Copyright 2013 the original author or authors.
+ * Copyright 2013-2014 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -32,13 +32,13 @@ import org.springframework.util.StringUtils;
  * @author Oliver Gierke
  * @since 1.3
  */
-public class Fields implements Iterable<Field> {
+public final class Fields implements Iterable<Field> {

 private static final String AMBIGUOUS_EXCEPTION = "Found two fields both using '%s' as name: %s and %s! Please "
   + "customize your field definitions to get to unique field names!";

- public static String UNDERSCORE_ID = "_id";
- public static String UNDERSCORE_ID_REF = "$_id";
+ public static final String UNDERSCORE_ID = "_id";
+ public static final String UNDERSCORE_ID_REF = "$_id";

 private final List<Field> fields;

@@ -203,7 +203,7 @@ public class Fields implements Iterable<Field> {
  Assert.hasText(nameToSet, "AggregationField name must not be null or empty!");

  if (target == null && name.contains(".")) {
-  this.name = nameToSet.substring(nameToSet.indexOf(".") + 1);
+  this.name = nameToSet.substring(nameToSet.indexOf('.') + 1);
   this.target = nameToSet;
  } else {
   this.name = nameToSet;
@@ -1,5 +1,5 @@
 /*
- * Copyright 2013 the original author or authors.
+ * Copyright 2013-2014 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -99,7 +99,7 @@ public class GroupOperation implements FieldsExposingAggregationOperation {
  *
  * @author Thomas Darimont
  */
- public class GroupOperationBuilder {
+ public static final class GroupOperationBuilder {

  private final GroupOperation groupOperation;
  private final Operation operation;
@@ -1,5 +1,5 @@
 /*
- * Copyright 2013 the original author or authors.
+ * Copyright 2013-2014 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -21,7 +21,6 @@ import java.util.Collections;
 import java.util.List;

 import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
-import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
 import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.FieldProjection;
 import org.springframework.util.Assert;

@@ -253,7 +252,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
  public ExpressionProjectionOperationBuilder(Object value, ProjectionOperation operation, Object[] parameters) {

   super(value, operation);
-  this.params = parameters;
+  this.params = parameters.clone();
  }

  /*
@@ -295,7 +294,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
   Assert.notNull(parameters, "Parameters must not be null!");

   this.expression = expression;
-  this.params = parameters;
+  this.params = parameters.clone();
  }

  /*
@@ -317,6 +316,9 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
  */
 public static class ProjectionOperationBuilder extends AbstractProjectionOperationBuilder {

+ private static final String NUMBER_NOT_NULL = "Number must not be null!";
+ private static final String FIELD_REFERENCE_NOT_NULL = "Field reference must not be null!";
+
  private final String name;
  private final ProjectionOperation operation;
  private final OperationProjection previousProjection;
@@ -383,7 +385,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
  */
 public ProjectionOperationBuilder plus(Number number) {

- Assert.notNull(number, "Number must not be null!");
+ Assert.notNull(number, NUMBER_NOT_NULL);
  return project("add", number);
 }

@@ -420,7 +422,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
  */
 public ProjectionOperationBuilder minus(String fieldReference) {

- Assert.notNull(fieldReference, "Field reference must not be null!");
+ Assert.notNull(fieldReference, FIELD_REFERENCE_NOT_NULL);
  return project("subtract", Fields.field(fieldReference));
 }

@@ -432,7 +434,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
  */
 public ProjectionOperationBuilder multiply(Number number) {

- Assert.notNull(number, "Number must not be null!");
+ Assert.notNull(number, NUMBER_NOT_NULL);
  return project("multiply", number);
 }

@@ -445,7 +447,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
  */
 public ProjectionOperationBuilder multiply(String fieldReference) {

- Assert.notNull(fieldReference, "Field reference must not be null!");
+ Assert.notNull(fieldReference, FIELD_REFERENCE_NOT_NULL);
  return project("multiply", Fields.field(fieldReference));
 }

@@ -457,7 +459,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
  */
 public ProjectionOperationBuilder divide(Number number) {

- Assert.notNull(number, "Number must not be null!");
+ Assert.notNull(number, FIELD_REFERENCE_NOT_NULL);
  Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero!");
  return project("divide", number);
 }
@@ -471,7 +473,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
  */
 public ProjectionOperationBuilder divide(String fieldReference) {

- Assert.notNull(fieldReference, "Field reference must not be null!");
+ Assert.notNull(fieldReference, FIELD_REFERENCE_NOT_NULL);
  return project("divide", Fields.field(fieldReference));
 }

@@ -484,7 +486,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
  */
 public ProjectionOperationBuilder mod(Number number) {

- Assert.notNull(number, "Number must not be null!");
+ Assert.notNull(number, NUMBER_NOT_NULL);
  Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero!");
  return project("mod", number);
 }
@@ -498,7 +500,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
  */
 public ProjectionOperationBuilder mod(String fieldReference) {

- Assert.notNull(fieldReference, "Field reference must not be null!");
+ Assert.notNull(fieldReference, FIELD_REFERENCE_NOT_NULL);
  return project("mod", Fields.field(fieldReference));
 }

@@ -626,8 +628,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
  if (value == null || Boolean.TRUE.equals(value)) {

   // check whether referenced field exists in the context
-  FieldReference reference = context.getReference(field.getTarget());
-  return reference.isSynthetic() && !field.isAliased() ? 1 : reference.toString();
+  return context.getReference(field).getReferenceValue();

  } else if (Boolean.FALSE.equals(value)) {
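For orientation, a hedged usage sketch of the projection builder that the new message constants and defensive `clone()` calls above belong to; the field names are illustrative:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.project;

import org.springframework.data.mongodb.core.aggregation.Aggregation;

public class ProjectionSketch {

	Aggregation priceProjection() {
		// project one field as-is and derive another through the arithmetic builder methods
		// (plus/minus/multiply/divide/mod) whose null checks were touched in the diff above
		return newAggregation(
				project("quantity")
						.and("netPrice").multiply(1.19).as("grossPrice")); // illustrative field names
	}
}
```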
@@ -88,14 +88,16 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
  */
 @Override
 public FieldReference getReference(String name) {
-
- PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(name, type);
-
- return getReferenceFor(field(propertyPath.getLeafProperty().getName(),
-   propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)));
+ return getReferenceFor(field(name));
 }

 private FieldReference getReferenceFor(Field field) {
-  return new FieldReference(new ExposedField(field, true));
+
+  PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(
+    field.getTarget(), type);
+  Field mappedField = field(propertyPath.getLeafProperty().getName(),
+    propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE));
+
+  return new FieldReference(new ExposedField(mappedField, true));
 }
}
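A sketch of the kind of typed aggregation this context serves; with the change above, the reference for a property is resolved against the mapping metadata of the target type. The domain class and its properties are hypothetical:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.group;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.mapping.Field;

public class TypedAggregationSketch {

	static class Order {                     // hypothetical document
		@Id String id;
		@Field("cust_id") String customerId; // property name differs from the stored field name
	}

	TypedAggregation<Order> ordersPerCustomer() {
		// "customerId" is translated to "cust_id" via the persistent-property path,
		// which is what the reworked getReferenceFor(Field) now takes care of
		return newAggregation(Order.class, group("customerId").count().as("orders"));
	}
}
```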
@@ -1,5 +1,5 @@
 /*
- * Copyright 2012 the original author or authors.
+ * Copyright 2012-2014 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -31,18 +31,30 @@ import com.mongodb.DBObject;
  */
 class DBObjectPropertyAccessor extends MapAccessor {

- static MapAccessor INSTANCE = new DBObjectPropertyAccessor();
+ static final MapAccessor INSTANCE = new DBObjectPropertyAccessor();

+ /*
+  * (non-Javadoc)
+  * @see org.springframework.context.expression.MapAccessor#getSpecificTargetClasses()
+  */
 @Override
 public Class<?>[] getSpecificTargetClasses() {
  return new Class[] { DBObject.class };
 }

+ /*
+  * (non-Javadoc)
+  * @see org.springframework.context.expression.MapAccessor#canRead(org.springframework.expression.EvaluationContext, java.lang.Object, java.lang.String)
+  */
 @Override
 public boolean canRead(EvaluationContext context, Object target, String name) {
  return true;
 }

+ /*
+  * (non-Javadoc)
+  * @see org.springframework.context.expression.MapAccessor#read(org.springframework.expression.EvaluationContext, java.lang.Object, java.lang.String)
+  */
 @Override
 @SuppressWarnings("unchecked")
 public TypedValue read(EvaluationContext context, Object target, String name) {
@@ -52,4 +64,4 @@ class DBObjectPropertyAccessor extends MapAccessor {
  Object value = source.get(name);
  return value == null ? TypedValue.NULL : new TypedValue(value);
 }
-}
+}
@@ -1,5 +1,5 @@
 /*
- * Copyright 2011-2013 by the original author(s).
+ * Copyright 2011-2014 by the original author(s).
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -74,7 +74,7 @@ import com.mongodb.DBRef;
  */
 public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {

- protected static final Logger log = LoggerFactory.getLogger(MappingMongoConverter.class);
+ protected static final Logger LOGGER = LoggerFactory.getLogger(MappingMongoConverter.class);

 protected final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
 protected final SpelExpressionParser spelExpressionParser = new SpelExpressionParser();
@@ -668,7 +668,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
  */
 protected void addCustomTypeKeyIfNecessary(TypeInformation<?> type, Object value, DBObject dbObject) {

- TypeInformation<?> actualType = type != null ? type.getActualType() : type;
+ TypeInformation<?> actualType = type != null ? type.getActualType() : null;
  Class<?> reference = actualType == null ? Object.class : actualType.getType();

  boolean notTheSameClass = !value.getClass().equals(reference);
@@ -1,5 +1,5 @@
 /*
- * Copyright 2011-2013 the original author or authors.
+ * Copyright 2011-2014 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -23,6 +23,7 @@ import java.util.Set;
 import org.bson.types.ObjectId;
 import org.springframework.core.convert.ConversionException;
 import org.springframework.core.convert.ConversionService;
+import org.springframework.core.convert.converter.Converter;
 import org.springframework.data.mapping.PersistentEntity;
 import org.springframework.data.mapping.PropertyPath;
 import org.springframework.data.mapping.PropertyReferenceException;
@@ -30,6 +31,7 @@ import org.springframework.data.mapping.context.MappingContext;
 import org.springframework.data.mapping.context.PersistentPropertyPath;
 import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
 import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
+import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
 import org.springframework.data.mongodb.core.query.Query;
 import org.springframework.util.Assert;

@@ -44,6 +46,7 @@ import com.mongodb.DBRef;
  * @author Jon Brisbin
  * @author Oliver Gierke
  * @author Patryk Wasik
+ * @author Thomas Darimont
  */
 public class QueryMapper {

@@ -101,7 +104,7 @@ public class QueryMapper {
    continue;
   }

-  Field field = entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext);
+  Field field = createPropertyField(entity, key, mappingContext);

   Object rawValue = query.get(key);
   String newKey = field.getMappedKey();
@@ -117,6 +120,17 @@
  return result;
 }

+ /**
+  * @param entity
+  * @param key
+  * @param mappingContext
+  * @return
+  */
+ protected Field createPropertyField(MongoPersistentEntity<?> entity, String key,
+   MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
+  return entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext);
+ }
+
 /**
  * Returns the given {@link DBObject} representing a keyword by mapping the keyword's value.
  *
@@ -176,20 +190,22 @@

  if (value instanceof DBObject) {
   DBObject valueDbo = (DBObject) value;
+  DBObject resultDbo = new BasicDBObject(valueDbo.toMap());

   if (valueDbo.containsField("$in") || valueDbo.containsField("$nin")) {
    String inKey = valueDbo.containsField("$in") ? "$in" : "$nin";
    List<Object> ids = new ArrayList<Object>();
    for (Object id : (Iterable<?>) valueDbo.get(inKey)) {
     ids.add(convertId(id));
    }
-   valueDbo.put(inKey, ids.toArray(new Object[ids.size()]));
+   resultDbo.put(inKey, ids.toArray(new Object[ids.size()]));
   } else if (valueDbo.containsField("$ne")) {
-   valueDbo.put("$ne", convertId(valueDbo.get("$ne")));
+   resultDbo.put("$ne", convertId(valueDbo.get("$ne")));
   } else {
-   return getMappedObject((DBObject) value, null);
+   return getMappedObject(resultDbo, null);
   }
-  return valueDbo;
+  return resultDbo;

  } else {
   return convertId(value);
@@ -200,13 +216,28 @@
   return getMappedKeyword(new Keyword((DBObject) value), null);
  }

- if (documentField.isAssociation()) {
+ if (isAssociationConversionNecessary(documentField, value)) {
   return convertAssociation(value, documentField.getProperty());
  }

  return convertSimpleOrDBObject(value, documentField.getPropertyEntity());
 }

+ /**
+  * Returns whether the given {@link Field} represents an association reference that together with the given value
+  * requires conversion to a {@link org.springframework.data.mongodb.core.mapping.DBRef} object. We check whether the
+  * type of the given value is compatible with the type of the given document field in order to deal with potential
+  * query field exclusions, since MongoDB uses the {@code int} {@literal 0} as an indicator for an excluded field.
+  *
+  * @param documentField
+  * @param value
+  * @return
+  */
+ private boolean isAssociationConversionNecessary(Field documentField, Object value) {
+  return documentField.isAssociation() && value != null
+    && documentField.getProperty().getActualType().isAssignableFrom(value.getClass());
+ }
+
 /**
  * Retriggers mapping if the given source is a {@link DBObject} or simply invokes the
  *
@@ -248,7 +279,8 @@
  */
 private Object convertAssociation(Object source, MongoPersistentProperty property) {

- if (property == null || !property.isAssociation()) {
+ if (property == null || !property.isAssociation() || source == null || source instanceof DBRef
+   || !property.isEntity()) {
   return source;
  }

@@ -270,7 +302,7 @@
   return result;
  }

- return source == null || source instanceof DBRef ? source : converter.toDBRef(source, property);
+ return converter.toDBRef(source, property);
 }

 /**
@@ -381,7 +413,7 @@
  *
  * @author Oliver Gierke
  */
- private static class Field {
+ protected static class Field {

  private static final String ID_KEY = "_id";

@@ -458,12 +490,14 @@
  * Extension of {@link DocumentField} to be backed with mapping metadata.
  *
  * @author Oliver Gierke
+ * @author Thomas Darimont
  */
- private static class MetadataBackedField extends Field {
+ protected static class MetadataBackedField extends Field {

  private final MongoPersistentEntity<?> entity;
  private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
  private final MongoPersistentProperty property;
+ private final PersistentPropertyPath<MongoPersistentProperty> path;

  /**
   * Creates a new {@link MetadataBackedField} with the given name, {@link MongoPersistentEntity} and
@@ -483,7 +517,7 @@
   this.entity = entity;
   this.mappingContext = context;

-  PersistentPropertyPath<MongoPersistentProperty> path = getPath(name);
+  this.path = getPath(name);
   this.property = path == null ? null : path.getLeafProperty();
  }

@@ -548,19 +582,33 @@
   */
  @Override
  public String getMappedKey() {
-
-  PersistentPropertyPath<MongoPersistentProperty> path = getPath(name);
-  return path == null ? name : path.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE);
+  return path == null ? name : path.toDotPath(getPropertyConverter());
  }

- private PersistentPropertyPath<MongoPersistentProperty> getPath(String name) {
+ /**
+  * Returns the {@link PersistentPropertyPath} for the given <code>pathExpression</code>.
+  *
+  * @param pathExpression
+  * @return
+  */
+ private PersistentPropertyPath<MongoPersistentProperty> getPath(String pathExpression) {

   try {
-   PropertyPath path = PropertyPath.from(name, entity.getTypeInformation());
+   PropertyPath path = PropertyPath.from(pathExpression, entity.getTypeInformation());
    return mappingContext.getPersistentPropertyPath(path);
   } catch (PropertyReferenceException e) {
    return null;
   }
  }

+ /**
+  * Return the {@link Converter} to be used to created the mapped key. Default implementation will use
+  * {@link PropertyToFieldNameConverter}.
+  *
+  * @return
+  */
+ protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
+  return PropertyToFieldNameConverter.INSTANCE;
+ }
 }
}
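A sketch of the observable effect of the `resultDbo` copy introduced above: mapping a query whose id criteria use `$in` no longer rewrites the caller's `Query` object. The document type and the 24-character hex ids are invented for the example:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import java.util.List;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Query;

public class InQuerySketch {

	static class Person {   // hypothetical mapped document
		String id;
		String name;
	}

	List<Person> findByIds(MongoOperations operations) {
		// The String ids are converted to ObjectId in the *mapped* query ({"_id": {"$in": [...]}}),
		// while query.getQueryObject() keeps the original String values, because the mapper now
		// works on a copy of the keyword value instead of mutating it in place.
		Query query = query(where("id").in("53a1b2c3d4e5f60718293a4b", "53a1b2c3d4e5f60718293a4c"));
		return operations.find(query, Person.class);
	}
}
```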
@@ -1,5 +1,5 @@
 /*
- * Copyright 2013 the original author or authors.
+ * Copyright 2013-2014 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,12 +15,21 @@
  */
 package org.springframework.data.mongodb.core.convert;

+import java.util.Arrays;
+import java.util.Iterator;
+
+import org.springframework.core.convert.converter.Converter;
 import org.springframework.data.mapping.context.MappingContext;
 import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
 import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
+import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
+import org.springframework.util.Assert;

 /**
  * A subclass of {@link QueryMapper} that retains type information on the mongo types.
  *
  * @author Thomas Darimont
+ * @author Oliver Gierke
  */
 public class UpdateMapper extends QueryMapper {

@@ -49,4 +58,90 @@ public class UpdateMapper extends QueryMapper {
  return entity == null ? super.delegateConvertToMongoType(source, null) : converter.convertToMongoType(source,
    entity.getTypeInformation());
 }

+ /*
+  * (non-Javadoc)
+  * @see org.springframework.data.mongodb.core.convert.QueryMapper#createPropertyField(org.springframework.data.mongodb.core.mapping.MongoPersistentEntity, java.lang.String, org.springframework.data.mapping.context.MappingContext)
+  */
+ @Override
+ protected Field createPropertyField(MongoPersistentEntity<?> entity, String key,
+   MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
+
+  return entity == null ? super.createPropertyField(entity, key, mappingContext) : //
+    new MetadataBackedUpdateField(entity, key, mappingContext);
+ }
+
+ /**
+  * {@link MetadataBackedField} that handles {@literal $} paths inside a field key. We clean up an update key
+  * containing a {@literal $} before handing it to the super class to make sure property lookups and transformations
+  * continue to work as expected. We provide a custom property converter to re-applied the cleaned up {@literal $}s
+  * when constructing the mapped key.
+  *
+  * @author Thomas Darimont
+  * @author Oliver Gierke
+  */
+ private static class MetadataBackedUpdateField extends MetadataBackedField {
+
+  private final String key;
+
+  /**
+   * Creates a new {@link MetadataBackedField} with the given {@link MongoPersistentEntity}, key and
+   * {@link MappingContext}. We clean up the key before handing it up to the super class to make sure it continues to
+   * work as expected.
+   *
+   * @param entity must not be {@literal null}.
+   * @param key must not be {@literal null} or empty.
+   * @param mappingContext must not be {@literal null}.
+   */
+  public MetadataBackedUpdateField(MongoPersistentEntity<?> entity, String key,
+    MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
+
+   super(key.replaceAll("\\.\\$", ""), entity, mappingContext);
+   this.key = key;
+  }
+
+  /*
+   * (non-Javadoc)
+   * @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getPropertyConverter()
+   */
+  @Override
+  protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
+   return new UpdatePropertyConverter(key);
+  }
+
+  /**
+   * Special {@link Converter} for {@link MongoPersistentProperty} instances that will concatenate the {@literal $}
+   * contained in the source update key.
+   *
+   * @author Oliver Gierke
+   */
+  private static class UpdatePropertyConverter implements Converter<MongoPersistentProperty, String> {
+
+   private final Iterator<String> iterator;
+
+   /**
+    * Creates a new {@link UpdatePropertyConverter} with the given update key.
+    *
+    * @param updateKey must not be {@literal null} or empty.
+    */
+   public UpdatePropertyConverter(String updateKey) {
+
+    Assert.hasText(updateKey, "Update key must not be null or empty!");
+
+    this.iterator = Arrays.asList(updateKey.split("\\.")).iterator();
+    this.iterator.next();
+   }
+
+   /*
+    * (non-Javadoc)
+    * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
+    */
+   @Override
+   public String convert(MongoPersistentProperty property) {
+
+    String mappedName = PropertyToFieldNameConverter.INSTANCE.convert(property);
+    return iterator.hasNext() && iterator.next().equals("$") ? String.format("%s.$", mappedName) : mappedName;
+   }
+  }
+ }
}
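To make the positional-operator handling above concrete, a hedged usage sketch: the `$` segment is stripped for metadata lookup and re-applied to the mapped key. The document layout and field names are invented for the example:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import java.util.List;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.query.Update;

public class PositionalUpdateSketch {

	static class Order {             // hypothetical document with an embedded list
		String id;
		List<LineItem> items;
	}

	static class LineItem {
		@Field("n") String name;     // stored under the short field name "n"
		double price;
	}

	void renameMatchedItem(MongoOperations operations, String oldName, String newName) {
		// "items.$.name" is looked up as "items.name" against the metadata and then mapped back
		// to "items.$.n", keeping the positional operator in the update sent to MongoDB.
		operations.updateFirst(query(where("items.name").is(oldName)),
				new Update().set("items.$.name", newName), Order.class);
	}
}
```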
@@ -0,0 +1,41 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
/**
|
||||
 * Geospatial index type.
|
||||
*
|
||||
* @author Laurent Canet
|
||||
* @author Oliver Gierke
|
||||
* @since 1.4
|
||||
*/
|
||||
public enum GeoSpatialIndexType {
|
||||
|
||||
/**
|
||||
* Simple 2-Dimensional index for legacy-format points.
|
||||
*/
|
||||
GEO_2D,
|
||||
|
||||
/**
|
||||
 * 2D index for GeoJSON-formatted data over a sphere. Requires MongoDB 2.4 or better.
|
||||
*/
|
||||
GEO_2DSPHERE,
|
||||
|
||||
/**
|
||||
 * A haystack index for grouping results over small areas.
|
||||
*/
|
||||
GEO_HAYSTACK
|
||||
}
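
A hypothetical domain class illustrating how the index type is selected through `@GeoSpatialIndexed` (extended below); the class and field names are assumed, and `GEO_2DSPHERE` requires a MongoDB 2.4+ server:

```java
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexed;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Venue {

	@Id String id;

	// legacy coordinate pair, indexed as "2d" (the default type)
	@GeoSpatialIndexed
	double[] location;

	// indexed as "2dsphere"; 2dsphere indexes also accept legacy coordinate pairs
	@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE)
	double[] position;
}
```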
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -13,7 +13,6 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
@@ -24,7 +23,8 @@ import java.lang.annotation.Target;
|
||||
/**
|
||||
* Mark a field to be indexed using MongoDB's geospatial indexing feature.
|
||||
*
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* @author Jon Brisbin
|
||||
* @author Laurent Canet
|
||||
*/
|
||||
@Target(ElementType.FIELD)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@@ -65,4 +65,27 @@ public @interface GeoSpatialIndexed {
|
||||
*/
|
||||
int bits() default 26;
|
||||
|
||||
/**
|
||||
* The type of the geospatial index. Default is {@link GeoSpatialIndexType#GEO_2D}
|
||||
*
|
||||
* @since 1.4
|
||||
* @return
|
||||
*/
|
||||
GeoSpatialIndexType type() default GeoSpatialIndexType.GEO_2D;
|
||||
|
||||
/**
|
||||
* The bucket size for {@link GeoSpatialIndexType#GEO_HAYSTACK} indexes, in coordinate units.
|
||||
*
|
||||
* @since 1.4
|
||||
* @return
|
||||
*/
|
||||
double bucketSize() default 1.0;
|
||||
|
||||
/**
|
||||
* The name of the additional field to use for {@link GeoSpatialIndexType#GEO_HAYSTACK} indexes
|
||||
*
|
||||
* @since 1.4
|
||||
* @return
|
||||
*/
|
||||
String additionalField() default "";
|
||||
}
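
For the haystack case the two new attributes work together: `additionalField` names the second key of the compound haystack index and `bucketSize` sets the grouping granularity. A hypothetical example, with class and field names assumed:

```java
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexed;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Restaurant {

	@Id String id;

	// creates { position: "geoHaystack", cuisine: 1 } with bucketSize 1.0
	@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_HAYSTACK, additionalField = "cuisine", bucketSize = 1.0)
	double[] position;

	String cuisine;
}
```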
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,6 +16,7 @@
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
@@ -25,14 +26,18 @@ import com.mongodb.DBObject;
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Laurent Canet
|
||||
*/
|
||||
public class GeospatialIndex implements IndexDefinition {
|
||||
|
||||
private final String field;
|
||||
private String name;
|
||||
private Integer min = null;
|
||||
private Integer max = null;
|
||||
private Integer bits = null;
|
||||
private Integer min;
|
||||
private Integer max;
|
||||
private Integer bits;
|
||||
private GeoSpatialIndexType type = GeoSpatialIndexType.GEO_2D;
|
||||
private Double bucketSize = 1.0;
|
||||
private String additionalField;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeospatialIndex} for the given field.
|
||||
@@ -40,52 +45,146 @@ public class GeospatialIndex implements IndexDefinition {
|
||||
* @param field must not be empty or {@literal null}.
|
||||
*/
|
||||
public GeospatialIndex(String field) {
|
||||
Assert.hasText(field);
|
||||
|
||||
Assert.hasText(field, "Field must have text!");
|
||||
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex named(String name) {
|
||||
|
||||
Assert.hasText(name, "Name must have text!");
|
||||
|
||||
this.name = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param min
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withMin(int min) {
|
||||
this.min = Integer.valueOf(min);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param max
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withMax(int max) {
|
||||
this.max = Integer.valueOf(max);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param bits
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withBits(int bits) {
|
||||
this.bits = Integer.valueOf(bits);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param type must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex typed(GeoSpatialIndexType type) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
|
||||
this.type = type;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param bucketSize
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withBucketSize(double bucketSize) {
|
||||
this.bucketSize = bucketSize;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
 * @param fieldName
|
||||
* @return
|
||||
*/
|
||||
public GeospatialIndex withAdditionalField(String fieldName) {
|
||||
this.additionalField = fieldName;
|
||||
return this;
|
||||
}
|
||||
|
||||
public DBObject getIndexKeys() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
dbo.put(field, "2d");
|
||||
|
||||
switch (type) {
|
||||
|
||||
case GEO_2D:
|
||||
dbo.put(field, "2d");
|
||||
break;
|
||||
|
||||
case GEO_2DSPHERE:
|
||||
dbo.put(field, "2dsphere");
|
||||
break;
|
||||
|
||||
case GEO_HAYSTACK:
|
||||
dbo.put(field, "geoHaystack");
|
||||
if (!StringUtils.hasText(additionalField)) {
|
||||
throw new IllegalArgumentException("When defining geoHaystack index, an additionnal field must be defined");
|
||||
}
|
||||
dbo.put(additionalField, 1);
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new IllegalArgumentException("Unsupported geospatial index " + type);
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
|
||||
|
||||
public DBObject getIndexOptions() {
|
||||
if (name == null && min == null && max == null) {
|
||||
|
||||
if (name == null && min == null && max == null && bucketSize == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
if (name != null) {
|
||||
dbo.put("name", name);
|
||||
}
|
||||
if (min != null) {
|
||||
dbo.put("min", min);
|
||||
}
|
||||
if (max != null) {
|
||||
dbo.put("max", max);
|
||||
}
|
||||
if (bits != null) {
|
||||
dbo.put("bits", bits);
|
||||
|
||||
switch (type) {
|
||||
|
||||
case GEO_2D:
|
||||
|
||||
if (min != null) {
|
||||
dbo.put("min", min);
|
||||
}
|
||||
if (max != null) {
|
||||
dbo.put("max", max);
|
||||
}
|
||||
if (bits != null) {
|
||||
dbo.put("bits", bits);
|
||||
}
|
||||
break;
|
||||
|
||||
case GEO_2DSPHERE:
|
||||
|
||||
break;
|
||||
|
||||
case GEO_HAYSTACK:
|
||||
|
||||
if (bucketSize != null) {
|
||||
dbo.put("bucketSize", bucketSize);
|
||||
}
|
||||
break;
|
||||
}
|
||||
return dbo;
|
||||
}
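
The same index definition can be created programmatically via `IndexOperations`; the builder methods above mirror the annotation attributes. A sketch assuming a `MongoOperations` instance and the hypothetical `Restaurant` type from the previous example:

```java
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeospatialIndex;

class HaystackIndexSetup {

	void createIndex(MongoOperations template) {

		// Restaurant is the hypothetical document type from the sketch above
		template.indexOps(Restaurant.class).ensureIndex(
				new GeospatialIndex("position")
						.typed(GeoSpatialIndexType.GEO_HAYSTACK)
						.withAdditionalField("cuisine")
						.withBucketSize(1.0)
						.named("position_haystack"));
	}
}
```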
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -45,11 +45,12 @@ import com.mongodb.util.JSON;
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Johno Crawford
|
||||
* @author Laurent Canet
|
||||
*/
|
||||
public class MongoPersistentEntityIndexCreator implements
|
||||
ApplicationListener<MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>> {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class);
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class);
|
||||
|
||||
private final Map<Class<?>, Boolean> classesSeen = new ConcurrentHashMap<Class<?>, Boolean>();
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
@@ -96,8 +97,8 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
protected void checkForIndexes(final MongoPersistentEntity<?> entity) {
|
||||
final Class<?> type = entity.getType();
|
||||
if (!classesSeen.containsKey(type)) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Analyzing class " + type + " for index information.");
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Analyzing class " + type + " for index information.");
|
||||
}
|
||||
|
||||
// Make sure indexes get created
|
||||
@@ -111,8 +112,8 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
ensureIndex(indexColl, index.name(), definition, index.unique(), index.dropDups(), index.sparse(),
|
||||
index.background(), index.expireAfterSeconds());
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Created compound index " + index);
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Created compound index " + index);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -132,8 +133,8 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
} else {
|
||||
if (!name.equals(field.getName()) && index.unique() && !index.sparse()) {
|
||||
// Names don't match, and sparse is not true. This situation will generate an error on the server.
|
||||
if (log.isWarnEnabled()) {
|
||||
log.warn("The index name " + name + " doesn't match this property name: " + field.getName()
|
||||
if (LOGGER.isWarnEnabled()) {
|
||||
LOGGER.warn("The index name " + name + " doesn't match this property name: " + field.getName()
|
||||
+ ". Setting sparse=true on this index will prevent errors when inserting documents.");
|
||||
}
|
||||
}
|
||||
@@ -146,8 +147,8 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
ensureIndex(collection, name, definition, index.unique(), index.dropDups(), index.sparse(),
|
||||
index.background(), index.expireAfterSeconds());
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Created property index " + index);
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Created property index " + index);
|
||||
}
|
||||
|
||||
} else if (field.isAnnotationPresent(GeoSpatialIndexed.class)) {
|
||||
@@ -157,13 +158,15 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
GeospatialIndex indexObject = new GeospatialIndex(persistentProperty.getFieldName());
|
||||
indexObject.withMin(index.min()).withMax(index.max());
|
||||
indexObject.named(StringUtils.hasText(index.name()) ? index.name() : field.getName());
|
||||
indexObject.typed(index.type()).withBucketSize(index.bucketSize())
|
||||
.withAdditionalField(index.additionalField());
|
||||
|
||||
String collection = StringUtils.hasText(index.collection()) ? index.collection() : entity.getCollection();
|
||||
mongoDbFactory.getDb().getCollection(collection)
|
||||
.ensureIndex(indexObject.getIndexKeys(), indexObject.getIndexOptions());
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug(String.format("Created %s for entity %s in collection %s! ", indexObject, entity.getType(),
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Created %s for entity %s in collection %s! ", indexObject, entity.getType(),
|
||||
collection));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012 the original author or authors.
|
||||
* Copyright 2012-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -28,14 +28,14 @@ import org.springframework.util.Assert;
|
||||
*/
|
||||
public class AuditingEventListener implements ApplicationListener<BeforeConvertEvent<Object>> {
|
||||
|
||||
private final IsNewAwareAuditingHandler<Object> auditingHandler;
|
||||
private final IsNewAwareAuditingHandler auditingHandler;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AuditingEventListener} using the given {@link MappingContext} and {@link AuditingHandler}.
|
||||
*
|
||||
* @param auditingHandler must not be {@literal null}.
|
||||
*/
|
||||
public AuditingEventListener(IsNewAwareAuditingHandler<Object> auditingHandler) {
|
||||
public AuditingEventListener(IsNewAwareAuditingHandler auditingHandler) {
|
||||
|
||||
Assert.notNull(auditingHandler, "IsNewAwareAuditingHandler must not be null!");
|
||||
this.auditingHandler = auditingHandler;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2012 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -23,7 +23,7 @@ package org.springframework.data.mongodb.core.mapreduce;
|
||||
*/
|
||||
public class MapReduceCounts {
|
||||
|
||||
public static MapReduceCounts NONE = new MapReduceCounts(-1, -1, -1);
|
||||
public static final MapReduceCounts NONE = new MapReduceCounts(-1, -1, -1);
|
||||
|
||||
private final long inputCount;
|
||||
private final long emitCount;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -52,11 +52,8 @@ public class Criteria implements CriteriaDefinition {
|
||||
private static final Object NOT_SET = new Object();
|
||||
|
||||
private String key;
|
||||
|
||||
private List<Criteria> criteriaChain;
|
||||
|
||||
private LinkedHashMap<String, Object> criteria = new LinkedHashMap<String, Object>();
|
||||
|
||||
private Object isValue = NOT_SET;
|
||||
|
||||
public Criteria() {
|
||||
@@ -101,13 +98,16 @@ public class Criteria implements CriteriaDefinition {
|
||||
* @return
|
||||
*/
|
||||
public Criteria is(Object o) {
|
||||
if (isValue != NOT_SET) {
|
||||
|
||||
if (!isValue.equals(NOT_SET)) {
|
||||
throw new InvalidMongoDbApiUsageException(
|
||||
"Multiple 'is' values declared. You need to use 'and' with multiple criteria");
|
||||
}
|
||||
|
||||
if (lastOperatorWasNot()) {
|
||||
throw new InvalidMongoDbApiUsageException("Invalid query: 'not' can't be used with 'is' - use 'ne' instead.");
|
||||
}
|
||||
|
||||
this.isValue = o;
|
||||
return this;
|
||||
}
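
The sentinel check above is what triggers the exception when two `is(…)` values are declared for the same key; further constraints have to be chained with `and(…)`. A small sketch:

```java
import org.springframework.data.mongodb.core.query.Criteria;

class CriteriaIsSketch {

	void examples() {

		// fine: one is(...) per key, further keys chained via and(...)
		Criteria ok = Criteria.where("firstname").is("Dave").and("lastname").is("Matthews");

		// throws InvalidMongoDbApiUsageException: a second is(...) on the same Criteria
		Criteria broken = Criteria.where("firstname").is("Dave").is("Oliver");
	}
}
```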
|
||||
@@ -475,8 +475,10 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
protected DBObject getSingleCriteriaObject() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
boolean not = false;
|
||||
|
||||
for (String k : this.criteria.keySet()) {
|
||||
Object value = this.criteria.get(k);
|
||||
if (not) {
|
||||
@@ -494,12 +496,14 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
DBObject queryCriteria = new BasicDBObject();
|
||||
if (isValue != NOT_SET) {
|
||||
|
||||
if (!NOT_SET.equals(isValue)) {
|
||||
queryCriteria.put(this.key, this.isValue);
|
||||
queryCriteria.putAll(dbo);
|
||||
} else {
|
||||
queryCriteria.put(this.key, dbo);
|
||||
}
|
||||
|
||||
return queryCriteria;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -32,7 +32,7 @@ import com.mongodb.DBObject;
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class NearQuery {
|
||||
public final class NearQuery {
|
||||
|
||||
private final Point point;
|
||||
private Query query;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -23,6 +23,7 @@ import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.data.domain.Pageable;
|
||||
@@ -41,10 +42,10 @@ import com.mongodb.DBObject;
|
||||
*/
|
||||
public class Query {
|
||||
|
||||
private final static String RESTRICTED_TYPES_KEY = "_$RESTRICTED_TYPES";
|
||||
private static final String RESTRICTED_TYPES_KEY = "_$RESTRICTED_TYPES";
|
||||
|
||||
private final Set<Class<?>> restrictedTypes = new HashSet<Class<?>>();
|
||||
private LinkedHashMap<String, Criteria> criteria = new LinkedHashMap<String, Criteria>();
|
||||
private final Map<String, Criteria> criteria = new LinkedHashMap<String, Criteria>();
|
||||
private Field fieldSpec;
|
||||
private Sort sort;
|
||||
private int skip;
|
||||
@@ -197,6 +198,7 @@ public class Query {
|
||||
public DBObject getQueryObject() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
|
||||
for (String k : criteria.keySet()) {
|
||||
CriteriaDefinition c = criteria.get(k);
|
||||
DBObject cl = c.getCriteriaObject();
|
||||
@@ -211,24 +213,19 @@ public class Query {
|
||||
}
|
||||
|
||||
public DBObject getFieldsObject() {
|
||||
if (this.fieldSpec == null) {
|
||||
return null;
|
||||
}
|
||||
return fieldSpec.getFieldsObject();
|
||||
return this.fieldSpec == null ? null : fieldSpec.getFieldsObject();
|
||||
}
|
||||
|
||||
public DBObject getSortObject() {
|
||||
|
||||
if (this.sort == null && this.sort == null) {
|
||||
if (this.sort == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
|
||||
if (this.sort != null) {
|
||||
for (org.springframework.data.domain.Sort.Order order : this.sort) {
|
||||
dbo.put(order.getProperty(), order.isAscending() ? 1 : -1);
|
||||
}
|
||||
for (org.springframework.data.domain.Sort.Order order : this.sort) {
|
||||
dbo.put(order.getProperty(), order.isAscending() ? 1 : -1);
|
||||
}
|
||||
|
||||
return dbo;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -31,6 +31,7 @@ import com.mongodb.gridfs.GridFSFile;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public interface GridFsOperations extends ResourcePatternResolver {
|
||||
|
||||
@@ -126,7 +127,7 @@ public interface GridFsOperations extends ResourcePatternResolver {
|
||||
* Returns all {@link GridFsResource} with the given file name.
|
||||
*
|
||||
* @param filename
|
||||
* @return
|
||||
* @return the resource if it exists or {@literal null}.
|
||||
* @see ResourcePatternResolver#getResource(String)
|
||||
*/
|
||||
GridFsResource getResource(String filename);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -43,6 +43,7 @@ import com.mongodb.gridfs.GridFSInputFile;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver {
|
||||
|
||||
@@ -198,7 +199,9 @@ public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver
|
||||
* @see org.springframework.core.io.ResourceLoader#getResource(java.lang.String)
|
||||
*/
|
||||
public GridFsResource getResource(String location) {
|
||||
return new GridFsResource(findOne(query(whereFilename().is(location))));
|
||||
|
||||
GridFSDBFile file = findOne(query(whereFilename().is(location)));
|
||||
return file != null ? new GridFsResource(file) : null;
|
||||
}
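
With this change `getResource(…)` returns {@literal null} when no file is stored under the given name, so callers have to check for that. A usage sketch, assuming a configured `GridFsOperations` instance and a sample filename:

```java
import java.io.IOException;
import java.io.InputStream;

import org.springframework.data.mongodb.gridfs.GridFsOperations;
import org.springframework.data.mongodb.gridfs.GridFsResource;

class GridFsResourceSketch {

	void read(GridFsOperations operations) throws IOException {

		GridFsResource resource = operations.getResource("report.pdf");

		if (resource == null) {
			// nothing stored under that filename
			return;
		}

		InputStream content = resource.getInputStream();
		content.close();
	}
}
```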
|
||||
|
||||
/*
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -33,6 +33,7 @@ import javax.enterprise.inject.spi.ProcessBean;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.repository.cdi.CdiRepositoryBean;
|
||||
import org.springframework.data.repository.cdi.CdiRepositoryExtensionSupport;
|
||||
|
||||
/**
|
||||
@@ -76,26 +77,29 @@ public class MongoRepositoryExtension extends CdiRepositoryExtensionSupport {
|
||||
Set<Annotation> qualifiers = entry.getValue();
|
||||
|
||||
// Create the bean representing the repository.
|
||||
Bean<?> repositoryBean = createRepositoryBean(repositoryType, qualifiers, beanManager);
|
||||
CdiRepositoryBean<?> repositoryBean = createRepositoryBean(repositoryType, qualifiers, beanManager);
|
||||
|
||||
if (LOG.isInfoEnabled()) {
|
||||
LOG.info(String.format("Registering bean for %s with qualifiers %s.", repositoryType.getName(), qualifiers));
|
||||
}
|
||||
|
||||
// Register the bean to the container.
|
||||
registerBean(repositoryBean);
|
||||
afterBeanDiscovery.addBean(repositoryBean);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link Bean}.
|
||||
* Creates a {@link CdiRepositoryBean} for the repository of the given type.
|
||||
*
|
||||
* @param <T> The type of the repository.
|
||||
* @param repositoryType The class representing the repository.
|
||||
* @param beanManager The BeanManager instance.
|
||||
* @return The bean.
|
||||
* @param <T> the type of the repository.
|
||||
* @param repositoryType the class representing the repository.
|
||||
* @param qualifiers the qualifiers to be applied to the bean.
|
||||
* @param beanManager the BeanManager instance.
|
||||
* @return
|
||||
*/
|
||||
private <T> Bean<T> createRepositoryBean(Class<T> repositoryType, Set<Annotation> qualifiers, BeanManager beanManager) {
|
||||
private <T> CdiRepositoryBean<T> createRepositoryBean(Class<T> repositoryType, Set<Annotation> qualifiers,
|
||||
BeanManager beanManager) {
|
||||
|
||||
// Determine the MongoOperations bean which matches the qualifiers of the repository.
|
||||
Bean<MongoOperations> mongoOperations = this.mongoOperations.get(qualifiers);
|
||||
|
||||
@@ -81,7 +81,7 @@ public @interface EnableMongoRepositories {
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String repositoryImplementationPostfix() default "";
|
||||
String repositoryImplementationPostfix() default "Impl";
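
With `Impl` as the default postfix, a custom repository fragment is picked up purely by naming convention, as in the sketch below (the `Person` domain type and the repository names are assumed):

```java
import java.util.Collections;
import java.util.List;

import org.springframework.data.mongodb.repository.MongoRepository;

// fragment interface plus an implementation named "<RepositoryInterface>Impl"
interface PersonRepositoryCustom {
	List<Person> findByComplicatedRule();
}

class PersonRepositoryImpl implements PersonRepositoryCustom {

	@Override
	public List<Person> findByComplicatedRule() {
		return Collections.emptyList(); // custom MongoTemplate-based logic would go here
	}
}

interface PersonRepository extends MongoRepository<Person, String>, PersonRepositoryCustom {}
```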
|
||||
|
||||
/**
|
||||
* Configures the location of where to find the Spring Data named queries properties file. Will default to
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -153,7 +153,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class CollectionExecution extends Execution {
|
||||
final class CollectionExecution extends Execution {
|
||||
|
||||
private final Pageable pageable;
|
||||
|
||||
@@ -176,7 +176,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class PagedExecution extends Execution {
|
||||
final class PagedExecution extends Execution {
|
||||
|
||||
private final Pageable pageable;
|
||||
|
||||
@@ -213,7 +213,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class SingleEntityExecution extends Execution {
|
||||
final class SingleEntityExecution extends Execution {
|
||||
|
||||
private final boolean countProjection;
|
||||
|
||||
@@ -239,7 +239,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
class GeoNearExecution extends Execution {
|
||||
final class GeoNearExecution extends Execution {
|
||||
|
||||
private final MongoParameterAccessor accessor;
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoFactoryBean"><![CDATA[
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
@@ -209,7 +209,7 @@ The base package in which to scan for entities annotated with @Document
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoTemplate">
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
@@ -248,7 +248,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoTemplate"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
@@ -259,7 +259,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoFactoryBean"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoFactoryBean"><![CDATA[
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
@@ -197,7 +197,7 @@ The base package in which to scan for entities annotated with @Document
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoTemplate">
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
@@ -246,7 +246,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoTemplate"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
@@ -257,7 +257,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoFactoryBean"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoFactoryBean"><![CDATA[
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
@@ -197,7 +197,7 @@ The base package in which to scan for entities annotated with @Document
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="mongo-template-ref" type="mongoTemplateRef" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoTemplate">
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
The reference to a MongoTemplate. Will default to 'mongoTemplate'.
|
||||
</xsd:documentation>
|
||||
</xsd:annotation>
|
||||
@@ -261,7 +261,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoTemplate"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
@@ -272,7 +272,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoFactoryBean"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoFactoryBean"><![CDATA[
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
@@ -276,7 +276,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoTemplate"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
@@ -287,7 +287,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoFactoryBean"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<xsd:element name="mongo" type="mongoType">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.core.MongoFactoryBean"><![CDATA[
|
||||
<xsd:documentation source="org.springframework.data.mongodb.core.MongoFactoryBean"><![CDATA[
|
||||
Defines a Mongo instance used for accessing MongoDB.
|
||||
]]></xsd:documentation>
|
||||
<xsd:appinfo>
|
||||
@@ -283,7 +283,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoTemplate"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoTemplate"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
@@ -294,7 +294,7 @@ The name of the Mongo object that determines what server to monitor. (by default
|
||||
<xsd:annotation>
|
||||
<xsd:appinfo>
|
||||
<tool:annotation kind="ref">
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.core.MongoFactoryBean"/>
|
||||
<tool:assignable-to type="org.springframework.data.mongodb.core.MongoFactoryBean"/>
|
||||
</tool:annotation>
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
@@ -628,6 +628,12 @@ The reference to a Mongoconverter instance.
|
||||
</xsd:appinfo>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
<xsd:attribute name="bucket" type="xsd:string" use="optional">
|
||||
<xsd:annotation>
|
||||
<xsd:documentation><![CDATA[
|
||||
The name of the GridFS bucket to use.]]></xsd:documentation>
|
||||
</xsd:annotation>
|
||||
</xsd:attribute>
|
||||
</xsd:complexType>
|
||||
</xsd:element>
|
||||
</xsd:schema>
|
||||
@@ -19,6 +19,7 @@ package org.springframework.data.mongodb.config;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.net.InetAddress;
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Ignore;
|
||||
@@ -40,8 +41,7 @@ import com.mongodb.ServerAddress;
|
||||
@ContextConfiguration
|
||||
public class MongoNamespaceReplicaSetTests {
|
||||
|
||||
@Autowired
|
||||
private ApplicationContext ctx;
|
||||
@Autowired private ApplicationContext ctx;
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
@@ -53,7 +53,10 @@ public class MongoNamespaceReplicaSetTests {
|
||||
List<ServerAddress> replicaSetSeeds = (List<ServerAddress>) ReflectionTestUtils.getField(mfb, "replicaSetSeeds");
|
||||
|
||||
assertThat(replicaSetSeeds, is(notNullValue()));
|
||||
assertThat(replicaSetSeeds, hasItems(new ServerAddress("127.0.0.1", 10001), new ServerAddress("localhost", 10002)));
|
||||
assertThat(
|
||||
replicaSetSeeds,
|
||||
hasItems(new ServerAddress(InetAddress.getByName("127.0.0.1"), 10001),
|
||||
new ServerAddress(InetAddress.getByName("localhost"), 10002)));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -53,8 +53,10 @@ public class MongoNamespaceTests {
|
||||
|
||||
@Test
|
||||
public void testMongoSingleton() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("noAttrMongo"));
|
||||
MongoFactoryBean mfb = (MongoFactoryBean) ctx.getBean("&noAttrMongo");
|
||||
|
||||
assertNull(getField(mfb, "host"));
|
||||
assertNull(getField(mfb, "port"));
|
||||
}
|
||||
@@ -64,8 +66,10 @@ public class MongoNamespaceTests {
|
||||
|
||||
assertTrue(ctx.containsBean("defaultMongo"));
|
||||
MongoFactoryBean mfb = (MongoFactoryBean) ctx.getBean("&defaultMongo");
|
||||
|
||||
String host = (String) getField(mfb, "host");
|
||||
Integer port = (Integer) getField(mfb, "port");
|
||||
|
||||
assertEquals("localhost", host);
|
||||
assertEquals(new Integer(27017), port);
|
||||
|
||||
@@ -105,8 +109,10 @@ public class MongoNamespaceTests {
|
||||
|
||||
@Test
|
||||
public void testSecondMongoDbFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("secondMongoDbFactory"));
|
||||
MongoDbFactory dbf = (MongoDbFactory) ctx.getBean("secondMongoDbFactory");
|
||||
|
||||
Mongo mongo = (Mongo) getField(dbf, "mongo");
|
||||
assertEquals("localhost", mongo.getAddress().getHost());
|
||||
assertEquals(27017, mongo.getAddress().getPort());
|
||||
@@ -137,10 +143,13 @@ public class MongoNamespaceTests {
|
||||
*/
|
||||
@Test
|
||||
public void testMongoTemplateFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("mongoTemplate"));
|
||||
MongoOperations operations = (MongoOperations) ctx.getBean("mongoTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "mongoDbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "mongoConverter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
@@ -150,10 +159,13 @@ public class MongoNamespaceTests {
|
||||
*/
|
||||
@Test
|
||||
public void testSecondMongoTemplateFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("anotherMongoTemplate"));
|
||||
MongoOperations operations = (MongoOperations) ctx.getBean("anotherMongoTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "mongoDbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
|
||||
WriteConcern writeConcern = (WriteConcern) getField(operations, "writeConcern");
|
||||
assertEquals(WriteConcern.SAFE, writeConcern);
|
||||
}
|
||||
@@ -163,10 +175,13 @@ public class MongoNamespaceTests {
|
||||
*/
|
||||
@Test
|
||||
public void testGridFsTemplateFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("gridFsTemplate"));
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("gridFsTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "converter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
@@ -176,10 +191,31 @@ public class MongoNamespaceTests {
|
||||
*/
|
||||
@Test
|
||||
public void testSecondGridFsTemplateFactory() {
|
||||
assertTrue(ctx.containsBean("antoherGridFsTemplate"));
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("antoherGridFsTemplate");
|
||||
|
||||
assertTrue(ctx.containsBean("secondGridFsTemplate"));
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("secondGridFsTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
assertEquals(null, getField(operations, "bucket"));
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "converter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-823
|
||||
*/
|
||||
@Test
|
||||
public void testThirdGridFsTemplateFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("thirdGridFsTemplate"));
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("thirdGridFsTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
assertEquals("bucketString", getField(operations, "bucket"));
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "converter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
@@ -187,14 +223,19 @@ public class MongoNamespaceTests {
|
||||
@Test
|
||||
@SuppressWarnings("deprecation")
|
||||
public void testMongoSingletonWithPropertyPlaceHolders() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("mongo"));
|
||||
MongoFactoryBean mfb = (MongoFactoryBean) ctx.getBean("&mongo");
|
||||
|
||||
String host = (String) getField(mfb, "host");
|
||||
Integer port = (Integer) getField(mfb, "port");
|
||||
|
||||
assertEquals("127.0.0.1", host);
|
||||
assertEquals(new Integer(27017), port);
|
||||
|
||||
Mongo mongo = mfb.getObject();
|
||||
MongoOptions mongoOpts = mongo.getMongoOptions();
|
||||
|
||||
assertEquals(8, mongoOpts.connectionsPerHost);
|
||||
assertEquals(1000, mongoOpts.connectTimeout);
|
||||
assertEquals(1500, mongoOpts.maxWaitTime);
|
||||
|
||||
@@ -18,12 +18,15 @@ package org.springframework.data.mongodb.config;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.net.InetAddress;
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
|
||||
import com.mongodb.ServerAddress;
|
||||
|
||||
@@ -35,6 +38,8 @@ import com.mongodb.ServerAddress;
|
||||
*/
|
||||
public class ServerAddressPropertyEditorUnitTests {
|
||||
|
||||
@Rule public ExpectedException expectedException = ExpectedException.none();
|
||||
|
||||
ServerAddressPropertyEditor editor;
|
||||
|
||||
@Before
|
||||
@@ -81,11 +86,111 @@ public class ServerAddressPropertyEditorUnitTests {
|
||||
assertNull(editor.getValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-808
|
||||
*/
|
||||
@Test
|
||||
public void handleIPv6HostaddressLoopbackShort() throws UnknownHostException {
|
||||
|
||||
String hostAddress = "::1";
|
||||
editor.setAsText(hostAddress);
|
||||
|
||||
assertSingleAddressWithPort(hostAddress, null, editor.getValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-808
|
||||
*/
|
||||
@Test
|
||||
public void handleIPv6HostaddressLoopbackShortWithPort() throws UnknownHostException {
|
||||
|
||||
String hostAddress = "::1";
|
||||
int port = 27017;
|
||||
editor.setAsText(hostAddress + ":" + port);
|
||||
|
||||
assertSingleAddressWithPort(hostAddress, port, editor.getValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* Here we detect no port since the last segment of the address contains leading zeros.
|
||||
*
|
||||
* @see DATAMONGO-808
|
||||
*/
|
||||
@Test
|
||||
public void handleIPv6HostaddressLoopbackLong() throws UnknownHostException {
|
||||
|
||||
String hostAddress = "0000:0000:0000:0000:0000:0000:0000:0001";
|
||||
editor.setAsText(hostAddress);
|
||||
|
||||
assertSingleAddressWithPort(hostAddress, null, editor.getValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-808
|
||||
*/
|
||||
@Test
|
||||
public void handleIPv6HostaddressLoopbackLongWithBrackets() throws UnknownHostException {
|
||||
|
||||
String hostAddress = "[0000:0000:0000:0000:0000:0000:0000:0001]";
|
||||
editor.setAsText(hostAddress);
|
||||
|
||||
assertSingleAddressWithPort(hostAddress, null, editor.getValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* We can't tell whether the last part of the hostAddress represents a port or not.
|
||||
*
|
||||
* @see DATAMONGO-808
|
||||
*/
|
||||
@Test
|
||||
public void shouldFailToHandleAmbiguousIPv6HostaddressLongWithoutPortAndWithoutBrackets() throws UnknownHostException {
|
||||
|
||||
expectedException.expect(IllegalArgumentException.class);
|
||||
|
||||
String hostAddress = "0000:0000:0000:0000:0000:0000:0000:128";
|
||||
editor.setAsText(hostAddress);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-808
|
||||
*/
|
||||
@Test
|
||||
public void handleIPv6HostaddressExampleAddressWithPort() throws UnknownHostException {
|
||||
|
||||
String hostAddress = "0000:0000:0000:0000:0000:0000:0000:0001";
|
||||
int port = 27017;
|
||||
editor.setAsText(hostAddress + ":" + port);
|
||||
|
||||
assertSingleAddressWithPort(hostAddress, port, editor.getValue());
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-808
|
||||
*/
|
||||
@Test
|
||||
public void handleIPv6HostaddressExampleAddressInBracketsWithPort() throws UnknownHostException {
|
||||
|
||||
String hostAddress = "[0000:0000:0000:0000:0000:0000:0000:0001]";
|
||||
int port = 27017;
|
||||
editor.setAsText(hostAddress + ":" + port);
|
||||
|
||||
assertSingleAddressWithPort(hostAddress, port, editor.getValue());
|
||||
}
|
||||
|
||||
private static void assertSingleAddressOfLocalhost(Object result) throws UnknownHostException {
|
||||
assertSingleAddressWithPort("localhost", null, result);
|
||||
}
|
||||
|
||||
private static void assertSingleAddressWithPort(String hostAddress, Integer port, Object result)
|
||||
throws UnknownHostException {
|
||||
|
||||
assertThat(result, is(instanceOf(ServerAddress[].class)));
|
||||
Collection<ServerAddress> addresses = Arrays.asList((ServerAddress[]) result);
|
||||
assertThat(addresses, hasSize(1));
|
||||
assertThat(addresses, hasItem(new ServerAddress("localhost")));
|
||||
if (port == null) {
|
||||
assertThat(addresses, hasItem(new ServerAddress(InetAddress.getByName(hostAddress))));
|
||||
} else {
|
||||
assertThat(addresses, hasItem(new ServerAddress(InetAddress.getByName(hostAddress), port)));
|
||||
}
|
||||
}
|
||||
}
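
For reference, the editor under test is driven like any other `PropertyEditor`; a bracketed IPv6 literal followed by a port parses into a single `ServerAddress` (the address and port below are just sample values):

```java
import org.springframework.data.mongodb.config.ServerAddressPropertyEditor;

import com.mongodb.ServerAddress;

class ServerAddressParsingSketch {

	ServerAddress[] parse() {

		ServerAddressPropertyEditor editor = new ServerAddressPropertyEditor();
		editor.setAsText("[0000:0000:0000:0000:0000:0000:0000:0001]:27017");

		return (ServerAddress[]) editor.getValue();
	}
}
```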
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -51,6 +51,7 @@ import org.springframework.dao.OptimisticLockingFailureException;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.annotation.PersistenceConstructor;
|
||||
import org.springframework.data.annotation.Version;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
@@ -72,6 +73,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBCollection;
|
||||
@@ -93,6 +95,7 @@ import com.mongodb.WriteResult;
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
* @author Komi Innocent
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:infrastructure.xml")
|
||||
@@ -163,6 +166,8 @@ public class MongoTemplateTests {
|
||||
template.dropCollection(ObjectWith3AliasedFields.class);
|
||||
template.dropCollection(ObjectWith3AliasedFieldsAndNestedAddress.class);
|
||||
template.dropCollection(BaseDoc.class);
|
||||
template.dropCollection(ObjectWithEnumValue.class);
|
||||
template.dropCollection(DocumentWithCollection.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -216,6 +221,7 @@ public class MongoTemplateTests {
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-480
|
||||
* @see DATAMONGO-799
|
||||
*/
|
||||
@Test
|
||||
public void throwsExceptionForUpdateWithInvalidPushOperator() {
|
||||
@@ -231,8 +237,10 @@ public class MongoTemplateTests {
|
||||
|
||||
thrown.expect(DataIntegrityViolationException.class);
|
||||
thrown.expectMessage("Execution");
|
||||
thrown.expectMessage("$push");
|
||||
thrown.expectMessage("UPDATE");
|
||||
thrown.expectMessage("array");
|
||||
thrown.expectMessage("firstName");
|
||||
thrown.expectMessage("failed");
|
||||
|
||||
Query query = new Query(Criteria.where("firstName").is("Amol"));
|
||||
Update upd = new Update().push("age", 29);
|
||||
@@ -1796,12 +1804,12 @@ public class MongoTemplateTests {
|
||||
|
||||
Document doc = new Document();
|
||||
doc.id = "4711";
|
||||
doc.model = new ModelA().withValue("foo");
|
||||
doc.model = new ModelA("foo");
|
||||
template.insert(doc);
|
||||
|
||||
Query query = new Query(Criteria.where("id").is(doc.id));
|
||||
String newModelValue = "bar";
|
||||
Update update = Update.update("model", new ModelA().withValue(newModelValue));
|
||||
Update update = Update.update("model", new ModelA(newModelValue));
|
||||
template.updateFirst(query, update, Document.class);
|
||||
|
||||
Document result = template.findOne(query, Document.class);
|
||||
@@ -2055,25 +2063,198 @@ public class MongoTemplateTests {
|
||||
assertThat(result.get(0).field, is(value));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-816
|
||||
*/
|
||||
@Test
|
||||
public void shouldExecuteQueryShouldMapQueryBeforeQueryExecution() {
|
||||
|
||||
ObjectWithEnumValue o = new ObjectWithEnumValue();
|
||||
o.value = EnumValue.VALUE2;
|
||||
template.save(o);
|
||||
|
||||
Query q = Query.query(Criteria.where("value").in(EnumValue.VALUE2));
|
||||
|
||||
template.executeQuery(q, StringUtils.uncapitalize(ObjectWithEnumValue.class.getSimpleName()),
|
||||
new DocumentCallbackHandler() {
|
||||
|
||||
@Override
|
||||
public void processDocument(DBObject dbObject) throws MongoException, DataAccessException {
|
||||
|
||||
assertThat(dbObject, is(notNullValue()));
|
||||
|
||||
ObjectWithEnumValue result = template.getConverter().read(ObjectWithEnumValue.class, dbObject);
|
||||
|
||||
assertThat(result.value, is(EnumValue.VALUE2));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-811
|
||||
*/
|
||||
@Test
|
||||
public void updateFirstShouldIncreaseVersionForVersionedEntity() {
|
||||
|
||||
VersionedPerson person = new VersionedPerson();
|
||||
person.firstname = "Dave";
|
||||
person.lastname = "Matthews";
|
||||
template.save(person);
|
||||
assertThat(person.id, is(notNullValue()));
|
||||
|
||||
Query qry = query(where("id").is(person.id));
|
||||
VersionedPerson personAfterFirstSave = template.findOne(qry, VersionedPerson.class);
|
||||
assertThat(personAfterFirstSave.version, is(0L));
|
||||
|
||||
template.updateFirst(qry, Update.update("lastname", "Bubu"), VersionedPerson.class);
|
||||
|
||||
VersionedPerson personAfterUpdateFirst = template.findOne(qry, VersionedPerson.class);
|
||||
assertThat(personAfterUpdateFirst.version, is(1L));
|
||||
assertThat(personAfterUpdateFirst.lastname, is("Bubu"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-811
|
||||
*/
|
||||
@Test
|
||||
public void updateFirstShouldIncreaseVersionOnlyForFirstMatchingEntity() {
|
||||
|
||||
VersionedPerson person1 = new VersionedPerson();
|
||||
person1.firstname = "Dave";
|
||||
|
||||
VersionedPerson person2 = new VersionedPerson();
|
||||
person2.firstname = "Dave";
|
||||
|
||||
template.save(person1);
|
||||
template.save(person2);
|
||||
Query q = query(where("id").in(person1.id, person2.id));
|
||||
|
||||
template.updateFirst(q, Update.update("lastname", "Metthews"), VersionedPerson.class);
|
||||
|
||||
for (VersionedPerson p : template.find(q, VersionedPerson.class)) {
|
||||
if ("Metthews".equals(p.lastname)) {
|
||||
assertThat(p.version, equalTo(Long.valueOf(1)));
|
||||
} else {
|
||||
assertThat(p.version, equalTo(Long.valueOf(0)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-811
|
||||
*/
|
||||
@Test
|
||||
public void updateMultiShouldIncreaseVersionOfAllUpdatedEntities() {
|
||||
|
||||
VersionedPerson person1 = new VersionedPerson();
|
||||
person1.firstname = "Dave";
|
||||
|
||||
VersionedPerson person2 = new VersionedPerson();
|
||||
person2.firstname = "Dave";
|
||||
|
||||
template.save(person1);
|
||||
template.save(person2);
|
||||
|
||||
Query q = query(where("id").in(person1.id, person2.id));
|
||||
template.updateMulti(q, Update.update("lastname", "Metthews"), VersionedPerson.class);
|
||||
|
||||
for (VersionedPerson p : template.find(q, VersionedPerson.class)) {
|
||||
assertThat(p.version, equalTo(Long.valueOf(1)));
|
||||
}
|
||||
}
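
The DATAMONGO-811 tests above rely on a `@Version`-annotated entity that does not appear in this diff; a minimal sketch of such an entity, with field names taken from the tests and everything else assumed:

```java
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;

class VersionedPerson {

	@Id String id;

	// incremented by the template on every save, updateFirst and updateMulti
	@Version Long version;

	String firstname, lastname;
}
```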
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-686
|
||||
*/
|
||||
@Test
|
||||
public void itShouldBePossibleToReuseAnExistingQuery() {
|
||||
|
||||
Sample sample = new Sample();
|
||||
sample.id = "42";
|
||||
sample.field = "A";
|
||||
|
||||
template.save(sample);
|
||||
|
||||
Query query = new Query();
|
||||
query.addCriteria(where("_id").in("42", "43"));
|
||||
|
||||
assertThat(template.count(query, Sample.class), is(1L));
|
||||
|
||||
query.with(new PageRequest(0, 10));
|
||||
query.with(new Sort("field"));
|
||||
|
||||
assertThat(template.find(query, Sample.class), is(not(empty())));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-807
|
||||
*/
|
||||
@Test
|
||||
public void findAndModifyShouldRetrainTypeInformationWithinUpdatedType() {
|
||||
|
||||
Document document = new Document();
|
||||
document.model = new ModelA("value1");
|
||||
|
||||
template.save(document);
|
||||
|
||||
Query query = query(where("id").is(document.id));
|
||||
Update update = Update.update("model", new ModelA("value2"));
|
||||
template.findAndModify(query, update, Document.class);
|
||||
|
||||
Document retrieved = template.findOne(query, Document.class);
|
||||
Assert.assertThat(retrieved.model, instanceOf(ModelA.class));
|
||||
Assert.assertThat(retrieved.model.value(), equalTo("value2"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-407
|
||||
*/
|
||||
@Test
|
||||
public void updatesShouldRetainTypeInformationEvenForCollections() {
|
||||
|
||||
DocumentWithCollection doc = new DocumentWithCollection();
|
||||
doc.id = "4711";
|
||||
doc.model = new ArrayList<Model>();
|
||||
doc.model.add(new ModelA("foo"));
|
||||
template.insert(doc);
|
||||
|
||||
Query query = new Query(Criteria.where("id").is(doc.id));
|
||||
query.addCriteria(where("model.value").is("foo"));
|
||||
String newModelValue = "bar";
|
||||
Update update = Update.update("model.$", new ModelA(newModelValue));
|
||||
template.updateFirst(query, update, DocumentWithCollection.class);
|
||||
|
||||
Query findQuery = new Query(Criteria.where("id").is(doc.id));
|
||||
DocumentWithCollection result = template.findOne(findQuery, DocumentWithCollection.class);
|
||||
|
||||
assertThat(result, is(notNullValue()));
|
||||
assertThat(result.id, is(doc.id));
|
||||
assertThat(result.model, is(notNullValue()));
|
||||
assertThat(result.model, hasSize(1));
|
||||
assertThat(result.model.get(0).value(), is(newModelValue));
|
||||
}
|
||||
|
||||
static class DocumentWithCollection {
|
||||
|
||||
@Id public String id;
|
||||
public List<Model> model;
|
||||
}
|
||||
|
||||
static interface Model {
|
||||
String value();
|
||||
|
||||
Model withValue(String value);
|
||||
}
|
||||
|
||||
static class ModelA implements Model {
|
||||
|
||||
private String value;
|
||||
|
||||
@Override
|
||||
public String value() {
|
||||
return this.value;
|
||||
ModelA(String value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Model withValue(String value) {
|
||||
this.value = value;
|
||||
return this;
|
||||
public String value() {
|
||||
return this.value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2180,4 +2361,14 @@ public class MongoTemplateTests {
|
||||
static class ObjectWith3AliasedFieldsAndNestedAddress extends ObjectWith3AliasedFields {
|
||||
@Field("adr") Address address;
|
||||
}
|
||||
|
||||
static enum EnumValue {
|
||||
VALUE1, VALUE2, VALUE3
|
||||
}
|
||||
|
||||
static class ObjectWithEnumValue {
|
||||
|
||||
@Id String id;
|
||||
EnumValue value;
|
||||
}
|
||||
}
|
||||
|
||||

@@ -1,5 +1,5 @@
/*
* Copyright 2010-2012 the original author or authors.
* Copyright 2010-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -21,7 +21,6 @@ import static org.mockito.Matchers.*;
import static org.mockito.Mockito.*;

import java.math.BigInteger;
import java.util.Collection;
import java.util.Collections;
import java.util.regex.Pattern;

@@ -29,10 +28,10 @@ import org.bson.types.ObjectId;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentMatcher;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.context.ApplicationListener;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.core.convert.converter.Converter;
import org.springframework.dao.DataAccessException;
@@ -113,7 +112,10 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {

@Test(expected = InvalidDataAccessApiUsageException.class)
public void rejectsNotFoundMapReduceResource() {
template.setApplicationContext(new GenericApplicationContext());

GenericApplicationContext ctx = new GenericApplicationContext();
ctx.refresh();
template.setApplicationContext(ctx);
template.mapReduce("foo", "classpath:doesNotExist.js", "function() {}", Person.class);
}

@@ -212,18 +214,25 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
GenericApplicationContext applicationContext = new GenericApplicationContext();
applicationContext.getBeanFactory().registerSingleton("foo",
new MongoPersistentEntityIndexCreator(new MongoMappingContext(), factory));
applicationContext.refresh();

GenericApplicationContext spy = spy(applicationContext);

MongoTemplate mongoTemplate = new MongoTemplate(factory, converter);
mongoTemplate.setApplicationContext(applicationContext);
mongoTemplate.setApplicationContext(spy);

Collection<ApplicationListener<?>> listeners = applicationContext.getApplicationListeners();
assertThat(listeners, hasSize(1));
verify(spy, times(1)).addApplicationListener(argThat(new ArgumentMatcher<MongoPersistentEntityIndexCreator>() {

ApplicationListener<?> listener = listeners.iterator().next();
@Override
public boolean matches(Object argument) {

assertThat(listener, is(instanceOf(MongoPersistentEntityIndexCreator.class)));
MongoPersistentEntityIndexCreator creator = (MongoPersistentEntityIndexCreator) listener;
assertThat(creator.isIndexCreatorFor(mappingContext), is(true));
if (!(argument instanceof MongoPersistentEntityIndexCreator)) {
return false;
}

return ((MongoPersistentEntityIndexCreator) argument).isIndexCreatorFor(mappingContext);
}
}));
}

class AutogenerateableId {

@@ -26,10 +26,12 @@ import java.io.BufferedInputStream;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Scanner;

import org.joda.time.LocalDateTime;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
@@ -41,6 +43,7 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ClassPathResource;
import org.springframework.dao.DataAccessException;
import org.springframework.data.annotation.Id;
import org.springframework.data.mapping.model.MappingException;
import org.springframework.data.mongodb.core.CollectionCallback;
import org.springframework.data.mongodb.core.MongoTemplate;
@@ -107,6 +110,7 @@ public class AggregationTests {
mongoTemplate.dropCollection(DATAMONGO753.class);
mongoTemplate.dropCollection(Data.class);
mongoTemplate.dropCollection(DATAMONGO788.class);
mongoTemplate.dropCollection(User.class);
}

/**
@@ -744,6 +748,40 @@ public class AggregationTests {
assertThat((Integer) items.get(1).get("y"), is(1));
}

/**
* @see DATAMONGO-806
*/
@Test
public void shouldAllowGroupByIdFields() {

mongoTemplate.dropCollection(User.class);

LocalDateTime now = new LocalDateTime();

User user1 = new User("u1", new PushMessage("1", "aaa", now.toDate()));
User user2 = new User("u2", new PushMessage("2", "bbb", now.minusDays(2).toDate()));
User user3 = new User("u3", new PushMessage("3", "ccc", now.minusDays(1).toDate()));

mongoTemplate.save(user1);
mongoTemplate.save(user2);
mongoTemplate.save(user3);

Aggregation agg = newAggregation( //
project("id", "msgs"), //
unwind("msgs"), //
match(where("msgs.createDate").gt(now.minusDays(1).toDate())), //
group("id").push("msgs").as("msgs") //
);

AggregationResults<DBObject> results = mongoTemplate.aggregate(agg, User.class, DBObject.class);

List<DBObject> mappedResults = results.getMappedResults();

DBObject firstItem = mappedResults.get(0);
assertThat(firstItem.get("_id"), is(notNullValue()));
assertThat(String.valueOf(firstItem.get("_id")), is("u1"));
}

private void assertLikeStats(LikeStats like, String id, long count) {

assertThat(like, is(notNullValue()));
@@ -827,4 +865,37 @@ public class AggregationTests {
}
}

/**
* @see DATAMONGO-806
*/
static class User {

@Id String id;
List<PushMessage> msgs;

public User() {}

public User(String id, PushMessage... msgs) {
this.id = id;
this.msgs = Arrays.asList(msgs);
}
}

/**
* @see DATAMONGO-806
*/
static class PushMessage {

@Id String id;
String content;
Date createDate;

public PushMessage() {}

public PushMessage(String id, String content, Date createDate) {
this.id = id;
this.content = content;
this.createDate = createDate;
}
}
}

@@ -23,7 +23,10 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.data.annotation.Id;
import org.springframework.data.mapping.model.MappingException;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.QueryMapper;
@@ -76,12 +79,23 @@ public class TypeBasedAggregationOperationContextUnitTests {
assertThat(context.getReference(field), is(context.getReference("bar.name")));
}

/**
* @see DATAMONGO-806
*/
@Test
public void aliasesIdFieldCorrectly() {

AggregationOperationContext context = getContext(Foo.class);
assertThat(context.getReference("id"), is(new FieldReference(new ExposedField(Fields.field("id", "_id"), true))));
}

private TypeBasedAggregationOperationContext getContext(Class<?> type) {
return new TypeBasedAggregationOperationContext(type, context, mapper);
}

static class Foo {

@Id String id;
Bar bar;
}

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -56,6 +56,7 @@ import com.mongodb.QueryBuilder;
*
* @author Oliver Gierke
* @author Patryk Wasik
* @author Thomas Darimont
*/
@RunWith(MockitoJUnitRunner.class)
public class QueryMapperUnitTests {
@@ -466,6 +467,40 @@ public class QueryMapperUnitTests {
assertThat(result.get("myvalue"), is((Object) "$center"));
}

/**
* @see DATAMONGO-805
*/
@Test
public void shouldExcludeDBRefAssociation() {

Query query = query(where("someString").is("foo"));
query.fields().exclude("reference");

BasicMongoPersistentEntity<?> entity = context.getPersistentEntity(WithDBRef.class);
DBObject queryResult = mapper.getMappedObject(query.getQueryObject(), entity);
DBObject fieldsResult = mapper.getMappedObject(query.getFieldsObject(), entity);

assertThat(queryResult.get("someString"), is((Object) "foo"));
assertThat(fieldsResult.get("reference"), is((Object) 0));
}

/**
* @see DATAMONGO-686
*/
@Test
public void queryMapperShouldNotChangeStateInGivenQueryObjectWhenIdConstrainedByInList() {

BasicMongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(Sample.class);
String idPropertyName = persistentEntity.getIdProperty().getName();
DBObject queryObject = query(where(idPropertyName).in("42")).getQueryObject();

Object idValuesBefore = getAsDBObject(queryObject, idPropertyName).get("$in");
mapper.getMappedObject(queryObject, persistentEntity);
Object idValuesAfter = getAsDBObject(queryObject, idPropertyName).get("$in");

assertThat(idValuesAfter, is(idValuesBefore));
}

class IdWrapper {
Object id;
}

@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.convert;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import static org.springframework.data.mongodb.core.DBObjectTestUtils.*;

import java.util.List;

@@ -26,7 +27,7 @@ import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.DBObjectTestUtils;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.query.Update;

@@ -36,6 +37,7 @@ import com.mongodb.DBObject;
* Unit tests for {@link UpdateMapper}.
*
* @author Oliver Gierke
* @author Christoph Strobl
*/
@RunWith(MockitoJUnitRunner.class)
public class UpdateMapperUnitTests {
@@ -63,15 +65,135 @@ public class UpdateMapperUnitTests {
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
context.getPersistentEntity(ParentClass.class));

DBObject push = DBObjectTestUtils.getAsDBObject(mappedObject, "$push");
DBObject list = DBObjectTestUtils.getAsDBObject(push, "list");
DBObject push = getAsDBObject(mappedObject, "$push");
DBObject list = getAsDBObject(push, "aliased");

assertThat(list.get("_class"), is((Object) ConcreteChildClass.class.getName()));
}

/**
* @see DATAMONGO-807
*/
@Test
public void updateMapperShouldRetainTypeInformationForNestedEntities() {

Update update = Update.update("model", new ModelImpl(1));
UpdateMapper mapper = new UpdateMapper(converter);

DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
context.getPersistentEntity(ModelWrapper.class));

DBObject set = getAsDBObject(mappedObject, "$set");
DBObject modelDbObject = (DBObject) set.get("model");
assertThat(modelDbObject.get("_class"), not(nullValue()));
}

/**
* @see DATAMONGO-807
*/
@Test
public void updateMapperShouldNotPersistTypeInformationForKnownSimpleTypes() {

Update update = Update.update("model.value", 1);
UpdateMapper mapper = new UpdateMapper(converter);

DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
context.getPersistentEntity(ModelWrapper.class));

DBObject set = getAsDBObject(mappedObject, "$set");
assertThat(set.get("_class"), nullValue());
}

/**
* @see DATAMONGO-807
*/
@Test
public void updateMapperShouldNotPersistTypeInformationForNullValues() {

Update update = Update.update("model", null);
UpdateMapper mapper = new UpdateMapper(converter);

DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
context.getPersistentEntity(ModelWrapper.class));

DBObject set = getAsDBObject(mappedObject, "$set");
assertThat(set.get("_class"), nullValue());
}

/**
* @see DATAMONGO-407
*/
@Test
public void updateMapperShouldRetainTypeInformationForNestedCollectionElements() {

Update update = Update.update("list.$", new ConcreteChildClass("42", "bubu"));

UpdateMapper mapper = new UpdateMapper(converter);
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
context.getPersistentEntity(ParentClass.class));

DBObject set = getAsDBObject(mappedObject, "$set");
DBObject modelDbObject = getAsDBObject(set, "aliased.$");
assertThat(modelDbObject.get("_class"), is((Object) ConcreteChildClass.class.getName()));
}

/**
* @see DATAMONGO-407
*/
@Test
public void updateMapperShouldSupportNestedCollectionElementUpdates() {

Update update = Update.update("list.$.value", "foo").set("list.$.otherValue", "bar");

UpdateMapper mapper = new UpdateMapper(converter);
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
context.getPersistentEntity(ParentClass.class));

DBObject set = getAsDBObject(mappedObject, "$set");
assertThat(set.get("aliased.$.value"), is((Object) "foo"));
assertThat(set.get("aliased.$.otherValue"), is((Object) "bar"));
}

/**
* @see DATAMONGO-407
*/
@Test
public void updateMapperShouldWriteTypeInformationForComplexNestedCollectionElementUpdates() {

Update update = Update.update("list.$.value", "foo").set("list.$.someObject", new ConcreteChildClass("42", "bubu"));

UpdateMapper mapper = new UpdateMapper(converter);
DBObject mappedObject = mapper.getMappedObject(update.getUpdateObject(),
context.getPersistentEntity(ParentClass.class));

DBObject dbo = getAsDBObject(mappedObject, "$set");
assertThat(dbo.get("aliased.$.value"), is((Object) "foo"));

DBObject someObject = getAsDBObject(dbo, "aliased.$.someObject");
assertThat(someObject, is(notNullValue()));
assertThat(someObject.get("_class"), is((Object) ConcreteChildClass.class.getName()));
assertThat(someObject.get("value"), is((Object) "bubu"));
}

static interface Model {}

static class ModelImpl implements Model {
public int value;

public ModelImpl(int value) {
this.value = value;
}
}

public class ModelWrapper {
Model model;
}

static class ParentClass {

String id;

@Field("aliased")//
List<? extends AbstractChildClass> list;

public ParentClass(String id, List<? extends AbstractChildClass> list) {
@@ -85,10 +207,13 @@ public class UpdateMapperUnitTests {

String id;
String value;
String otherValue;
AbstractChildClass someObject;

public AbstractChildClass(String id, String value) {
this.id = id;
this.value = value;
this.otherValue = "other_" + value;
}
}
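
The DATAMONGO-407/807 tests above exercise type retention at the `UpdateMapper` level. For orientation only, here is a minimal sketch of what this enables through `MongoTemplate`; the `Order`, `LineItem` and `SpecialLineItem` types are invented for the example and are not part of this change set:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import java.util.List;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

public class PositionalUpdateSketch {

    // Hypothetical domain types, used for illustration only.
    static class Order { String id; List<LineItem> items; }
    static class LineItem { String name; LineItem(String name) { this.name = name; } }
    static class SpecialLineItem extends LineItem { SpecialLineItem(String name) { super(name); } }

    static void replaceMatchedItem(MongoOperations template, String orderId) {
        // "items.$" targets the array element matched by the query; the mapped
        // update keeps the _class hint so the subtype survives the round trip.
        Query query = query(where("id").is(orderId).and("items.name").is("foo"));
        Update update = Update.update("items.$", new SpecialLineItem("foo"));
        template.updateFirst(query, update, Order.class);
    }
}
```

Per the changelog entries further down, the mapped update previously lost the `_class` hint for such elements, which broke reading them back as the concrete subtype.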

@@ -0,0 +1,164 @@
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*      http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.geo;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;

import java.util.Map;

import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.config.AbstractIntegrationTests;
import org.springframework.data.mongodb.core.CollectionCallback;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.WriteResultChecking;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexed;

import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.MongoException;
import com.mongodb.WriteConcern;

/**
* Integration tests for geo-spatial indexing.
*
* @author Laurent Canet
* @author Oliver Gierke
*/
public class GeoSpatialIndexTests extends AbstractIntegrationTests {

@Autowired private MongoTemplate template;

@Before
public void setUp() {

template.setWriteConcern(WriteConcern.FSYNC_SAFE);
template.setWriteResultChecking(WriteResultChecking.EXCEPTION);
}

/**
* @see DATAMONGO-778
*/
@Test
public void test2dIndex() {

try {
template.save(new GeoSpatialEntity2D(45.2, 4.6));
assertThat(hasIndexOfType(GeoSpatialEntity2D.class, "2d"), is(true));
} finally {
template.dropCollection(GeoSpatialEntity2D.class);
}
}

/**
* @see DATAMONGO-778
*/
@Test
public void test2dSphereIndex() {

try {
template.save(new GeoSpatialEntity2DSphere(45.2, 4.6));
assertThat(hasIndexOfType(GeoSpatialEntity2DSphere.class, "2dsphere"), is(true));
} finally {
template.dropCollection(GeoSpatialEntity2DSphere.class);
}
}

/**
* @see DATAMONGO-778
*/
@Test
public void testHaystackIndex() {

try {
template.save(new GeoSpatialEntityHaystack(45.2, 4.6, "Paris"));
assertThat(hasIndexOfType(GeoSpatialEntityHaystack.class, "geoHaystack"), is(true));
} finally {
template.dropCollection(GeoSpatialEntityHaystack.class);
}
}

/**
* Returns whether an index with the given name exists for the given entity type.
*
* @param indexName
* @param entityType
* @return
*/
private boolean hasIndexOfType(Class<?> entityType, final String type) {

return template.execute(entityType, new CollectionCallback<Boolean>() {

@SuppressWarnings("unchecked")
public Boolean doInCollection(DBCollection collection) throws MongoException, DataAccessException {

for (DBObject indexInfo : collection.getIndexInfo()) {

DBObject keys = (DBObject) indexInfo.get("key");
Map<String, Object> keysMap = keys.toMap();

for (String key : keysMap.keySet()) {
Object indexType = keys.get(key);
if (type.equals(indexType)) {
return true;
}
}
}

return false;
}
});
}

static class GeoSpatialEntity2D {
public String id;
@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2D) public Point location;

public GeoSpatialEntity2D(double x, double y) {
this.location = new Point(x, y);
}
}

static class GeoSpatialEntityHaystack {
public String id;
public String name;
@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_HAYSTACK, additionalField = "name") public Point location;

public GeoSpatialEntityHaystack(double x, double y, String name) {
this.location = new Point(x, y);
this.name = name;
}
}

static class GeoJsonPoint {
String type = "Point";
double coordinates[];
}

static class GeoSpatialEntity2DSphere {
public String id;
@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE) public GeoJsonPoint location;

public GeoSpatialEntity2DSphere(double x, double y) {
this.location = new GeoJsonPoint();
this.location.coordinates = new double[] { x, y };
}
}
}
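
As a rough usage sketch of the index types this change introduces (DATAMONGO-778/785) outside the test fixtures — the `Venue` type is invented for the example, and the programmatic variant mirrors what `IndexUnitTests` further down asserts on the generated keys:

```java
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.geo.Point;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexed;
import org.springframework.data.mongodb.core.index.GeospatialIndex;

public class GeoIndexSketch {

    // Declarative variant: the index creator picks the annotation up when the entity is mapped.
    // A 2dsphere index also accepts legacy coordinate pairs; the tests above use a GeoJSON-style point.
    static class Venue {
        String id;
        @GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE) Point location;
    }

    // Programmatic variant using the new typed(...) builder method.
    static void ensure2dSphereIndex(MongoOperations operations) {
        operations.indexOps(Venue.class).ensureIndex(
                new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE));
    }
}
```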

@@ -24,6 +24,7 @@ import static org.springframework.data.mongodb.core.query.Update.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

@@ -49,6 +50,7 @@ import com.mongodb.MongoException;
/**
* @author Jon Brisbin
* @author Oliver Gierke
* @author Thomas Darimont
*/
public class MappingTests extends AbstractIntegrationTests {

@@ -465,6 +467,57 @@ public class MappingTests extends AbstractIntegrationTests {
assertThat(result.items.get(0).id, is(items.id));
}

/**
* @see DATAMONGO-805
*/
@Test
public void supportExcludeDbRefAssociation() {

template.dropCollection(Item.class);
template.dropCollection(Container.class);

Item item = new Item();
template.insert(item);

Container container = new Container("foo");
container.item = item;

template.insert(container);

Query query = new Query(Criteria.where("id").is("foo"));
query.fields().exclude("item");
Container result = template.findOne(query, Container.class);

assertThat(result, is(notNullValue()));
assertThat(result.item, is(nullValue()));
}

/**
* @see DATAMONGO-805
*/
@Test
public void shouldMapFieldsOfIterableEntity() {

template.dropCollection(IterableItem.class);
template.dropCollection(Container.class);

Item item = new IterableItem();
item.value = "bar";
template.insert(item);

Container container = new Container("foo");
container.item = item;

template.insert(container);

Query query = new Query(Criteria.where("id").is("foo"));
Container result = template.findOne(query, Container.class);

assertThat(result, is(notNullValue()));
assertThat(result.item, is(notNullValue()));
assertThat(result.item.value, is("bar"));
}

static class Container {

@Id final String id;
@@ -473,6 +526,10 @@ public class MappingTests extends AbstractIntegrationTests {
id = new ObjectId().toString();
}

public Container(String id) {
this.id = id;
}

@DBRef Item item;
@DBRef List<Item> items;
}
@@ -480,9 +537,26 @@ public class MappingTests extends AbstractIntegrationTests {
static class Item {

@Id final String id;
String value;

public Item() {
this.id = new ObjectId().toString();
}
}

static class IterableItem extends Item implements Iterable<ItemData> {

List<ItemData> data = new ArrayList<MappingTests.ItemData>();

@Override
public Iterator<ItemData> iterator() {
return data.iterator();
}
}

static class ItemData {

String id;
String value;
}
}

@@ -1,5 +1,5 @@
/*
* Copyright 2012 the original author or authors.
* Copyright 2012-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -37,7 +37,7 @@ import org.springframework.data.support.IsNewStrategyFactory;
@RunWith(MockitoJUnitRunner.class)
public class AuditingEventListenerUnitTests {

IsNewAwareAuditingHandler<Object> handler;
IsNewAwareAuditingHandler handler;

IsNewStrategyFactory factory;
AuditingEventListener listener;
@@ -48,7 +48,7 @@ public class AuditingEventListenerUnitTests {
MongoMappingContext mappingContext = new MongoMappingContext();
factory = new MappingContextIsNewStrategyFactory(mappingContext);

handler = spy(new IsNewAwareAuditingHandler<Object>(factory));
handler = spy(new IsNewAwareAuditingHandler(factory));
doNothing().when(handler).markCreated(Mockito.any(Object.class));
doNothing().when(handler).markModified(Mockito.any(Object.class));

@@ -83,7 +83,6 @@ public class AuditingEventListenerUnitTests {

static class Sample {

@Id
String id;
@Id String id;
}
}

@@ -20,11 +20,18 @@ import static org.junit.Assert.*;

import org.junit.Test;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeospatialIndex;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.Index.Duplicates;

public class IndexTests {
/**
* Unit tests for {@link Index}.
*
* @author Oliver Gierke
* @author Laurent Canet
*/
public class IndexUnitTests {

@Test
public void testWithAscendingIndex() {
@@ -69,6 +76,29 @@ public class IndexTests {
assertEquals("{ \"min\" : 0}", i.getIndexOptions().toString());
}

/**
* @see DATAMONGO-778
*/
@Test
public void testGeospatialIndex2DSphere() {

GeospatialIndex i = new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE);
assertEquals("{ \"location\" : \"2dsphere\"}", i.getIndexKeys().toString());
assertEquals("{ }", i.getIndexOptions().toString());
}

/**
* @see DATAMONGO-778
*/
@Test
public void testGeospatialIndexGeoHaystack() {

GeospatialIndex i = new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_HAYSTACK)
.withAdditionalField("name").withBucketSize(40);
assertEquals("{ \"location\" : \"geoHaystack\" , \"name\" : 1}", i.getIndexKeys().toString());
assertEquals("{ \"bucketSize\" : 40.0}", i.getIndexOptions().toString());
}

@Test
public void ensuresPropertyOrder() {

@@ -45,10 +45,11 @@ import com.mongodb.gridfs.GridFSFile;
*
* @author Oliver Gierke
* @author Philipp Schneider
* @author Thomas Darimont
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:gridfs/gridfs.xml")
public class GridFsTemplateIIntegrationTests {
public class GridFsTemplateIntegrationTests {

Resource resource = new ClassPathResource("gridfs/gridfs.xml");

@@ -162,6 +163,14 @@ public class GridFsTemplateIIntegrationTests {
assertSame(result.get(0), reference);
}

/**
* @see DATAMONGO-813
*/
@Test
public void getResourceShouldReturnNullForNonExistingResource() {
assertThat(operations.getResource("doesnotexist"), is(nullValue()));
}

private static void assertSame(GridFSFile left, GridFSFile right) {

assertThat(left.getId(), is(right.getId()));
@@ -0,0 +1,29 @@
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*      http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.custom;

import java.util.List;

import org.springframework.data.mongodb.repository.User;
import org.springframework.data.repository.Repository;

/**
* @author Thomas Darimont
*/
public interface CustomMongoRepository extends Repository<User, String> {

List<User> findByUsernameCustom(String username);
}
@@ -0,0 +1,39 @@
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*      http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.custom;

import java.util.Arrays;
import java.util.List;

import org.springframework.data.mongodb.repository.User;

/**
* @author Thomas Darimont
*/
public class CustomMongoRepositoryImpl implements CustomMongoRepository {

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.custom.CustomMongoRepository#findByFullName()
*/
@Override
public List<User> findByUsernameCustom(String username) {

User user = new User();
user.setUsername(username);
return Arrays.asList(user);
}
}
@@ -0,0 +1,62 @@
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*      http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.custom;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;

import java.util.List;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.ImportResource;
import org.springframework.data.mongodb.repository.User;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

/**
* Integration tests for custom Repository implementations.
*
* @author Thomas Darimont
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration
public class CustomRepositoryImplementationTests {

@Configuration
@EnableMongoRepositories
@ImportResource("classpath:infrastructure.xml")
static class Config {}

@Autowired CustomMongoRepository customMongoRepository;

/**
* @see DATAMONGO-804
*/
@Test
public void shouldExecuteMethodOnCustomRepositoryImplementation() {

String username = "bubu";
List<User> users = customMongoRepository.findByUsernameCustom(username);

assertThat(users.size(), is(1));
assertThat(users.get(0), is(notNullValue()));
assertThat(users.get(0).getUsername(), is(username));
}
}
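
A side note on DATAMONGO-804, which this test relies on: the `Impl` postfix linking `CustomMongoRepositoryImpl` to `CustomMongoRepository` is the default again. Spelling it out explicitly would look roughly like this (illustrative configuration only):

```java
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.ImportResource;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;

@Configuration
@ImportResource("classpath:infrastructure.xml")
// Equivalent to the default restored by DATAMONGO-804.
@EnableMongoRepositories(repositoryImplementationPostfix = "Impl")
class ExplicitPostfixConfig {}
```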

@@ -76,6 +76,7 @@
<mongo:template id="anotherMongoTemplate" db-factory-ref="mongoDbFactory" write-concern="SAFE" />

<mongo:gridFsTemplate/>
<mongo:gridFsTemplate id="antoherGridFsTemplate" db-factory-ref="mongoDbFactory" converter-ref="mappingConverter"/>
<mongo:gridFsTemplate id="secondGridFsTemplate" db-factory-ref="mongoDbFactory" converter-ref="mappingConverter"/>
<mongo:gridFsTemplate id="thirdGridFsTemplate" db-factory-ref="mongoDbFactory" converter-ref="mappingConverter" bucket="bucketString"/>

</beans>

@@ -56,7 +56,7 @@
<xi:include href="introduction/why-sd-doc.xml"/>
<xi:include href="introduction/requirements.xml"/>
<xi:include href="introduction/getting-started.xml"/>
<xi:include href="https://raw.github.com/SpringSource/spring-data-commons/1.6.0.RELEASE/src/docbkx/repositories.xml">
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.7.0.RC1/src/docbkx/repositories.xml">
<xi:fallback href="../../../spring-data-commons/src/docbkx/repositories.xml" />
</xi:include>
</part>
@@ -76,10 +76,10 @@
<part id="appendix">
<title>Appendix</title>

<xi:include href="https://raw.github.com/SpringSource/spring-data-commons/1.6.0.RELEASE/src/docbkx/repository-namespace-reference.xml">
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.7.0.RC1/src/docbkx/repository-namespace-reference.xml">
<xi:fallback href="../../../spring-data-commons/src/docbkx/repository-namespace-reference.xml" />
</xi:include>
<xi:include href="https://raw.github.com/SpringSource/spring-data-commons/1.6.0.RELEASE/src/docbkx/repository-query-keywords-reference.xml">
<xi:include href="https://raw.github.com/spring-projects/spring-data-commons/1.7.0.RC1/src/docbkx/repository-query-keywords-reference.xml">
<xi:fallback href="../../../spring-data-commons/src/docbkx/repository-query-keywords-reference.xml" />
</xi:include>
</part>

@@ -40,12 +40,12 @@

<para>For information on the Spring Data Mongo source code repository,
nightly builds and snapshot artifacts please see the <ulink
url="http://www.springsource.org/spring-data/mongodb">Spring Data Mongo
url="http://projects.spring.io/spring-data-mongodb/">Spring Data Mongo
homepage</ulink>.</para>

<para>You can help make Spring Data best serve the needs of the Spring
community by interacting with developers through the Spring Community
<ulink url="http://forum.springsource.org">forums</ulink>. To follow
<ulink url="http://forum.spring.io/">forums</ulink>. To follow
developer activity look for the mailing list information on the Spring
Data Mongo homepage.</para>

@@ -55,10 +55,10 @@

<para>To stay up to date with the latest news and announcements in the
Spring eco system, subscribe to the Spring Community <ulink
url="http://www.springframework.org/">Portal</ulink>.</para>
url="https://spring.io">Portal</ulink>.</para>

<para>Lastly, you can follow the SpringSource Data <ulink
url="http://blog.springsource.com/category/data-access/">blog </ulink>or
url="https://spring.io/blog">blog </ulink>or
the project team on Twitter (<ulink
url="http://twitter.com/SpringData">SpringData</ulink>)</para>
</section>

@@ -12,17 +12,17 @@
<title>Knowing Spring</title>

<para>Spring Data uses Spring framework's <ulink
url="http://static.springframework.org/spring/docs/3.0.x/reference/html/spring-core.html">core</ulink>
url="http://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/spring-core.html">core</ulink>
functionality, such as the <ulink
url="http://static.springframework.org/spring/docs/3.0.x/reference/html/beans.html">IoC</ulink>
url="http://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/beans.html">IoC</ulink>
container, <ulink
url="http://static.springsource.org/spring/docs/3.0.x/reference/html/validation.html#core-convert">type
url="http://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/validation.html#core-convert">type
conversion system</ulink>, <ulink
url="http://static.springsource.org/spring/docs/3.0.x/reference/html/expressions.html">expression
url="http://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/expressions.html">expression
language</ulink>, <ulink
url="http://static.springsource.org/spring/docs/3.0.x/reference/html/jmx.html">JMX
url="http://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/jmx.html">JMX
integration</ulink>, and portable <ulink
url="http://static.springsource.org/spring/docs/3.0.x/reference/html/dao.html#dao-exceptions">DAO
url="http://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/dao.html#dao-exceptions">DAO
exception hierarchy</ulink>. While it is not important to know the
Spring APIs, understanding the concepts behind them is. At a minimum,
the idea behind IoC should be familiar for whatever IoC container you
@@ -40,7 +40,7 @@
(and sometimes disarming) documentation that explains in detail the
Spring Framework. There are a lot of articles, blog entries and books on
the matter - take a look at the Spring framework <ulink
url="http://www.springsource.org/documentation">home page </ulink> for
url="https://spring.io/docs">home page </ulink> for
more information.</para>
</section><section id="get-started:first-steps:nosql">
<title>Knowing NoSQL and Document databases</title>

@@ -3,7 +3,7 @@

<para>Spring Data Document 1.x binaries requires JDK level 6.0 and above,
and
<ulink url="http://www.springsource.org/documentation">Spring Framework</ulink>
<ulink url="https://spring.io/docs">Spring Framework</ulink>
3.0.x and above.
</para>
<para>

@@ -10,7 +10,7 @@
you perform administrative operations such as drop or create a database. The
JMX features build upon the JMX feature set available in the Spring
Framework. See <ulink
url="http://static.springsource.org/spring/docs/3.0.x/reference/jmx.html">here
url="http://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/jmx.html">here
</ulink> for more details.</para>

<section id="mongodb:jmx-configuration">
@@ -20,7 +20,7 @@
functionality</para>

<example>
<title>XML schmea to configure MongoDB</title>
<title>XML schema to configure MongoDB</title>

<programlisting language="xml"><?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"

@@ -638,7 +638,7 @@ public class Person {
<para>Spring 3.0 introduced a core.convert package that provides a
general type conversion system. This is described in detail in the
Spring reference documentation section entitled <ulink
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/validation.html#core-convert">Spring
url="http://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/validation.html#core-convert">Spring
3 Type Conversion</ulink>.</para>
</note>

@@ -248,7 +248,7 @@ public class MongoApp {
<title>Examples Repository</title>

<para>There is an <ulink
url="https://github.com/SpringSource/spring-data-document-examples">github
url="https://github.com/spring-projects/spring-data-document-examples">github
repository with several examples</ulink> that you can download and play
around with to get a feel for how the library works.</para>
</section>
@@ -264,9 +264,9 @@ public class MongoApp {
<para>For those not familiar with how to configure the Spring
container using Java based bean metadata instead of XML based metadata
see the high level introduction in the reference docs <ulink
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/new-in-3.html#new-java-configuration"
url="http://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/new-in-3.0.html#new-java-configuration"
userlevel="">here </ulink> as well as the detailed documentation<ulink
url="http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/beans.html#beans-java-instantiating-container">
url="http://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/beans.html#beans-java-instantiating-container">
here</ulink>.</para>
</note></para>

@@ -1,6 +1,53 @@
Spring Data MongoDB Changelog
=============================

Changes in version 1.4.0.RC1 (2014-01-29)
---------------------------------------------

** Bug
* [DATAMONGO-407] - Collection with generics losing element type after $set update
* [DATAMONGO-686] - ClassCastException while reusing Query object
* [DATAMONGO-726] - References to non existing classes in namespace XSD
* [DATAMONGO-804] - EnableMongoRepositories repositoryImplementationPostfix() default is empty String instead of "Impl"
* [DATAMONGO-805] - Excluding DBRef field in a query causes a MappingException
* [DATAMONGO-806] - Spring Data MongoDB - Aggregation Framework - No property _id found for type com.entity.User
* [DATAMONGO-807] - using findAndModify removes the _class field of encapsulated classes, causing MappingInstantiationException
* [DATAMONGO-808] - Spring data mongoDB not working with IPv6 address directly
* [DATAMONGO-811] - updateFirst methods do not increment @Version field
* [DATAMONGO-816] - Unable to execute query with DocumentCallbackHandler when query contains Criteria with enums.
** Improvement
* [DATAMONGO-778] - Create geospatial index of type other than 2d with @GeoSpatialIndexed
* [DATAMONGO-785] - Add support for geospatial 2Dsphere and geohaystack index types
* [DATAMONGO-787] - Guard against SpEL issue in Spring 3.2.4
* [DATAMONGO-799] - Fix failing test in MongoTemplateTests on Mongo 2.5.x
* [DATAMONGO-802] - Change AbstractMongoConfiguration.mongoDbFactory() to return MongoDbFactory
* [DATAMONGO-813] - GridFsTemplate.getResource(location) throws NPE if don't find file
* [DATAMONGO-822] - Add support for eager CDI repository instantiation
* [DATAMONGO-823] - Add bucket attribute to <mongo:gridFsTemplate />
* [DATAMONGO-837] - Upgrade MongoDB Java driver to 2.11.4
** Task
* [DATAMONGO-790] - Ensure compatibility with Spring Framework 4.0
* [DATAMONGO-824] - Add contribution guidelines
* [DATAMONGO-826] - Release Spring Data MongoDB 1.4.0.RC1
* [DATAMONGO-835] - Code cleanups

Changes in version 1.3.3.RELEASE (2013-12-11)
---------------------------------------------
** Bug
* [DATAMONGO-726] - Fixed classname references in namespace XSDs.
* [DATAMONGO-788] - Projection operations do not render synthetic fields properly.
* [DATAMONGO-795] - When adding custom converters to the mongo template it is possible to get unpredictable behaviour
* [DATAMONGO-804] - Fix default annotation attribute value for repositoryImplementationPostfix().
* [DATAMONGO-806] - Fixed invalid rendering of id field references.
* [DATAMONGO-768] - Improve documentation of how to use @PersistenceConstructor

** Improvement
* [DATAMONGO-791] - Added newAggregation(…) overloads to accept a List.
* [DATAMONGO-799] - Fix failing test in MongoTemplateTests on Mongo 2.5.x
* [DATAMONGO-800] - Improved AuditingIntegrationTests.
** Task
* [DATAMONGO-810] - Release 1.3.3

Changes in version 1.4.0.M1 (2013-11-19)
---------------------------------------------
** Bug

@@ -1,5 +1,5 @@
Spring Data Document 1.4.0 M1
Copyright (c) [2010-2013] Pivotal Inc.
Spring Data MongoDB 1.4.0 RC1
Copyright (c) [2010-2014] Pivotal Inc.

This product is licensed to you under the Apache License, Version 2.0 (the "License").
You may not use this product except in compliance with the License.

@@ -1,4 +1,4 @@
SPRING DATA MongoDB 1.4.0.M1
SPRING DATA MongoDB 1.4.0.RC1
-----------------------------

Spring Data MongoDB is released under the terms of the Apache Software License Version 2.0 (see license.txt).