DATAMONGO-1444 - Add support for RxJava wrapper types and slice queries.

Reactive MongoDB repositories can now be composed using Project Reactor and RxJava types for method arguments and return types. Query methods and methods from the base/implementation classes can be invoked with conversion of input/output types.
This commit is contained in:
Mark Paluch
2016-03-03 16:19:18 +01:00
committed by Oliver Gierke
parent c814073441
commit 2145e212ca
94 changed files with 15565 additions and 465 deletions

View File

@@ -30,6 +30,7 @@
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>2.0.0.BUILD-SNAPSHOT</springdata.commons>
<mongo>3.2.2</mongo>
<mongo.reactivestreams>1.2.0</mongo.reactivestreams>
</properties>
<developers>

View File

@@ -51,6 +51,14 @@
<version>2.0.0.BUILD-SNAPSHOT</version>
</dependency>
<!-- reactive -->
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-core</artifactId>
<version>${reactor}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>

View File

@@ -19,6 +19,7 @@
<objenesis>1.3</objenesis>
<equalsverifier>1.5</equalsverifier>
<mongo>3.3.0</mongo>
<spring>5.0.0.BUILD-SNAPSHOT</spring>
</properties>
<dependencies>
@@ -79,6 +80,52 @@
<optional>true</optional>
</dependency>
<!-- reactive -->
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-driver-reactivestreams</artifactId>
<version>${mongo.reactivestreams}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-driver-async</artifactId>
<version>${mongo}</version>
<optional>true</optional>
<exclusions>
<exclusion>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-driver-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.mongodb</groupId>
<artifactId>bson</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-core</artifactId>
<version>${reactor}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>io.reactivex</groupId>
<artifactId>rxjava</artifactId>
<version>${rxjava}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>io.reactivex</groupId>
<artifactId>rxjava-reactive-streams</artifactId>
<version>${rxjava-reactive-streams}</version>
<optional>true</optional>
</dependency>
<!-- CDI -->
<dependency>
<groupId>javax.enterprise</groupId>
@@ -213,9 +260,11 @@
</includes>
<excludes>
<exclude>**/PerformanceTests.java</exclude>
<exclude>**/ReactivePerformanceTests.java</exclude>
</excludes>
<systemPropertyVariables>
<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
<reactor.trace.cancel>true</reactor.trace.cancel>
</systemPropertyVariables>
<properties>
<property>

View File

@@ -0,0 +1,56 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;
import com.mongodb.reactivestreams.client.MongoDatabase;
/**
 * Interface for factories creating reactive {@link MongoDatabase} instances.
 *
 * @author Mark Paluch
 * @since 2.0
 */
public interface ReactiveMongoDatabaseFactory {

	/**
	 * Creates a default {@link MongoDatabase} instance.
	 *
	 * @return the default database, never {@literal null}.
	 * @throws DataAccessException if the database cannot be obtained.
	 */
	MongoDatabase getMongoDatabase() throws DataAccessException;

	/**
	 * Creates a {@link MongoDatabase} instance to access the database with the given name.
	 *
	 * @param dbName must not be {@literal null} or empty.
	 * @return the database with the given name, never {@literal null}.
	 * @throws DataAccessException if the database cannot be obtained.
	 */
	MongoDatabase getMongoDatabase(String dbName) throws DataAccessException;

	/**
	 * Exposes a shared {@link MongoExceptionTranslator} used to translate driver exceptions into Spring's
	 * {@link DataAccessException} hierarchy.
	 *
	 * @return will never be {@literal null}.
	 */
	PersistenceExceptionTranslator getExceptionTranslator();
}

View File

@@ -15,36 +15,16 @@
*/
package org.springframework.data.mongodb.config;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.convert.converter.Converter;
import org.springframework.core.type.filter.AnnotationTypeFilter;
import org.springframework.data.annotation.Persistent;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mapping.model.FieldNamingStrategy;
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.support.CachingIsNewStrategyFactory;
import org.springframework.data.support.IsNewStrategyFactory;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
@@ -57,16 +37,11 @@ import com.mongodb.MongoClient;
* @author Thomas Darimont
* @author Ryan Tenney
* @author Christoph Strobl
* @author Mark Paluch
* @see MongoConfigurationSupport
*/
@Configuration
public abstract class AbstractMongoConfiguration {
/**
* Return the name of the database to connect to.
*
* @return must not be {@literal null}.
*/
protected abstract String getDatabaseName();
public abstract class AbstractMongoConfiguration extends MongoConfigurationSupport {
/**
* Return the name of the authentication database to use. Defaults to {@literal null} and will turn into the value
@@ -132,20 +107,6 @@ public abstract class AbstractMongoConfiguration {
return mappingBasePackage == null ? null : mappingBasePackage.getName();
}
/**
* Returns the base packages to scan for MongoDB mapped entities at startup. Will return the package name of the
* configuration class' (the concrete class, not this one here) by default. So if you have a
* {@code com.acme.AppConfig} extending {@link AbstractMongoConfiguration} the base package will be considered
* {@code com.acme} unless the method is overridden to implement alternate behavior.
*
* @return the base packages to scan for mapped {@link Document} classes or an empty collection to not enable scanning
* for entities.
* @since 1.10
*/
protected Collection<String> getMappingBasePackages() {
return Collections.singleton(getMappingBasePackage());
}
/**
* Return {@link UserCredentials} to be used when connecting to the MongoDB instance or {@literal null} if none shall
* be used.
@@ -159,47 +120,6 @@ public abstract class AbstractMongoConfiguration {
return null;
}
/**
* Creates a {@link MongoMappingContext} equipped with entity classes scanned from the mapping base package.
*
* @see #getMappingBasePackage()
* @return
* @throws ClassNotFoundException
*/
@Bean
public MongoMappingContext mongoMappingContext() throws ClassNotFoundException {
MongoMappingContext mappingContext = new MongoMappingContext();
mappingContext.setInitialEntitySet(getInitialEntitySet());
mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());
mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
return mappingContext;
}
/**
* Returns a {@link MappingContextIsNewStrategyFactory} wrapped into a {@link CachingIsNewStrategyFactory}.
*
* @return
* @throws ClassNotFoundException
*/
@Bean
public IsNewStrategyFactory isNewStrategyFactory() throws ClassNotFoundException {
return new CachingIsNewStrategyFactory(new MappingContextIsNewStrategyFactory(mongoMappingContext()));
}
/**
* Register custom {@link Converter}s in a {@link CustomConversions} object if required. These
* {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and
* {@link #mongoMappingContext()}. Returns an empty {@link CustomConversions} instance by default.
*
* @return must not be {@literal null}.
*/
@Bean
public CustomConversions customConversions() {
return new CustomConversions(Collections.emptyList());
}
/**
* Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
@@ -219,79 +139,4 @@ public abstract class AbstractMongoConfiguration {
return converter;
}
/**
* Scans the mapping base package for classes annotated with {@link Document}. By default, it scans for entities in
* all packages returned by {@link #getMappingBasePackages()}.
*
* @see #getMappingBasePackages()
* @return
* @throws ClassNotFoundException
*/
protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException {
Set<Class<?>> initialEntitySet = new HashSet<Class<?>>();
for (String basePackage : getMappingBasePackages()) {
initialEntitySet.addAll(scanForEntities(basePackage));
}
return initialEntitySet;
}
/**
* Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document} and
* {@link Persistent}.
*
* @param basePackage must not be {@literal null}.
* @return
* @throws ClassNotFoundException
* @since 1.10
*/
protected Set<Class<?>> scanForEntities(String basePackage) throws ClassNotFoundException {
if (!StringUtils.hasText(basePackage)) {
return Collections.emptySet();
}
Set<Class<?>> initialEntitySet = new HashSet<Class<?>>();
if (StringUtils.hasText(basePackage)) {
ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider(
false);
componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class));
componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class));
for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) {
initialEntitySet
.add(ClassUtils.forName(candidate.getBeanClassName(), AbstractMongoConfiguration.class.getClassLoader()));
}
}
return initialEntitySet;
}
/**
* Configures whether to abbreviate field names for domain objects by configuring a
* {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced
* customization needs, consider overriding {@link #mappingMongoConverter()}.
*
* @return
*/
protected boolean abbreviateFieldNames() {
return false;
}
/**
* Configures a {@link FieldNamingStrategy} on the {@link MongoMappingContext} instance created.
*
* @return
* @since 1.5
*/
protected FieldNamingStrategy fieldNamingStrategy() {
return abbreviateFieldNames() ? new CamelCaseAbbreviatingFieldNamingStrategy()
: PropertyNameFieldNamingStrategy.INSTANCE;
}
}

View File

@@ -0,0 +1,90 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import com.mongodb.reactivestreams.client.MongoClient;
/**
 * Base class for reactive Spring Data MongoDB configuration using JavaConfig. Extend this class and declare the
 * {@link MongoClient} to connect to; template, database factory and converter beans are set up on top of it.
 *
 * @author Mark Paluch
 * @since 2.0
 * @see MongoConfigurationSupport
 */
@Configuration
public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurationSupport {

	/**
	 * Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a
	 * {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
	 *
	 * @return the reactive streams {@link MongoClient} to be used, must not be {@literal null}.
	 */
	public abstract MongoClient mongoClient();

	/**
	 * Creates a {@link ReactiveMongoTemplate} backed by {@link #mongoDbFactory()} and {@link #mappingMongoConverter()}.
	 *
	 * @return never {@literal null}.
	 * @throws Exception if creation of the {@link MappingMongoConverter} fails.
	 */
	@Bean
	public ReactiveMongoTemplate reactiveMongoTemplate() throws Exception {
		return new ReactiveMongoTemplate(mongoDbFactory(), mappingMongoConverter());
	}

	/**
	 * Creates a {@link SimpleReactiveMongoDatabaseFactory} to be used by the {@link ReactiveMongoTemplate}. Will use the
	 * {@link MongoClient} instance configured in {@link #mongoClient()}.
	 *
	 * @see #mongoClient()
	 * @see #reactiveMongoTemplate()
	 * @return never {@literal null}.
	 */
	@Bean
	public SimpleReactiveMongoDatabaseFactory mongoDbFactory() {
		return new SimpleReactiveMongoDatabaseFactory(mongoClient(), getDatabaseName());
	}

	/**
	 * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
	 * {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
	 *
	 * @see #customConversions()
	 * @see #mongoMappingContext()
	 * @see #mongoDbFactory()
	 * @return never {@literal null}.
	 * @throws Exception if entity scanning inside {@link #mongoMappingContext()} fails.
	 */
	@Bean
	public MappingMongoConverter mappingMongoConverter() throws Exception {
		MappingMongoConverter converter = new MappingMongoConverter(ReactiveMongoTemplate.NO_OP_REF_RESOLVER,
				mongoMappingContext());
		converter.setCustomConversions(customConversions());
		return converter;
	}
}

View File

@@ -120,6 +120,12 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
converterBuilder.addPropertyValue("customConversions", conversionsDefinition);
}
if(!registry.containsBeanDefinition("indexOperationsProvider")){
BeanDefinitionBuilder indexOperationsProviderBuilder = BeanDefinitionBuilder.genericBeanDefinition("org.springframework.data.mongodb.core.DefaultIndexOperationsProvider");
indexOperationsProviderBuilder.addConstructorArgReference(dbFactoryRef);
parserContext.registerBeanComponent(new BeanComponentDefinition(indexOperationsProviderBuilder.getBeanDefinition(), "indexOperationsProvider"));
}
try {
registry.getBeanDefinition(INDEX_HELPER_BEAN_NAME);
} catch (NoSuchBeanDefinitionException ignored) {
@@ -129,7 +135,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
BeanDefinitionBuilder indexHelperBuilder = BeanDefinitionBuilder
.genericBeanDefinition(MongoPersistentEntityIndexCreator.class);
indexHelperBuilder.addConstructorArgReference(ctxRef);
indexHelperBuilder.addConstructorArgReference(dbFactoryRef);
indexHelperBuilder.addConstructorArgReference("indexOperationsProvider");
indexHelperBuilder.addDependsOn(ctxRef);
parserContext.registerBeanComponent(new BeanComponentDefinition(indexHelperBuilder.getBeanDefinition(),

View File

@@ -0,0 +1,198 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
import org.springframework.core.convert.converter.Converter;
import org.springframework.core.type.filter.AnnotationTypeFilter;
import org.springframework.data.annotation.Persistent;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mapping.model.FieldNamingStrategy;
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.support.CachingIsNewStrategyFactory;
import org.springframework.data.support.IsNewStrategyFactory;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
/**
 * Base class for Spring Data MongoDB to be extended for JavaConfiguration usage. Holds the configuration pieces shared
 * by blocking and reactive setups: database name, entity scanning and the mapping/conversion infrastructure.
 *
 * @author Mark Paluch
 * @since 2.0
 */
public abstract class MongoConfigurationSupport {

	/**
	 * Return the name of the database to connect to.
	 *
	 * @return must not be {@literal null}.
	 */
	protected abstract String getDatabaseName();

	/**
	 * Returns the base packages to scan for MongoDB mapped entities at startup. Will return the package name of the
	 * configuration class' (the concrete class, not this one here) by default. So if you have a
	 * {@code com.acme.AppConfig} extending {@link MongoConfigurationSupport} the base package will be considered
	 * {@code com.acme} unless the method is overridden to implement alternate behavior.
	 *
	 * @return the base packages to scan for mapped {@link Document} classes or an empty collection to not enable scanning
	 *         for entities.
	 */
	protected Collection<String> getMappingBasePackages() {

		Package mappingBasePackage = getClass().getPackage();
		// For classes in the default package this yields a singleton containing null;
		// scanForEntities(null) short-circuits to an empty set in that case.
		return Collections.singleton(mappingBasePackage == null ? null : mappingBasePackage.getName());
	}

	/**
	 * Creates a {@link MongoMappingContext} equipped with entity classes scanned from the mapping base packages.
	 *
	 * @see #getMappingBasePackages()
	 * @return never {@literal null}.
	 * @throws ClassNotFoundException if a scanned entity class cannot be loaded.
	 */
	@Bean
	public MongoMappingContext mongoMappingContext() throws ClassNotFoundException {

		MongoMappingContext mappingContext = new MongoMappingContext();
		mappingContext.setInitialEntitySet(getInitialEntitySet());
		mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());
		mappingContext.setFieldNamingStrategy(fieldNamingStrategy());

		return mappingContext;
	}

	/**
	 * Returns a {@link MappingContextIsNewStrategyFactory} wrapped into a {@link CachingIsNewStrategyFactory}.
	 *
	 * @return never {@literal null}.
	 * @throws ClassNotFoundException if a scanned entity class cannot be loaded.
	 */
	@Bean
	public IsNewStrategyFactory isNewStrategyFactory() throws ClassNotFoundException {
		return new CachingIsNewStrategyFactory(new MappingContextIsNewStrategyFactory(mongoMappingContext()));
	}

	/**
	 * Register custom {@link Converter}s in a {@link CustomConversions} object if required. These
	 * {@link CustomConversions} will be registered with the mapping converter and {@link #mongoMappingContext()}.
	 * Returns an empty {@link CustomConversions} instance by default.
	 *
	 * @return must not be {@literal null}.
	 */
	@Bean
	public CustomConversions customConversions() {
		return new CustomConversions(Collections.emptyList());
	}

	/**
	 * Scans the mapping base packages for classes annotated with {@link Document}. By default, it scans for entities in
	 * all packages returned by {@link #getMappingBasePackages()}.
	 *
	 * @see #getMappingBasePackages()
	 * @return never {@literal null}.
	 * @throws ClassNotFoundException if a scanned entity class cannot be loaded.
	 */
	protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException {

		Set<Class<?>> initialEntitySet = new HashSet<Class<?>>();

		for (String basePackage : getMappingBasePackages()) {
			initialEntitySet.addAll(scanForEntities(basePackage));
		}

		return initialEntitySet;
	}

	/**
	 * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document} and
	 * {@link Persistent}. Returns an empty set for a {@literal null} or empty package name.
	 *
	 * @param basePackage must not be {@literal null}.
	 * @return never {@literal null}.
	 * @throws ClassNotFoundException if a candidate class cannot be loaded.
	 */
	protected Set<Class<?>> scanForEntities(String basePackage) throws ClassNotFoundException {

		if (!StringUtils.hasText(basePackage)) {
			return Collections.emptySet();
		}

		// The early return above guarantees a non-empty package name from here on.
		Set<Class<?>> initialEntitySet = new HashSet<Class<?>>();

		ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider(
				false);
		componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class));
		componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class));

		for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) {
			initialEntitySet
					.add(ClassUtils.forName(candidate.getBeanClassName(), MongoConfigurationSupport.class.getClassLoader()));
		}

		return initialEntitySet;
	}

	/**
	 * Configures whether to abbreviate field names for domain objects by configuring a
	 * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created.
	 *
	 * @return {@literal false} by default.
	 */
	protected boolean abbreviateFieldNames() {
		return false;
	}

	/**
	 * Configures a {@link FieldNamingStrategy} on the {@link MongoMappingContext} instance created.
	 *
	 * @return never {@literal null}.
	 * @since 1.5
	 */
	protected FieldNamingStrategy fieldNamingStrategy() {
		return abbreviateFieldNames() ? new CamelCaseAbbreviatingFieldNamingStrategy()
				: PropertyNameFieldNamingStrategy.INSTANCE;
	}
}

View File

@@ -15,18 +15,15 @@
*/
package org.springframework.data.mongodb.core;
import static org.springframework.data.domain.Sort.Direction.*;
import static org.springframework.data.mongodb.core.MongoTemplate.potentiallyConvertRuntimeException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.bson.Document;
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexField;
import org.springframework.data.mongodb.core.index.IndexInfo;
import org.springframework.util.Assert;
@@ -42,28 +39,25 @@ import com.mongodb.client.model.IndexOptions;
* @author Oliver Gierke
* @author Komi Innocent
* @author Christoph Strobl
* @author Mark Paluch
*/
public class DefaultIndexOperations implements IndexOperations {
private static final Double ONE = Double.valueOf(1);
private static final Double MINUS_ONE = Double.valueOf(-1);
private static final Collection<String> TWO_D_IDENTIFIERS = Arrays.asList("2d", "2dsphere");
private final MongoOperations mongoOperations;
private final MongoDbFactory mongoDbFactory;
private final String collectionName;
/**
* Creates a new {@link DefaultIndexOperations}.
*
* @param mongoOperations must not be {@literal null}.
* @param mongoDbFactory must not be {@literal null}.
* @param collectionName must not be {@literal null}.
*/
public DefaultIndexOperations(MongoOperations mongoOperations, String collectionName) {
public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName) {
Assert.notNull(mongoOperations, "MongoOperations must not be null!");
Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!");
Assert.notNull(collectionName, "Collection name can not be null!");
this.mongoOperations = mongoOperations;
this.mongoDbFactory = mongoDbFactory;
this.collectionName = collectionName;
}
@@ -72,57 +66,18 @@ public class DefaultIndexOperations implements IndexOperations {
* @see org.springframework.data.mongodb.core.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition)
*/
public void ensureIndex(final IndexDefinition indexDefinition) {
mongoOperations.execute(collectionName, new CollectionCallback<Object>() {
public Object doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
execute(collection -> {
Document indexOptions = indexDefinition.getIndexOptions();
if (indexOptions != null) {
IndexOptions ops = new IndexOptions();
if (indexOptions.containsKey("name")) {
ops = ops.name(indexOptions.get("name").toString());
}
if (indexOptions.containsKey("unique")) {
ops = ops.unique((Boolean) indexOptions.get("unique"));
}
if (indexOptions.containsKey("sparse")) {
ops = ops.sparse((Boolean) indexOptions.get("sparse"));
}
if (indexOptions.containsKey("background")) {
ops = ops.background((Boolean) indexOptions.get("background"));
}
if (indexOptions.containsKey("expireAfterSeconds")) {
ops = ops.expireAfter((Long) indexOptions.get("expireAfterSeconds"), TimeUnit.SECONDS);
}
if (indexOptions.containsKey("min")) {
ops = ops.min(((Number) indexOptions.get("min")).doubleValue());
}
if (indexOptions.containsKey("max")) {
ops = ops.max(((Number) indexOptions.get("max")).doubleValue());
}
if (indexOptions.containsKey("bits")) {
ops = ops.bits((Integer) indexOptions.get("bits"));
}
if (indexOptions.containsKey("bucketSize")) {
ops = ops.bucketSize(((Number) indexOptions.get("bucketSize")).doubleValue());
}
if (indexOptions.containsKey("default_language")) {
ops = ops.defaultLanguage(indexOptions.get("default_language").toString());
}
if (indexOptions.containsKey("language_override")) {
ops = ops.languageOverride(indexOptions.get("language_override").toString());
}
if (indexOptions.containsKey("weights")) {
ops = ops.weights((Document) indexOptions.get("weights"));
}
IndexOptions ops = IndexConverters.DEFINITION_TO_MONGO_INDEX_OPTIONS.convert(indexDefinition);
collection.createIndex(indexDefinition.getIndexKeys(), ops);
} else {
collection.createIndex(indexDefinition.getIndexKeys());
}
return null;
}
});
}
@@ -131,7 +86,7 @@ public class DefaultIndexOperations implements IndexOperations {
* @see org.springframework.data.mongodb.core.IndexOperations#dropIndex(java.lang.String)
*/
public void dropIndex(final String name) {
mongoOperations.execute(collectionName, new CollectionCallback<Void>() {
execute(new CollectionCallback<Void>() {
public Void doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
collection.dropIndex(name);
return null;
@@ -154,7 +109,7 @@ public class DefaultIndexOperations implements IndexOperations {
*/
public List<IndexInfo> getIndexInfo() {
return mongoOperations.execute(collectionName, new CollectionCallback<List<IndexInfo>>() {
return execute(new CollectionCallback<List<IndexInfo>>() {
public List<IndexInfo> doInCollection(MongoCollection<Document> collection)
throws MongoException, DataAccessException {
@@ -169,47 +124,24 @@ public class DefaultIndexOperations implements IndexOperations {
while (cursor.hasNext()) {
Document ix = cursor.next();
Document keyDocument = (Document) ix.get("key");
int numberOfElements = keyDocument.keySet().size();
List<IndexField> indexFields = new ArrayList<IndexField>(numberOfElements);
for (String key : keyDocument.keySet()) {
Object value = keyDocument.get(key);
if (TWO_D_IDENTIFIERS.contains(value)) {
indexFields.add(IndexField.geo(key));
} else if ("text".equals(value)) {
Document weights = (Document) ix.get("weights");
for (String fieldName : weights.keySet()) {
indexFields.add(IndexField.text(fieldName, Float.valueOf(weights.get(fieldName).toString())));
}
} else {
Double keyValue = new Double(value.toString());
if (ONE.equals(keyValue)) {
indexFields.add(IndexField.create(key, ASC));
} else if (MINUS_ONE.equals(keyValue)) {
indexFields.add(IndexField.create(key, DESC));
}
}
}
String name = ix.get("name").toString();
boolean unique = ix.containsKey("unique") ? (Boolean) ix.get("unique") : false;
boolean dropDuplicates = ix.containsKey("dropDups") ? (Boolean) ix.get("dropDups") : false;
boolean sparse = ix.containsKey("sparse") ? (Boolean) ix.get("sparse") : false;
String language = ix.containsKey("default_language") ? (String) ix.get("default_language") : "";
indexInfoList.add(new IndexInfo(indexFields, name, unique, dropDuplicates, sparse, language));
IndexInfo indexInfo = IndexConverters.DOCUMENT_INDEX_INFO.convert(ix);
indexInfoList.add(indexInfo);
}
return indexInfoList;
}
});
}
/**
 * Executes the given {@link CollectionCallback} on the collection backing this instance, translating any thrown
 * {@link RuntimeException} via the factory's exception translator.
 *
 * @param callback must not be {@literal null}.
 * @return the callback's result, may be {@literal null}.
 */
public <T> T execute(CollectionCallback<T> callback) {

	Assert.notNull(callback, "CollectionCallback must not be null!");

	try {
		MongoCollection<Document> collection = mongoDbFactory.getDb().getCollection(collectionName);
		return callback.doInCollection(collection);
	} catch (RuntimeException e) {
		throw potentiallyConvertRuntimeException(e, mongoDbFactory.getExceptionTranslator());
	}
}
}

View File

@@ -0,0 +1,45 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import org.springframework.data.mongodb.MongoDbFactory;
/**
 * {@link IndexOperationsProvider} obtaining collection-bound {@link IndexOperations} from a given
 * {@link MongoDbFactory}.
 *
 * @author Mark Paluch
 * @since 2.0
 */
class DefaultIndexOperationsProvider implements IndexOperationsProvider {

	private final MongoDbFactory mongoDbFactory;

	/**
	 * Creates a new {@link DefaultIndexOperationsProvider}.
	 *
	 * @param mongoDbFactory must not be {@literal null}.
	 */
	DefaultIndexOperationsProvider(MongoDbFactory mongoDbFactory) {
		// Enforce the documented non-null contract eagerly instead of failing later on first use.
		this.mongoDbFactory = java.util.Objects.requireNonNull(mongoDbFactory, "MongoDbFactory must not be null!");
	}

	/* (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.IndexOperationsProvider#indexOps(java.lang.String)
	 */
	@Override
	public IndexOperations indexOps(String collectionName) {
		return new DefaultIndexOperations(mongoDbFactory, collectionName);
	}
}

View File

@@ -0,0 +1,102 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import org.bson.Document;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexInfo;
import org.springframework.util.Assert;
import com.mongodb.reactivestreams.client.ListIndexesPublisher;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/**
 * Default implementation of {@link ReactiveIndexOperations}, executing index commands through
 * {@link ReactiveMongoOperations} against a named collection.
 *
 * @author Mark Paluch
 * @since 2.0
 */
public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {

	private final ReactiveMongoOperations mongoOperations;
	private final String collectionName;

	/**
	 * Creates a new {@link DefaultReactiveIndexOperations}.
	 *
	 * @param mongoOperations must not be {@literal null}.
	 * @param collectionName must not be {@literal null}.
	 */
	public DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, String collectionName) {
		Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null!");
		Assert.notNull(collectionName, "Collection must not be null!");
		this.mongoOperations = mongoOperations;
		this.collectionName = collectionName;
	}

	/* (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.ReactiveIndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition)
	 */
	public Mono<String> ensureIndex(final IndexDefinition indexDefinition) {
		// Create the index with converted options when the definition carries any, otherwise keys only.
		// next() reduces the Flux returned by execute(…) to a Mono emitting the created index name.
		return mongoOperations.execute(collectionName, (ReactiveCollectionCallback<String>) collection -> {
			Document indexOptions = indexDefinition.getIndexOptions();
			if (indexOptions != null) {
				return collection.createIndex(indexDefinition.getIndexKeys(),
						IndexConverters.DEFINITION_TO_MONGO_INDEX_OPTIONS.convert(indexDefinition));
			}
			return collection.createIndex(indexDefinition.getIndexKeys());
		}).next();
	}

	/* (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.ReactiveIndexOperations#dropIndex(java.lang.String)
	 */
	public Mono<Void> dropIndex(final String name) {
		// Discard the driver's completion value and complete with an empty Mono<Void>.
		return mongoOperations.execute(collectionName, collection -> {
			return Mono.from(collection.dropIndex(name));
		}).flatMap(success -> Mono.<Void>empty()).next();
	}

	/* (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.ReactiveIndexOperations#dropAllIndexes()
	 */
	public Mono<Void> dropAllIndexes() {
		// MongoDB treats the index name "*" as "drop all indexes except the _id index".
		return dropIndex("*");
	}

	/* (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.ReactiveIndexOperations#getIndexInfo()
	 */
	public Flux<IndexInfo> getIndexInfo() {
		// Stream raw index documents from listIndexes and map each to an IndexInfo.
		return mongoOperations.execute(collectionName, collection -> {
			ListIndexesPublisher<Document> indexesPublisher = collection.listIndexes(Document.class);
			return Flux.from(indexesPublisher).map(t -> IndexConverters.DOCUMENT_INDEX_INFO.convert(t));
		});
	}
}

View File

@@ -0,0 +1,34 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import com.mongodb.DBCursor;
import com.mongodb.reactivestreams.client.FindPublisher;
/**
 * Simple callback interface to allow customization of a {@link FindPublisher}.
 *
 * @author Mark Paluch
 */
interface FindPublisherPreparer {

	/**
	 * Prepare the given {@link FindPublisher} (apply limits, skips and so on). Returns the prepared publisher.
	 *
	 * @param findPublisher the publisher to prepare.
	 * @return the prepared {@link FindPublisher}.
	 */
	<T> FindPublisher<T> prepare(FindPublisher<T> findPublisher);
}

View File

@@ -0,0 +1,158 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import static org.springframework.data.domain.Sort.Direction.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.bson.Document;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexField;
import org.springframework.data.mongodb.core.index.IndexInfo;
import org.springframework.util.ObjectUtils;
import com.mongodb.client.model.IndexOptions;
/**
 * {@link Converter Converters} for index-related MongoDB documents/types.
 *
 * @author Mark Paluch
 * @since 2.0
 */
abstract class IndexConverters {

	/** Converts an {@link IndexDefinition} into the driver's {@link IndexOptions}. */
	public final static Converter<IndexDefinition, IndexOptions> DEFINITION_TO_MONGO_INDEX_OPTIONS;

	/** Converts a raw index {@link Document} (as emitted by {@code listIndexes}) into {@link IndexInfo}. */
	public final static Converter<Document, IndexInfo> DOCUMENT_INDEX_INFO;

	private static final Double ONE = Double.valueOf(1);
	private static final Double MINUS_ONE = Double.valueOf(-1);
	private static final Collection<String> TWO_D_IDENTIFIERS = Arrays.asList("2d", "2dsphere");

	static {
		DEFINITION_TO_MONGO_INDEX_OPTIONS = getIndexDefinitionIndexOptionsConverter();
		DOCUMENT_INDEX_INFO = getDocumentIndexInfoConverter();
	}

	private IndexConverters() {}

	private static Converter<IndexDefinition, IndexOptions> getIndexDefinitionIndexOptionsConverter() {

		return indexDefinition -> {

			Document indexOptions = indexDefinition.getIndexOptions();
			IndexOptions ops = new IndexOptions();

			if (indexOptions.containsKey("name")) {
				ops = ops.name(indexOptions.get("name").toString());
			}
			if (indexOptions.containsKey("unique")) {
				ops = ops.unique((Boolean) indexOptions.get("unique"));
			}
			if (indexOptions.containsKey("sparse")) {
				ops = ops.sparse((Boolean) indexOptions.get("sparse"));
			}
			if (indexOptions.containsKey("background")) {
				ops = ops.background((Boolean) indexOptions.get("background"));
			}
			if (indexOptions.containsKey("expireAfterSeconds")) {
				ops = ops.expireAfter((Long) indexOptions.get("expireAfterSeconds"), TimeUnit.SECONDS);
			}
			if (indexOptions.containsKey("min")) {
				ops = ops.min(((Number) indexOptions.get("min")).doubleValue());
			}
			if (indexOptions.containsKey("max")) {
				ops = ops.max(((Number) indexOptions.get("max")).doubleValue());
			}
			if (indexOptions.containsKey("bits")) {
				ops = ops.bits((Integer) indexOptions.get("bits"));
			}
			if (indexOptions.containsKey("bucketSize")) {
				ops = ops.bucketSize(((Number) indexOptions.get("bucketSize")).doubleValue());
			}
			if (indexOptions.containsKey("default_language")) {
				ops = ops.defaultLanguage(indexOptions.get("default_language").toString());
			}
			if (indexOptions.containsKey("language_override")) {
				ops = ops.languageOverride(indexOptions.get("language_override").toString());
			}
			if (indexOptions.containsKey("weights")) {
				ops = ops.weights((org.bson.Document) indexOptions.get("weights"));
			}

			// Any field typed "2dsphere" requires the spherical index version to be set.
			for (String key : indexOptions.keySet()) {
				if (ObjectUtils.nullSafeEquals("2dsphere", indexOptions.get(key))) {
					ops = ops.sphereVersion(2);
				}
			}

			return ops;
		};
	}

	private static Converter<Document, IndexInfo> getDocumentIndexInfoConverter() {

		return ix -> {

			Document keyDbObject = (Document) ix.get("key");
			int numberOfElements = keyDbObject.keySet().size();

			List<IndexField> indexFields = new ArrayList<>(numberOfElements);

			for (String key : keyDbObject.keySet()) {

				Object value = keyDbObject.get(key);

				if (TWO_D_IDENTIFIERS.contains(value)) {
					indexFields.add(IndexField.geo(key));
				} else if ("text".equals(value)) {

					// Text indexes carry their per-field weights in a top-level "weights" document.
					// NOTE(review): assumes "weights" is always present for text indexes — confirm against the driver.
					Document weights = (Document) ix.get("weights");
					for (String fieldName : weights.keySet()) {
						indexFields.add(IndexField.text(fieldName, Float.valueOf(weights.get(fieldName).toString())));
					}
				} else {

					// Double.valueOf instead of the deprecated Double constructor.
					Double keyValue = Double.valueOf(value.toString());

					if (ONE.equals(keyValue)) {
						indexFields.add(IndexField.create(key, ASC));
					} else if (MINUS_ONE.equals(keyValue)) {
						indexFields.add(IndexField.create(key, DESC));
					}
				}
			}

			String name = ix.get("name").toString();

			boolean unique = ix.containsKey("unique") ? (Boolean) ix.get("unique") : false;
			boolean dropDuplicates = ix.containsKey("dropDups") ? (Boolean) ix.get("dropDups") : false;
			boolean sparse = ix.containsKey("sparse") ? (Boolean) ix.get("sparse") : false;
			String language = ix.containsKey("default_language") ? (String) ix.get("default_language") : "";

			return new IndexInfo(indexFields, name, unique, dropDuplicates, sparse, language);
		};
	}
}

View File

@@ -0,0 +1,33 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import org.springframework.dao.support.PersistenceExceptionTranslator;
/**
 * Provider interface to obtain {@link IndexOperations} by MongoDB collection name.
 * TODO: Revisit for a better pattern.
 *
 * @author Mark Paluch
 */
public interface IndexOperationsProvider {

	/**
	 * Returns the operations that can be performed on indexes.
	 *
	 * @param collectionName name of the MongoDB collection.
	 * @return index operations on the named collection
	 */
	IndexOperations indexOps(String collectionName);
}

View File

@@ -145,7 +145,7 @@ import com.mongodb.util.JSONParseException;
* @author Mark Paluch
*/
@SuppressWarnings("deprecation")
public class MongoTemplate implements MongoOperations, ApplicationContextAware {
public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider {
private static final Logger LOGGER = LoggerFactory.getLogger(MongoTemplate.class);
private static final String ID_FIELD = "_id";
@@ -230,7 +230,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
mappingContext = this.mongoConverter.getMappingContext();
// We create indexes based on mapping events
if (null != mappingContext && mappingContext instanceof MongoMappingContext) {
indexCreator = new MongoPersistentEntityIndexCreator((MongoMappingContext) mappingContext, mongoDbFactory);
indexCreator = new MongoPersistentEntityIndexCreator((MongoMappingContext) mappingContext, this);
eventPublisher = new MongoMappingEventPublisher(indexCreator);
if (mappingContext instanceof ApplicationEventPublisherAware) {
((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher);
@@ -539,11 +539,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
}
public IndexOperations indexOps(String collectionName) {
return new DefaultIndexOperations(this, collectionName);
return new DefaultIndexOperations(getMongoDbFactory(), collectionName);
}
public IndexOperations indexOps(Class<?> entityClass) {
return new DefaultIndexOperations(this, determineCollectionName(entityClass));
return new DefaultIndexOperations(getMongoDbFactory(), determineCollectionName(entityClass));
}
public BulkOperations bulkOps(BulkMode bulkMode, String collectionName) {
@@ -2039,6 +2039,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
}
}
public PersistenceExceptionTranslator getExceptionTranslator() {
return exceptionTranslator;
}
private MongoPersistentEntity<?> getPersistentEntity(Class<?> type) {
return type == null ? null : mappingContext.getPersistentEntity(type);
}

View File

@@ -0,0 +1,29 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import org.springframework.dao.DataAccessException;
import com.mongodb.MongoException;
import com.mongodb.reactivestreams.client.MongoCollection;
import org.bson.Document;
import org.reactivestreams.Publisher;
/**
 * Callback interface for executing an operation on a reactive {@link MongoCollection}.
 *
 * @author Mark Paluch
 */
public interface ReactiveCollectionCallback<T> {

	/**
	 * Performs an operation on the given {@link MongoCollection} and returns a {@link Publisher} emitting the
	 * operation's result(s).
	 *
	 * @param collection the collection to operate on.
	 * @return a {@link Publisher} emitting the result of the operation.
	 * @throws MongoException in case of driver errors.
	 * @throws DataAccessException in case of translated data access failures.
	 */
	Publisher<T> doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException;
}

View File

@@ -0,0 +1,27 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import org.springframework.dao.DataAccessException;
import com.mongodb.MongoException;
import com.mongodb.reactivestreams.client.MongoDatabase;
import org.reactivestreams.Publisher;
/**
 * Callback interface for executing an operation on a reactive {@link MongoDatabase}.
 *
 * @author Mark Paluch
 */
public interface ReactiveDatabaseCallback<T> {

	/**
	 * Performs an operation on the given {@link MongoDatabase} and returns a {@link Publisher} emitting the
	 * operation's result(s).
	 *
	 * @param db the database to operate on.
	 * @return a {@link Publisher} emitting the result of the operation.
	 * @throws MongoException in case of driver errors.
	 * @throws DataAccessException in case of translated data access failures.
	 */
	Publisher<T> doInDB(MongoDatabase db) throws MongoException, DataAccessException;
}

View File

@@ -0,0 +1,58 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexInfo;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/**
 * Reactive index operations on a collection.
 *
 * @author Mark Paluch
 * @since 2.0
 */
public interface ReactiveIndexOperations {

	/**
	 * Ensure that an index for the provided {@link IndexDefinition} exists for the collection indicated by the entity
	 * class. If not it will be created.
	 *
	 * @param indexDefinition must not be {@literal null}.
	 * @return a {@link Mono} emitting the name of the created index.
	 */
	Mono<String> ensureIndex(IndexDefinition indexDefinition);

	/**
	 * Drops an index from this collection.
	 *
	 * @param name name of index to drop
	 * @return a {@link Mono} signaling completion once the index has been dropped.
	 */
	Mono<Void> dropIndex(String name);

	/**
	 * Drops all indices from this collection.
	 *
	 * @return a {@link Mono} signaling completion once all indexes have been dropped.
	 */
	Mono<Void> dropAllIndexes();

	/**
	 * Returns the index information on the collection.
	 *
	 * @return index information on the collection
	 */
	Flux<IndexInfo> getIndexInfo();
}

View File

@@ -0,0 +1,127 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import org.springframework.beans.factory.config.AbstractFactoryBean;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.util.StringUtils;
import com.mongodb.async.client.MongoClientSettings;
import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoClients;
/**
 * Convenient factory for configuring a reactive streams {@link MongoClient}. Configuration sources are considered in
 * the following order of precedence: {@link MongoClientSettings}, connection string, then host (with optional port).
 *
 * @author Mark Paluch
 */
public class ReactiveMongoClientFactoryBean extends AbstractFactoryBean<MongoClient>
		implements PersistenceExceptionTranslator {

	private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator();

	private String connectionString;
	private String host;
	private Integer port;
	private MongoClientSettings mongoClientSettings;
	private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR;

	/**
	 * Configures the host to connect to.
	 *
	 * @param host
	 */
	public void setHost(String host) {
		this.host = host;
	}

	/**
	 * Configures the port to connect to.
	 *
	 * @param port
	 */
	public void setPort(int port) {
		this.port = port;
	}

	/**
	 * Configures the connection string.
	 *
	 * @param connectionString
	 */
	public void setConnectionString(String connectionString) {
		this.connectionString = connectionString;
	}

	/**
	 * Configures the mongo client settings.
	 *
	 * @param mongoClientSettings
	 */
	public void setMongoClientSettings(MongoClientSettings mongoClientSettings) {
		this.mongoClientSettings = mongoClientSettings;
	}

	/**
	 * Configures the {@link PersistenceExceptionTranslator} to use. Falls back to the default translator when
	 * {@literal null} is provided.
	 *
	 * @param exceptionTranslator
	 */
	public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) {
		this.exceptionTranslator = exceptionTranslator == null ? DEFAULT_EXCEPTION_TRANSLATOR : exceptionTranslator;
	}

	@Override
	public Class<?> getObjectType() {
		return MongoClient.class;
	}

	@Override
	protected MongoClient createInstance() throws Exception {

		if (mongoClientSettings != null) {
			return MongoClients.create(mongoClientSettings);
		}

		if (StringUtils.hasText(connectionString)) {
			return MongoClients.create(connectionString);
		}

		if (StringUtils.hasText(host)) {

			if (port != null) {
				return MongoClients.create(String.format("mongodb://%s:%d", host, port));
			}

			return MongoClients.create(String.format("mongodb://%s", host));
		}

		throw new IllegalStateException(
				"Cannot create MongoClients. One of the following is required: mongoClientSettings, connectionString or host/port");
	}

	@Override
	protected void destroyInstance(MongoClient instance) throws Exception {
		instance.close();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.dao.support.PersistenceExceptionTranslator#translateExceptionIfPossible(java.lang.RuntimeException)
	 */
	@Override
	public DataAccessException translateExceptionIfPossible(RuntimeException ex) {
		// Delegate to the configured translator (not unconditionally the default one) so that
		// setExceptionTranslator(…) actually takes effect.
		return exceptionTranslator.translateExceptionIfPossible(ex);
	}
}

View File

@@ -0,0 +1,206 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import java.util.ArrayList;
import java.util.List;
import org.bson.codecs.configuration.CodecRegistry;
import org.springframework.beans.factory.config.AbstractFactoryBean;
import org.springframework.util.Assert;
import com.mongodb.MongoCredential;
import com.mongodb.ReadConcern;
import com.mongodb.ReadPreference;
import com.mongodb.WriteConcern;
import com.mongodb.async.client.MongoClientSettings;
import com.mongodb.connection.ClusterSettings;
import com.mongodb.connection.ConnectionPoolSettings;
import com.mongodb.connection.ServerSettings;
import com.mongodb.connection.SocketSettings;
import com.mongodb.connection.SslSettings;
import com.mongodb.connection.StreamFactoryFactory;
/**
 * A factory bean for construction of a {@link MongoClientSettings} instance to be used with the async MongoDB driver.
 * All properties default to the values of {@link MongoClientSettings#builder()}.
 *
 * @author Mark Paluch
 * @since 2.0
 */
public class ReactiveMongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoClientSettings> {

	private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build();

	private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference();
	private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern();
	private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern();
	private List<MongoCredential> credentialList = new ArrayList<>();
	private StreamFactoryFactory streamFactoryFactory = DEFAULT_MONGO_SETTINGS.getStreamFactoryFactory();
	private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry();
	private ClusterSettings clusterSettings = DEFAULT_MONGO_SETTINGS.getClusterSettings();
	private SocketSettings socketSettings = DEFAULT_MONGO_SETTINGS.getSocketSettings();
	private SocketSettings heartbeatSocketSettings = DEFAULT_MONGO_SETTINGS.getHeartbeatSocketSettings();
	private ConnectionPoolSettings connectionPoolSettings = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings();
	private ServerSettings serverSettings = DEFAULT_MONGO_SETTINGS.getServerSettings();
	private SslSettings sslSettings = DEFAULT_MONGO_SETTINGS.getSslSettings();

	/**
	 * Set the {@link ReadPreference}.
	 *
	 * @param readPreference
	 */
	public void setReadPreference(ReadPreference readPreference) {
		this.readPreference = readPreference;
	}

	/**
	 * Set the {@link WriteConcern}.
	 *
	 * @param writeConcern
	 */
	public void setWriteConcern(WriteConcern writeConcern) {
		this.writeConcern = writeConcern;
	}

	/**
	 * Set the {@link ReadConcern}.
	 *
	 * @param readConcern
	 */
	public void setReadConcern(ReadConcern readConcern) {
		this.readConcern = readConcern;
	}

	/**
	 * Set the List of {@link MongoCredential}s, replacing any previously configured credentials.
	 *
	 * @param credentialList must not be {@literal null}.
	 */
	public void setCredentialList(List<MongoCredential> credentialList) {

		Assert.notNull(credentialList, "CredentialList must not be null!");

		// Replace (not append) — setter semantics: calling the setter twice must not accumulate credentials.
		this.credentialList = new ArrayList<>(credentialList);
	}

	/**
	 * Adds the {@link MongoCredential} to the list of credentials.
	 *
	 * @param mongoCredential must not be {@literal null}.
	 */
	public void addMongoCredential(MongoCredential mongoCredential) {

		Assert.notNull(mongoCredential, "MongoCredential must not be null!");

		this.credentialList.add(mongoCredential);
	}

	/**
	 * Set the {@link StreamFactoryFactory}.
	 *
	 * @param streamFactoryFactory
	 */
	public void setStreamFactoryFactory(StreamFactoryFactory streamFactoryFactory) {
		this.streamFactoryFactory = streamFactoryFactory;
	}

	/**
	 * Set the {@link CodecRegistry}.
	 *
	 * @param codecRegistry
	 */
	public void setCodecRegistry(CodecRegistry codecRegistry) {
		this.codecRegistry = codecRegistry;
	}

	/**
	 * Set the {@link ClusterSettings}.
	 *
	 * @param clusterSettings
	 */
	public void setClusterSettings(ClusterSettings clusterSettings) {
		this.clusterSettings = clusterSettings;
	}

	/**
	 * Set the {@link SocketSettings}.
	 *
	 * @param socketSettings
	 */
	public void setSocketSettings(SocketSettings socketSettings) {
		this.socketSettings = socketSettings;
	}

	/**
	 * Set the heartbeat {@link SocketSettings}.
	 *
	 * @param heartbeatSocketSettings
	 */
	public void setHeartbeatSocketSettings(SocketSettings heartbeatSocketSettings) {
		this.heartbeatSocketSettings = heartbeatSocketSettings;
	}

	/**
	 * Set the {@link ConnectionPoolSettings}.
	 *
	 * @param connectionPoolSettings
	 */
	public void setConnectionPoolSettings(ConnectionPoolSettings connectionPoolSettings) {
		this.connectionPoolSettings = connectionPoolSettings;
	}

	/**
	 * Set the {@link ServerSettings}.
	 *
	 * @param serverSettings
	 */
	public void setServerSettings(ServerSettings serverSettings) {
		this.serverSettings = serverSettings;
	}

	/**
	 * Set the {@link SslSettings}.
	 *
	 * @param sslSettings
	 */
	public void setSslSettings(SslSettings sslSettings) {
		this.sslSettings = sslSettings;
	}

	@Override
	public Class<?> getObjectType() {
		return MongoClientSettings.class;
	}

	@Override
	protected MongoClientSettings createInstance() throws Exception {

		return MongoClientSettings.builder() //
				.readPreference(readPreference) //
				.writeConcern(writeConcern) //
				.readConcern(readConcern) //
				.credentialList(credentialList) //
				.streamFactoryFactory(streamFactoryFactory) //
				.codecRegistry(codecRegistry) //
				.clusterSettings(clusterSettings) //
				.socketSettings(socketSettings) //
				.heartbeatSocketSettings(heartbeatSocketSettings) //
				.connectionPoolSettings(connectionPoolSettings) //
				.serverSettings(serverSettings) //
				.sslSettings(sslSettings) //
				.build();
	}
}

View File

@@ -0,0 +1,88 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import com.mongodb.reactivestreams.client.MongoDatabase;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.springframework.transaction.support.ResourceHolderSupport;
import org.springframework.util.Assert;
/**
 * Resource holder keeping track of one or more {@link MongoDatabase} instances keyed by an arbitrary object, for use
 * with Spring's transaction synchronization support.
 *
 * @author Mark Paluch
 */
class ReactiveMongoDatabaseHolder extends ResourceHolderSupport {

	private static final Object DEFAULT_KEY = new Object();

	private final Map<Object, MongoDatabase> dbMap = new ConcurrentHashMap<>();

	/**
	 * Creates a new holder registering the given database under the default key.
	 */
	public ReactiveMongoDatabaseHolder(MongoDatabase db) {
		addMongoDatabase(db);
	}

	/**
	 * Creates a new holder registering the given database under the given key.
	 */
	public ReactiveMongoDatabaseHolder(Object key, MongoDatabase db) {
		addMongoDatabase(key, db);
	}

	/**
	 * Returns the database registered under the default key, or {@literal null} if none.
	 */
	public MongoDatabase getMongoDatabase() {
		return getMongoDatabase(DEFAULT_KEY);
	}

	/**
	 * Returns the database registered under the given key, or {@literal null} if none.
	 */
	public MongoDatabase getMongoDatabase(Object key) {
		return this.dbMap.get(key);
	}

	/**
	 * Returns an arbitrary held database, or {@literal null} when the holder is empty.
	 */
	public MongoDatabase getAnyMongoDatabase() {
		return this.dbMap.isEmpty() ? null : this.dbMap.values().iterator().next();
	}

	/**
	 * Registers the given database under the default key.
	 */
	public void addMongoDatabase(MongoDatabase session) {
		addMongoDatabase(DEFAULT_KEY, session);
	}

	/**
	 * Registers the given database under the given key.
	 */
	public void addMongoDatabase(Object key, MongoDatabase session) {

		Assert.notNull(key, "Key must not be null");
		Assert.notNull(session, "DB must not be null");

		this.dbMap.put(key, session);
	}

	/**
	 * Removes and returns the database registered under the given key, or {@literal null} if none.
	 */
	public MongoDatabase removeMongoDatabase(Object key) {
		return this.dbMap.remove(key);
	}

	/**
	 * Returns whether the given database is held by this holder.
	 */
	public boolean containsMongoDatabase(MongoDatabase session) {
		return this.dbMap.containsValue(session);
	}

	/**
	 * Returns whether this holder holds no databases at all.
	 */
	public boolean isEmpty() {
		return this.dbMap.isEmpty();
	}

	/**
	 * Returns whether this holder holds nothing beyond (at most) the default-keyed database.
	 */
	public boolean doesNotHoldNonDefaultMongoDatabase() {

		synchronized (this.dbMap) {

			if (this.dbMap.isEmpty()) {
				return true;
			}

			return this.dbMap.size() == 1 && this.dbMap.containsKey(DEFAULT_KEY);
		}
	}
}

View File

@@ -0,0 +1,150 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoDatabase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mongodb.util.MongoClientVersion;
import org.springframework.transaction.support.TransactionSynchronizationManager;
/**
* Helper class featuring helper methods for internal MongoDb classes. Mainly intended for internal use within the
* framework.
*
* @author Mark Paluch
*/
public abstract class ReactiveMongoDbUtils {
private static final Logger LOGGER = LoggerFactory.getLogger(ReactiveMongoDbUtils.class);
/**
* Private constructor to prevent instantiation.
*/
private ReactiveMongoDbUtils() {}
/**
* Obtains a {@link MongoDatabase} connection for the given {@link MongoClient} instance and database name
*
* @param mongo the {@link MongoClient} instance, must not be {@literal null}.
* @param databaseName the database name, must not be {@literal null} or empty.
* @return the {@link MongoDatabase} connection
*/
public static MongoDatabase getMongoDatabase(MongoClient mongo, String databaseName) {
return doGetMongoDatabase(mongo, databaseName, UserCredentials.NO_CREDENTIALS, true, databaseName);
}
private static MongoDatabase doGetMongoDatabase(MongoClient mongo, String databaseName, UserCredentials credentials,
boolean allowCreate, String authenticationDatabaseName) {
ReactiveMongoDatabaseHolder dbHolder = (ReactiveMongoDatabaseHolder) TransactionSynchronizationManager
.getResource(mongo);
// Do we have a populated holder and TX sync active?
if (dbHolder != null && !dbHolder.isEmpty() && TransactionSynchronizationManager.isSynchronizationActive()) {
MongoDatabase db = dbHolder.getMongoDatabase(databaseName);
// DB found but not yet synchronized
if (db != null && !dbHolder.isSynchronizedWithTransaction()) {
LOGGER.debug("Registering Spring transaction synchronization for existing MongoDB {}.", databaseName);
TransactionSynchronizationManager.registerSynchronization(new MongoSynchronization(dbHolder, mongo));
dbHolder.setSynchronizedWithTransaction(true);
}
if (db != null) {
return db;
}
}
// Lookup fresh database instance
LOGGER.debug("Getting Mongo Database name=[{}]", databaseName);
MongoDatabase db = mongo.getDatabase(databaseName);
// TX sync active, bind new database to thread
if (TransactionSynchronizationManager.isSynchronizationActive()) {
LOGGER.debug("Registering Spring transaction synchronization for MongoDB instance {}.", databaseName);
ReactiveMongoDatabaseHolder holderToUse = dbHolder;
if (holderToUse == null) {
holderToUse = new ReactiveMongoDatabaseHolder(databaseName, db);
} else {
holderToUse.addMongoDatabase(databaseName, db);
}
// synchronize holder only if not yet synchronized
if (!holderToUse.isSynchronizedWithTransaction()) {
TransactionSynchronizationManager.registerSynchronization(new MongoSynchronization(holderToUse, mongo));
holderToUse.setSynchronizedWithTransaction(true);
}
if (holderToUse != dbHolder) {
TransactionSynchronizationManager.bindResource(mongo, holderToUse);
}
}
// Check whether we are allowed to return the DB.
if (!allowCreate && !isDBTransactional(db, mongo)) {
throw new IllegalStateException(
"No Mongo DB bound to thread, " + "and configuration does not allow creation of non-transactional one here");
}
return db;
}
/**
 * Return whether the given DB instance is transactional, that is, bound to the current thread by Spring's transaction
 * facilities.
 *
 * @param db the DB to check
 * @param mongoClient the Mongo instance that the DB was created with (may be {@literal null})
 * @return whether the DB is transactional
 */
public static boolean isDBTransactional(MongoDatabase db, MongoClient mongoClient) {

	if (mongoClient == null) {
		return false;
	}

	// A holder bound under the client key means a transaction-scoped database may exist for this thread.
	ReactiveMongoDatabaseHolder holder = (ReactiveMongoDatabaseHolder) TransactionSynchronizationManager
			.getResource(mongoClient);

	if (holder == null) {
		return false;
	}

	return holder.containsMongoDatabase(db);
}
/**
 * Check whether the given credentials require explicit authentication against the authentication database. When
 * running on mongo-java-driver version 3 or above there is no need for explicit authentication as the auth data has
 * to be provided within the {@code MongoClient} itself.
 *
 * @param credentials the credentials to inspect, may be {@literal null}.
 * @return {@literal true} if explicit auth-db authentication is required.
 */
private static boolean requiresAuthDbAuthentication(UserCredentials credentials) {
	return credentials != null && credentials.hasUsername() && !MongoClientVersion.isMongo3Driver();
}
}

View File

@@ -0,0 +1,958 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import java.util.Collection;
import org.bson.Document;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscription;
import org.springframework.data.geo.GeoResult;
import org.springframework.data.geo.GeoResults;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import com.mongodb.ReadPreference;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
import com.mongodb.reactivestreams.client.MongoCollection;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/**
* Interface that specifies a basic set of MongoDB operations executed in a reactive way.
* <p>
* Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability
* (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using
* {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}.
*
* @author Mark Paluch
* @see Flux
* @see Mono
* @see <a href="http://projectreactor.io/docs/">Project Reactor documentation</a>
* @since 2.0
*/
public interface ReactiveMongoOperations {
/**
* Returns the reactive operations that can be performed on indexes
*
* @return index operations on the named collection
*/
ReactiveIndexOperations reactiveIndexOps(String collectionName);
/**
* Returns the reactive operations that can be performed on indexes
*
* @return index operations on the named collection associated with the given entity class
*/
ReactiveIndexOperations reactiveIndexOps(Class<?> entityClass);
/**
* Returns the operations that can be performed on indexes
*
* @return index operations on the named collection
*/
IndexOperations indexOps(String collectionName);
/**
* Returns the operations that can be performed on indexes
*
* @return index operations on the named collection associated with the given entity class
*/
IndexOperations indexOps(Class<?> entityClass);
/**
* Execute the a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the
* MongoDB driver to convert the JSON string to a DBObject. Any errors that result from executing this command will be
* converted into Spring's DAO exception hierarchy.
*
* @param jsonCommand a MongoDB command expressed as a JSON string.
* @return a result object returned by the action
*/
Mono<Document> executeCommand(String jsonCommand);
/**
* Execute a MongoDB command. Any errors that result from executing this command will be converted into Spring's DAO
* exception hierarchy.
*
* @param command a MongoDB command
* @return a result object returned by the action
*/
Mono<Document> executeCommand(Document command);
/**
* Execute a MongoDB command. Any errors that result from executing this command will be converted into Spring's data
* access exception hierarchy.
*
* @param command a MongoDB command, must not be {@literal null}.
* @param readPreference read preferences to use, can be {@literal null}.
* @return a result object returned by the action
*/
Mono<Document> executeCommand(Document command, ReadPreference readPreference);
/**
* Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary.
* <p/>
* Allows for returning a result object, that is a domain object or a collection of domain objects.
*
* @param <T> return type
* @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance.
* @return a result object returned by the action
*/
<T> Flux<T> execute(ReactiveDatabaseCallback<T> action);
/**
* Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class.
* <p/>
* Allows for returning a result object, that is a domain object or a collection of domain objects.
*
* @param entityClass class that determines the collection to use
* @param <T> return type
* @param action callback object that specifies the MongoDB action
* @return a result object returned by the action or <tt>null</tt>
*/
<T> Flux<T> execute(Class<?> entityClass, ReactiveCollectionCallback<T> action);
/**
* Executes the given {@link ReactiveCollectionCallback} on the collection of the given name.
* <p/>
* Allows for returning a result object, that is a domain object or a collection of domain objects.
*
* @param <T> return type
* @param collectionName the name of the collection that specifies which DBCollection instance will be passed into
* @param action callback object that specifies the MongoDB action the callback action.
* @return a result object returned by the action or <tt>null</tt>
*/
<T> Flux<T> execute(String collectionName, ReactiveCollectionCallback<T> action);
/**
* Create an uncapped collection with a name based on the provided entity class.
*
* @param entityClass class that determines the collection to create
* @return the created collection
*/
<T> Mono<MongoCollection<Document>> createCollection(Class<T> entityClass);
/**
* Create a collection with a name based on the provided entity class using the options.
*
* @param entityClass class that determines the collection to create
* @param collectionOptions options to use when creating the collection.
* @return the created collection
*/
<T> Mono<MongoCollection<Document>> createCollection(Class<T> entityClass, CollectionOptions collectionOptions);
/**
* Create an uncapped collection with the provided name.
*
* @param collectionName name of the collection
* @return the created collection
*/
Mono<MongoCollection<Document>> createCollection(String collectionName);
/**
* Create a collection with the provided name and options.
*
* @param collectionName name of the collection
* @param collectionOptions options to use when creating the collection.
* @return the created collection
*/
Mono<MongoCollection<Document>> createCollection(String collectionName, CollectionOptions collectionOptions);
/**
* A set of collection names.
*
* @return list of collection names
*/
Flux<String> getCollectionNames();
/**
* Get a collection by name, creating it if it doesn't exist.
* <p/>
* Translate any exceptions as necessary.
*
* @param collectionName name of the collection
* @return an existing collection or a newly created one.
*/
MongoCollection<Document> getCollection(String collectionName);
/**
* Check to see if a collection with a name indicated by the entity class exists.
* <p/>
* Translate any exceptions as necessary.
*
* @param entityClass class that determines the name of the collection
* @return true if a collection with the given name is found, false otherwise.
*/
<T> Mono<Boolean> collectionExists(Class<T> entityClass);
/**
* Check to see if a collection with a given name exists.
* <p/>
* Translate any exceptions as necessary.
*
* @param collectionName name of the collection
* @return true if a collection with the given name is found, false otherwise.
*/
Mono<Boolean> collectionExists(String collectionName);
/**
* Drop the collection with the name indicated by the entity class.
* <p/>
* Translate any exceptions as necessary.
*
* @param entityClass class that determines the collection to drop/delete.
*/
<T> Mono<Void> dropCollection(Class<T> entityClass);
/**
* Drop the collection with the given name.
* <p/>
* Translate any exceptions as necessary.
*
* @param collectionName name of the collection to drop/delete.
*/
Mono<Void> dropCollection(String collectionName);
/**
* Query for a list of objects of type T from the collection used by the entity class.
* <p/>
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <p/>
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
* to map objects since the test for class type is done in the client and not on the server.
*
* @param entityClass the parametrized type of the returned list
* @return the converted collection
*/
<T> Flux<T> findAll(Class<T> entityClass);
/**
* Query for a list of objects of type T from the specified collection.
* <p/>
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <p/>
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
* to map objects since the test for class type is done in the client and not on the server.
*
* @param entityClass the parametrized type of the returned list.
* @param collectionName name of the collection to retrieve the objects from
* @return the converted collection
*/
<T> Flux<T> findAll(Class<T> entityClass, String collectionName);
/**
* Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the
* specified type.
* <p/>
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <p/>
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification
* @param entityClass the parametrized type of the returned list.
* @return the converted object
*/
<T> Mono<T> findOne(Query query, Class<T> entityClass);
/**
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
* type.
* <p/>
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <p/>
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification
* @param entityClass the parametrized type of the returned list.
* @param collectionName name of the collection to retrieve the objects from
* @return the converted object
*/
<T> Mono<T> findOne(Query query, Class<T> entityClass, String collectionName);
/**
* Determine result of given {@link Query} contains at least one element.
*
* @param query the {@link Query} class that specifies the criteria used to find a record.
* @param collectionName name of the collection to check for objects.
* @return
*/
Mono<Boolean> exists(Query query, String collectionName);
/**
* Determine result of given {@link Query} contains at least one element.
*
* @param query the {@link Query} class that specifies the criteria used to find a record.
* @param entityClass the parametrized type.
* @return
*/
Mono<Boolean> exists(Query query, Class<?> entityClass);
/**
* Determine result of given {@link Query} contains at least one element.
*
* @param query the {@link Query} class that specifies the criteria used to find a record.
* @param entityClass the parametrized type.
* @param collectionName name of the collection to check for objects.
* @return
*/
Mono<Boolean> exists(Query query, Class<?> entityClass, String collectionName);
/**
* Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type.
* <p/>
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <p/>
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification
* @param entityClass the parametrized type of the returned list.
* @return the List of converted objects
*/
<T> Flux<T> find(Query query, Class<T> entityClass);
/**
* Map the results of an ad-hoc query on the specified collection to a List of the specified type.
* <p/>
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <p/>
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification
* @param entityClass the parametrized type of the returned list.
* @param collectionName name of the collection to retrieve the objects from
* @return the List of converted objects
*/
<T> Flux<T> find(Query query, Class<T> entityClass, String collectionName);
/**
 * Returns a document with the given id mapped onto the given class. The collection the query is run against will be
 * derived from the given target class as well.
 *
 * @param <T>
 * @param id the id of the document to return.
 * @param entityClass the type the document shall be converted into.
 * @return the document with the given id mapped onto the given target class.
 */
<T> Mono<T> findById(Object id, Class<T> entityClass);
/**
* Returns the document with the given id from the given collection mapped onto the given target class.
*
* @param id the id of the document to return
* @param entityClass the type to convert the document to
* @param collectionName the collection to query for the document
* @param <T>
* @return
*/
<T> Mono<T> findById(Object id, Class<T> entityClass, String collectionName);
/**
 * Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Will consider entity mapping
 * information to determine the collection the query is run against. Note, that MongoDB limits the number of results
 * by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of
 * results.
 *
 * @param near must not be {@literal null}.
 * @param entityClass must not be {@literal null}.
 * @return a {@link Flux} emitting a {@link GeoResult} for each matching entity.
 */
<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass);
/**
* Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the
* number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
* particular number of results.
*
* @param near must not be {@literal null}.
* @param entityClass must not be {@literal null}.
* @param collectionName the collection to trigger the query against. If no collection name is given the entity class
* will be inspected.
* @return
*/
<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass, String collectionName);
/**
 * Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
 * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
 *
 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
 *          fields specification.
 * @param update the {@link Update} to apply on matching documents.
 * @param entityClass the parametrized type.
 * @return a {@link Mono} emitting the matched object, if any.
 */
<T> Mono<T> findAndModify(Query query, Update update, Class<T> entityClass);
/**
 * Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
 * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
 *
 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
 *          fields specification.
 * @param update the {@link Update} to apply on matching documents.
 * @param entityClass the parametrized type.
 * @param collectionName the collection to query.
 * @return a {@link Mono} emitting the matched object, if any.
 */
<T> Mono<T> findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
/**
 * Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
 * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
 * {@link FindAndModifyOptions} into account.
 *
 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
 *          fields specification.
 * @param update the {@link Update} to apply on matching documents.
 * @param options the {@link FindAndModifyOptions} holding additional information.
 * @param entityClass the parametrized type.
 * @return a {@link Mono} emitting the matched object, if any.
 */
<T> Mono<T> findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
/**
 * Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
 * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
 * {@link FindAndModifyOptions} into account.
 *
 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
 *          fields specification.
 * @param update the {@link Update} to apply on matching documents.
 * @param options the {@link FindAndModifyOptions} holding additional information.
 * @param entityClass the parametrized type.
 * @param collectionName the collection to query.
 * @return a {@link Mono} emitting the matched object, if any.
 */
<T> Mono<T> findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
		String collectionName);
/**
* Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the
* specified type. The first document that matches the query is returned and also removed from the collection in the
* database.
* <p/>
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}.
* <p/>
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification
* @param entityClass the parametrized type of the returned list.
* @return the converted object
*/
<T> Mono<T> findAndRemove(Query query, Class<T> entityClass);
/**
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
* type. The first document that matches the query is returned and also removed from the collection in the database.
* <p/>
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <p/>
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification
* @param entityClass the parametrized type of the returned list.
* @param collectionName name of the collection to retrieve the objects from
* @return the converted object
*/
<T> Mono<T> findAndRemove(Query query, Class<T> entityClass, String collectionName);
/**
* Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
*
* @param query
* @param entityClass must not be {@literal null}.
* @return
*/
Mono<Long> count(Query query, Class<?> entityClass);
/**
 * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
 * must solely consist of document field references as we lack type information to map potential property references
 * onto document fields. To make sure the query gets mapped, use {@link #count(Query, Class, String)}.
 *
 * @param query the {@link Query} used to filter the documents to count.
 * @param collectionName must not be {@literal null} or empty.
 * @return a {@link Mono} emitting the number of matching documents.
 * @see #count(Query, Class, String)
 */
Mono<Long> count(Query query, String collectionName);
/**
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
* class to map the given {@link Query}.
*
* @param query
* @param entityClass must not be {@literal null}.
* @param collectionName must not be {@literal null} or empty.
* @return
*/
Mono<Long> count(Query query, Class<?> entityClass, String collectionName);
/**
 * Insert the object into the collection for the entity type of the object to save.
 * <p/>
 * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}.
 * <p/>
 * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
 * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
 * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
 * <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert" >
 * Spring's Type Conversion</a> for more details.
 * <p/>
 * <p/>
 * Insert is used to initially store the object into the database. To update an existing object use the save method.
 *
 * @param objectToSave the object to store in the collection.
 * @return a {@link Mono} emitting the saved object.
 */
<T> Mono<T> insert(T objectToSave);
/**
* Insert the object into the specified collection.
* <p/>
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <p/>
* Insert is used to initially store the object into the database. To update an existing object use the save method.
*
* @param objectToSave the object to store in the collection
* @param collectionName name of the collection to store the object in
* @return
*/
<T> Mono<T> insert(T objectToSave, String collectionName);
/**
* Insert a Collection of objects into a collection in a single batch write to the database.
*
* @param batchToSave the list of objects to save.
* @param entityClass class that determines the collection to use
* @return
*/
<T> Flux<T> insert(Collection<? extends T> batchToSave, Class<?> entityClass);
/**
* Insert a list of objects into the specified collection in a single batch write to the database.
*
* @param batchToSave the list of objects to save.
* @param collectionName name of the collection to store the object in
* @return
*/
<T> Flux<T> insert(Collection<? extends T> batchToSave, String collectionName);
/**
* Insert a mixed Collection of objects into a database collection determining the collection name to use based on the
* class.
*
* @param objectsToSave the list of objects to save.
* @return
*/
<T> Flux<T> insertAll(Collection<? extends T> objectsToSave);
/**
 * Insert the object into the collection for the entity type of the object to save.
 * <p/>
 * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}.
 * <p/>
 * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
 * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
 * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
 * <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert" >
 * Spring's Type Conversion</a> for more details.
 * <p/>
 * <p/>
 * Insert is used to initially store the object into the database. To update an existing object use the save method.
 *
 * @param objectToSave the object to store in the collection.
 * @return a {@link Mono} emitting the saved object.
 */
<T> Mono<T> insert(Mono<? extends T> objectToSave);
/**
* Insert a Collection of objects into a collection in a single batch write to the database.
*
* @param batchToSave the publisher which provides objects to save.
* @param entityClass class that determines the collection to use
* @return
*/
<T> Flux<T> insert(Publisher<? extends T> batchToSave, Class<?> entityClass);
/**
* Insert a list of objects into the specified collection in a single batch write to the database.
*
* @param batchToSave the publisher which provides objects to save.
* @param collectionName name of the collection to store the object in
* @return
*/
<T> Flux<T> insert(Publisher<? extends T> batchToSave, String collectionName);
/**
* Insert a mixed Collection of objects into a database collection determining the collection name to use based on the
* class.
*
* @param objectsToSave the publisher which provides objects to save.
* @return
*/
<T> Flux<T> insertAll(Publisher<? extends T> objectsToSave);
/**
 * Save the object to the collection for the entity type of the object to save. This will perform an insert if the
 * object is not already present, that is an 'upsert'.
 * <p/>
 * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}. Unless
 * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
 * <p/>
 * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
 * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
 * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
 * <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert" >
 * Spring's Type Conversion</a> for more details.
 *
 * @param objectToSave the object to store in the collection
 * @return a {@link Mono} emitting the saved object.
 */
<T> Mono<T> save(T objectToSave);
/**
 * Save the object to the specified collection. This will perform an insert if the object is not already present, that
 * is an 'upsert'.
 * <p/>
 * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}. Unless
 * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
 * <p/>
 * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
 * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
 * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
 * <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert">
 * Spring's Type Conversion</a> for more details.
 *
 * @param objectToSave the object to store in the collection
 * @param collectionName name of the collection to store the object in
 * @return a {@link Mono} emitting the saved object.
 */
<T> Mono<T> save(T objectToSave, String collectionName);
/**
 * Save the object to the collection for the entity type of the object to save. This will perform an insert if the
 * object is not already present, that is an 'upsert'.
 * <p/>
 * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}. Unless
 * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
 * <p/>
 * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
 * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
 * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
 * <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert" >
 * Spring's Type Conversion</a> for more details.
 *
 * @param objectToSave the object to store in the collection
 * @return a {@link Mono} emitting the saved object.
 */
<T> Mono<T> save(Mono<? extends T> objectToSave);
/**
 * Save the object to the specified collection. This will perform an insert if the object is not already present, that
 * is an 'upsert'.
 * <p/>
 * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}. Unless
 * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
 * <p/>
 * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
 * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
 * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
 * <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert">
 * Spring's Type Conversion</a> for more details.
 *
 * @param objectToSave the object to store in the collection
 * @param collectionName name of the collection to store the object in
 * @return a {@link Mono} emitting the saved object.
 */
<T> Mono<T> save(Mono<? extends T> objectToSave, String collectionName);
/**
 * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
 * combining the query document and the update document.
 *
 * @param query the query document that specifies the criteria used to select a record to be upserted
 * @param update the update document that contains the updated object or $ operators to manipulate the existing object
 * @param entityClass class that determines the collection to use
 * @return the {@link UpdateResult} which lets you access the results of the previous write.
 */
Mono<UpdateResult> upsert(Query query, Update update, Class<?> entityClass);
/**
 * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
 * combining the query document and the update document.
 *
 * @param query the query document that specifies the criteria used to select a record to be updated
 * @param update the update document that contains the updated object or $ operators to manipulate the existing
 * object.
 * @param collectionName name of the collection to update the object in
 * @return the {@link UpdateResult} which lets you access the results of the previous write.
 */
Mono<UpdateResult> upsert(Query query, Update update, String collectionName);
/**
 * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
 * combining the query document and the update document.
 *
 * @param query the query document that specifies the criteria used to select a record to be upserted
 * @param update the update document that contains the updated object or $ operators to manipulate the existing object
 * @param entityClass class of the pojo to be operated on
 * @param collectionName name of the collection to update the object in
 * @return the {@link UpdateResult} which lets you access the results of the previous write.
 */
Mono<UpdateResult> upsert(Query query, Update update, Class<?> entityClass, String collectionName);
/**
 * Updates the first object that is found in the collection of the entity class that matches the query document with
 * the provided update document.
 *
 * @param query the query document that specifies the criteria used to select a record to be updated
 * @param update the update document that contains the updated object or $ operators to manipulate the existing
 * object.
 * @param entityClass class that determines the collection to use
 * @return the {@link UpdateResult} which lets you access the results of the previous write.
 */
Mono<UpdateResult> updateFirst(Query query, Update update, Class<?> entityClass);
/**
 * Updates the first object that is found in the specified collection that matches the query document criteria with
 * the provided updated document.
 *
 * @param query the query document that specifies the criteria used to select a record to be updated
 * @param update the update document that contains the updated object or $ operators to manipulate the existing
 * object.
 * @param collectionName name of the collection to update the object in
 * @return the {@link UpdateResult} which lets you access the results of the previous write.
 */
Mono<UpdateResult> updateFirst(Query query, Update update, String collectionName);
/**
 * Updates the first object that is found in the specified collection that matches the query document criteria with
 * the provided updated document.
 *
 * @param query the query document that specifies the criteria used to select a record to be updated
 * @param update the update document that contains the updated object or $ operators to manipulate the existing
 * object.
 * @param entityClass class of the pojo to be operated on
 * @param collectionName name of the collection to update the object in
 * @return the {@link UpdateResult} which lets you access the results of the previous write.
 */
Mono<UpdateResult> updateFirst(Query query, Update update, Class<?> entityClass, String collectionName);
/**
 * Updates all objects that are found in the collection for the entity class that matches the query document criteria
 * with the provided updated document.
 *
 * @param query the query document that specifies the criteria used to select records to be updated
 * @param update the update document that contains the updated object or $ operators to manipulate the existing
 * object.
 * @param entityClass class that determines the collection to use
 * @return the {@link UpdateResult} which lets you access the results of the previous write.
 */
Mono<UpdateResult> updateMulti(Query query, Update update, Class<?> entityClass);
/**
 * Updates all objects that are found in the specified collection that matches the query document criteria with the
 * provided updated document.
 *
 * @param query the query document that specifies the criteria used to select records to be updated
 * @param update the update document that contains the updated object or $ operators to manipulate the existing
 * object.
 * @param collectionName name of the collection to update the object in
 * @return the {@link UpdateResult} which lets you access the results of the previous write.
 */
Mono<UpdateResult> updateMulti(Query query, Update update, String collectionName);
/**
 * Updates all objects that are found in the collection for the entity class that matches the query document criteria
 * with the provided updated document.
 *
 * @param query the query document that specifies the criteria used to select records to be updated
 * @param update the update document that contains the updated object or $ operators to manipulate the existing
 * object.
 * @param entityClass class of the pojo to be operated on
 * @param collectionName name of the collection to update the object in
 * @return the {@link UpdateResult} which lets you access the results of the previous write.
 */
Mono<UpdateResult> updateMulti(Query query, Update update, Class<?> entityClass, String collectionName);
/**
 * Remove the given object from the collection by id.
 *
 * @param object the object to remove
 * @return the {@link DeleteResult} which lets you access the results of the previous delete.
 */
Mono<DeleteResult> remove(Object object);
/**
 * Removes the given object from the given collection.
 *
 * @param object the object to remove
 * @param collection must not be {@literal null} or empty.
 * @return the {@link DeleteResult} which lets you access the results of the previous delete.
 */
Mono<DeleteResult> remove(Object object, String collection);
/**
 * Remove the object emitted by the given {@link Mono} from the collection by id.
 *
 * @param objectToRemove the object to remove
 * @return the {@link DeleteResult} which lets you access the results of the previous delete.
 */
Mono<DeleteResult> remove(Mono<? extends Object> objectToRemove);
/**
 * Removes the object emitted by the given {@link Mono} from the given collection.
 *
 * @param objectToRemove the object to remove
 * @param collection must not be {@literal null} or empty.
 * @return the {@link DeleteResult} which lets you access the results of the previous delete.
 */
Mono<DeleteResult> remove(Mono<? extends Object> objectToRemove, String collection);
/**
 * Remove all documents that match the provided query document criteria from the collection used to store the
 * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query.
 *
 * @param query the query document that specifies the criteria used to remove records
 * @param entityClass class that determines the collection to use
 * @return the {@link DeleteResult} which lets you access the results of the previous delete.
 */
Mono<DeleteResult> remove(Query query, Class<?> entityClass);
/**
 * Remove all documents that match the provided query document criteria from the collection used to store the
 * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query.
 *
 * @param query the query document that specifies the criteria used to remove records
 * @param entityClass class that determines the collection to use
 * @param collectionName name of the collection where the objects will be removed
 * @return the {@link DeleteResult} which lets you access the results of the previous delete.
 */
Mono<DeleteResult> remove(Query query, Class<?> entityClass, String collectionName);
/**
 * Remove all documents from the specified collection that match the provided query document criteria. There is no
 * conversion/mapping done for any criteria using the id field.
 *
 * @param query the query document that specifies the criteria used to remove records
 * @param collectionName name of the collection where the objects will be removed
 * @return the {@link DeleteResult} which lets you access the results of the previous delete.
 */
Mono<DeleteResult> remove(Query query, String collectionName);
/**
 * Returns and removes all documents from the specified collection that match the provided query.
 *
 * @param query the query document that specifies the criteria used to find and remove records
 * @param collectionName name of the collection to find and remove the objects from
 * @return the {@link Flux} of removed and converted objects
 */
<T> Flux<T> findAllAndRemove(Query query, String collectionName);
/**
 * Returns and removes all documents matching the given query from the collection used to store the entityClass.
 *
 * @param query the query document that specifies the criteria used to find and remove records
 * @param entityClass class that determines the collection to use
 * @return the {@link Flux} of removed and converted objects
 */
<T> Flux<T> findAllAndRemove(Query query, Class<T> entityClass);
/**
 * Returns and removes all documents that match the provided query document criteria from the collection used to
 * store the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in
 * the query.
 *
 * @param query the query document that specifies the criteria used to find and remove records
 * @param entityClass class of the pojo to be operated on
 * @param collectionName name of the collection to find and remove the objects from
 * @return the {@link Flux} of removed and converted objects
 */
<T> Flux<T> findAllAndRemove(Query query, Class<T> entityClass, String collectionName);
/**
 * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified
 * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite
 * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is
 * {@link Subscription#cancel() canceled}.
 * <p/>
 * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}. Unless
 * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
 * <p/>
 * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
 * feature rich {@link Query}.
 *
 * @param query the query class that specifies the criteria used to find a record and also an optional fields
 * specification
 * @param entityClass the parametrized type of the returned {@link Flux}.
 * @return the {@link Flux} of converted objects
 */
<T> Flux<T> tail(Query query, Class<T> entityClass);
/**
 * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified
 * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite
 * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is
 * {@link Subscription#cancel() canceled}.
 * <p/>
 * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}. Unless
 * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
 * <p/>
 * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
 * feature rich {@link Query}.
 *
 * @param query the query class that specifies the criteria used to find a record and also an optional fields
 * specification
 * @param entityClass the parametrized type of the returned {@link Flux}.
 * @param collectionName name of the collection to retrieve the objects from
 * @return the {@link Flux} of converted objects
 */
<T> Flux<T> tail(Query query, Class<T> entityClass, String collectionName);
/**
 * Returns the underlying {@link MongoConverter} used to map domain objects to and from MongoDB documents.
 *
 * @return the underlying {@link MongoConverter}.
 */
MongoConverter getConverter();
}

View File

@@ -0,0 +1,134 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import java.net.UnknownHostException;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.util.Assert;
import com.mongodb.ConnectionString;
import com.mongodb.WriteConcern;
import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoClients;
import com.mongodb.reactivestreams.client.MongoDatabase;
/**
 * Factory to create {@link MongoDatabase} instances from a {@link MongoClient} instance.
 *
 * @author Mark Paluch
 * @since 2.0
 */
public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, ReactiveMongoDatabaseFactory {
// Underlying reactive driver client used to obtain database handles.
private final MongoClient mongo;
// Name of the default database returned by getMongoDatabase().
private final String databaseName;
// True only when this factory created the client itself; governs whether destroy() closes it.
private final boolean mongoInstanceCreated;
private final PersistenceExceptionTranslator exceptionTranslator;
// Optional WriteConcern applied to every MongoDatabase handed out; null means driver default.
private WriteConcern writeConcern;
/**
 * Creates a new {@link SimpleReactiveMongoDatabaseFactory} instance from the given {@link ConnectionString}. The
 * {@link MongoClient} is created internally and will be closed when {@link #destroy()} is called.
 *
 * @param connectionString must not be {@literal null} and must carry a database name.
 * @throws UnknownHostException
 */
public SimpleReactiveMongoDatabaseFactory(ConnectionString connectionString) throws UnknownHostException {
this(MongoClients.create(connectionString), connectionString.getDatabase(), true);
}
/**
 * Creates a new {@link SimpleReactiveMongoDatabaseFactory} instance from the given {@link MongoClient}. The client
 * lifecycle remains with the caller; it is not closed by {@link #destroy()}.
 *
 * @param mongoClient must not be {@literal null}.
 * @param databaseName must not be {@literal null} and must consist of letters, numbers, underscores and dashes only.
 * @since 2.0
 */
public SimpleReactiveMongoDatabaseFactory(MongoClient mongoClient, String databaseName) {
this(mongoClient, databaseName, false);
}
// Common constructor; mongoInstanceCreated records whether this factory owns the client.
private SimpleReactiveMongoDatabaseFactory(MongoClient client, String databaseName, boolean mongoInstanceCreated) {
Assert.notNull(client, "MongoClient must not be null!");
Assert.hasText(databaseName, "Database name must not be empty!");
Assert.isTrue(databaseName.matches("[\\w-]+"),
"Database name must only contain letters, numbers, underscores and dashes!");
this.mongo = client;
this.databaseName = databaseName;
this.mongoInstanceCreated = mongoInstanceCreated;
this.exceptionTranslator = new MongoExceptionTranslator();
}
/**
 * Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created.
 *
 * @param writeConcern the writeConcern to set
 */
public void setWriteConcern(WriteConcern writeConcern) {
this.writeConcern = writeConcern;
}
/*
 * (non-Javadoc)
 * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase()
 */
public MongoDatabase getMongoDatabase() throws DataAccessException {
return getMongoDatabase(databaseName);
}
/*
 * (non-Javadoc)
 * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase(java.lang.String)
 */
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
Assert.hasText(dbName, "Database name must not be empty.");
MongoDatabase db = ReactiveMongoDbUtils.getMongoDatabase(mongo, dbName);
// Apply the configured WriteConcern, if any; otherwise the database keeps the driver default.
if (writeConcern != null) {
db = db.withWriteConcern(writeConcern);
}
return db;
}
/**
 * Clean up the Mongo instance if it was created by the factory itself.
 *
 * @see DisposableBean#destroy()
 */
public void destroy() throws Exception {
if (mongoInstanceCreated) {
mongo.close();
}
}
/*
 * (non-Javadoc)
 * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getExceptionTranslator()
 */
public PersistenceExceptionTranslator getExceptionTranslator() {
return this.exceptionTranslator;
}
}

View File

@@ -15,6 +15,8 @@
*/
package org.springframework.data.mongodb.core.convert;
import reactor.core.publisher.Flux;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.MalformedURLException;
@@ -29,6 +31,7 @@ import java.util.concurrent.atomic.AtomicLong;
import org.bson.Document;
import org.bson.types.Code;
import org.bson.types.ObjectId;
import org.reactivestreams.Publisher;
import org.springframework.core.convert.ConversionFailedException;
import org.springframework.core.convert.TypeDescriptor;
import org.springframework.core.convert.converter.ConditionalConverter;
@@ -79,6 +82,7 @@ abstract class MongoConverters {
converters.add(DocumentToNamedMongoScriptConverter.INSTANCE);
converters.add(CurrencyToStringConverter.INSTANCE);
converters.add(StringToCurrencyConverter.INSTANCE);
converters.add(NumberToNumberConverterFactory.INSTANCE);
converters.add(AtomicIntegerToIntegerConverter.INSTANCE);
converters.add(AtomicLongToLongConverter.INSTANCE);
converters.add(LongToAtomicLongConverter.INSTANCE);
@@ -293,6 +297,26 @@ abstract class MongoConverters {
}
}
/**
 * {@link Converter} implementation converting a {@link Publisher} into a {@link Flux}, short-circuiting when the
 * source already is a {@link Flux}.
 */
@ReadingConverter
public enum PublisherToFluxConverter implements Converter<Publisher<?>, Flux<?>> {
INSTANCE;
/*
 * (non-Javadoc)
 * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
 */
@Override
public Flux<?> convert(Publisher<?> source) {
if (source instanceof Flux) {
return (Flux<?>) source;
}
return Flux.from(source);
}
}
/**
* {@link Converter} implementation converting ISO 4217 {@link String} into {@link Currency}.
*

View File

@@ -15,18 +15,22 @@
*/
package org.springframework.data.mongodb.core.index;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationListener;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.context.MappingContextEvent;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
import org.springframework.data.mongodb.core.IndexOperations;
import org.springframework.data.mongodb.core.IndexOperationsProvider;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
@@ -36,8 +40,6 @@ import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
import com.mongodb.MongoException;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.IndexOptions;
/**
* Component that inspects {@link MongoPersistentEntity} instances contained in the given {@link MongoMappingContext}
@@ -49,25 +51,25 @@ import com.mongodb.client.model.IndexOptions;
* @author Johno Crawford
* @author Laurent Canet
* @author Christoph Strobl
* @author Mark Paluch
*/
public class MongoPersistentEntityIndexCreator implements ApplicationListener<MappingContextEvent<?, ?>> {
private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class);
private final Map<Class<?>, Boolean> classesSeen = new ConcurrentHashMap<Class<?>, Boolean>();
private final MongoDbFactory mongoDbFactory;
private final IndexOperationsProvider indexOperationsProvider;
private final MongoMappingContext mappingContext;
private final IndexResolver indexResolver;
/**
* Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
* {@link MongoDbFactory}.
*
* @param mappingContext must not be {@literal null}.
* @param mongoDbFactory must not be {@literal null}.
* @param indexOperationsProvider must not be {@literal null}.
*/
public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, MongoDbFactory mongoDbFactory) {
this(mappingContext, mongoDbFactory, new MongoPersistentEntityIndexResolver(mappingContext));
public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, IndexOperationsProvider indexOperationsProvider) {
this(mappingContext, indexOperationsProvider, new MongoPersistentEntityIndexResolver(mappingContext));
}
/**
@@ -78,14 +80,13 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
* @param mongoDbFactory must not be {@literal null}.
* @param indexResolver must not be {@literal null}.
*/
public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, MongoDbFactory mongoDbFactory,
public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, IndexOperationsProvider indexOperationsProvider,
IndexResolver indexResolver) {
Assert.notNull(mongoDbFactory);
Assert.notNull(indexOperationsProvider);
Assert.notNull(mappingContext);
Assert.notNull(indexResolver);
this.mongoDbFactory = mongoDbFactory;
this.indexOperationsProvider = indexOperationsProvider;
this.mappingContext = mappingContext;
this.indexResolver = indexResolver;
@@ -141,64 +142,15 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
try {
IndexOptions ops = new IndexOptions();
IndexOperations indexOperations = indexOperationsProvider.indexOps(indexDefinition.getCollection());
indexOperations.ensureIndex(indexDefinition);
if (indexDefinition.getIndexOptions() != null) {
} catch (UncategorizedMongoDbException ex) {
org.bson.Document indexOptions = indexDefinition.getIndexOptions();
if (ex.getCause() instanceof MongoException &&
MongoDbErrorCodes.isDataIntegrityViolationCode(((MongoException) ex.getCause()).getCode())) {
if (indexOptions.containsKey("name")) {
ops = ops.name(indexOptions.get("name").toString());
}
if (indexOptions.containsKey("unique")) {
ops = ops.unique((Boolean) indexOptions.get("unique"));
}
if (indexOptions.containsKey("sparse")) {
ops = ops.sparse((Boolean) indexOptions.get("sparse"));
}
if (indexOptions.containsKey("background")) {
ops = ops.background((Boolean) indexOptions.get("background"));
}
if (indexOptions.containsKey("expireAfterSeconds")) {
ops = ops.expireAfter((Long) indexOptions.get("expireAfterSeconds"), TimeUnit.SECONDS);
}
if (indexOptions.containsKey("min")) {
ops = ops.min(((Number) indexOptions.get("min")).doubleValue());
}
if (indexOptions.containsKey("max")) {
ops = ops.max(((Number) indexOptions.get("max")).doubleValue());
}
if (indexOptions.containsKey("bits")) {
ops = ops.bits((Integer) indexOptions.get("bits"));
}
if (indexOptions.containsKey("bucketSize")) {
ops = ops.bucketSize(((Number) indexOptions.get("bucketSize")).doubleValue());
}
if (indexOptions.containsKey("default_language")) {
ops = ops.defaultLanguage(indexOptions.get("default_language").toString());
}
if (indexOptions.containsKey("language_override")) {
ops = ops.languageOverride(indexOptions.get("language_override").toString());
}
if (indexOptions.containsKey("weights")) {
ops = ops.weights((org.bson.Document) indexOptions.get("weights"));
}
for (String key : indexOptions.keySet()) {
if (ObjectUtils.nullSafeEquals("2dsphere", indexOptions.get(key))) {
ops = ops.sphereVersion(2);
}
}
}
mongoDbFactory.getDb().getCollection(indexDefinition.getCollection(), Document.class)
.createIndex(indexDefinition.getIndexKeys(), ops);
} catch (MongoException ex) {
if (MongoDbErrorCodes.isDataIntegrityViolationCode(ex.getCode())) {
org.bson.Document existingIndex = fetchIndexInformation(indexDefinition);
IndexInfo existingIndex = fetchIndexInformation(indexDefinition);
String message = "Cannot create index for '%s' in collection '%s' with keys '%s' and options '%s'.";
if (existingIndex != null) {
@@ -208,12 +160,10 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
throw new DataIntegrityViolationException(
String.format(message, indexDefinition.getPath(), indexDefinition.getCollection(),
indexDefinition.getIndexKeys(), indexDefinition.getIndexOptions(), existingIndex),
ex);
ex.getCause());
}
RuntimeException exceptionToThrow = mongoDbFactory.getExceptionTranslator().translateExceptionIfPossible(ex);
throw exceptionToThrow != null ? exceptionToThrow : ex;
throw ex;
}
}
@@ -227,7 +177,7 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
return this.mappingContext.equals(context);
}
private org.bson.Document fetchIndexInformation(IndexDefinitionHolder indexDefinition) {
private IndexInfo fetchIndexInformation(IndexDefinitionHolder indexDefinition) {
if (indexDefinition == null) {
return null;
@@ -235,18 +185,15 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
try {
IndexOperations indexOperations = indexOperationsProvider.indexOps(indexDefinition.getCollection());
Object indexNameToLookUp = indexDefinition.getIndexOptions().get("name");
MongoCursor<org.bson.Document> cursor = mongoDbFactory.getDb().getCollection(indexDefinition.getCollection())
.listIndexes(org.bson.Document.class).iterator();
List<IndexInfo> existingIndexes = indexOperations.getIndexInfo();
while (cursor.hasNext()) {
org.bson.Document index = cursor.next();
if (ObjectUtils.nullSafeEquals(indexNameToLookUp, index.get("name"))) {
return index;
}
}
return existingIndexes.stream().//
filter(indexInfo -> ObjectUtils.nullSafeEquals(indexNameToLookUp, indexInfo.getName())).//
findFirst().//
orElse(null);
} catch (Exception e) {
LOGGER.debug(

View File

@@ -0,0 +1,50 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.springframework.data.annotation.QueryAnnotation;
import reactor.core.Cancellation;
/**
 * Annotation to declare an infinite stream using repository query methods. An infinite stream uses MongoDB's
 * {@link com.mongodb.CursorType#TailableAwait tailable} cursors to retrieve data from a capped collection and stream
 * data as it is inserted into the collection. An infinite stream can only be used with streams that emit more than one
 * element, such as {@link reactor.core.publisher.Flux} or {@link rx.Observable}.
 * <p>
 * The stream may become dead, or invalid, if either the query returns no match or the cursor returns the document at
 * the "end" of the collection and then the application deletes that document.
 * <p>
 * A stream that is no longer in use must be {@link Cancellation#dispose() disposed} otherwise the streams will linger
 * and exhaust resources.
 *
 * @author Mark Paluch
 * @see <a href="https://docs.mongodb.com/manual/core/tailable-cursors/">Tailable Cursors</a>
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Documented
@QueryAnnotation
public @interface InfiniteStream {
}

View File

@@ -0,0 +1,78 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository;
import java.io.Serializable;
import org.reactivestreams.Publisher;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.Sort;
import org.springframework.data.repository.NoRepositoryBean;
import org.springframework.data.repository.reactive.ReactivePagingAndSortingRepository;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/**
 * Mongo specific {@link org.springframework.data.repository.Repository} interface with reactive support.
 *
 * @author Mark Paluch
 * @since 2.0
 */
@NoRepositoryBean
public interface ReactiveMongoRepository<T, ID extends Serializable> extends ReactivePagingAndSortingRepository<T, ID> {
/**
 * Inserts the given entity. Assumes the instance to be new to be able to apply insertion optimizations. Use
 * the returned instance for further operations as the save operation might have changed the entity instance
 * completely. Prefer using {@link #save(Object)} instead to avoid the usage of store-specific API.
 *
 * @param entity must not be {@literal null}.
 * @return the saved entity
 */
<S extends T> Mono<S> insert(S entity);
/**
 * Inserts the given entities. Assumes the instances to be new to be able to apply insertion optimizations. Use
 * the returned instances for further operations as the save operation might have changed the entity instances
 * completely. Prefer using {@link #save(Object)} instead to avoid the usage of store-specific API.
 *
 * @param entities must not be {@literal null}.
 * @return the saved entities
 */
<S extends T> Flux<S> insert(Iterable<S> entities);
/**
 * Inserts the given entities. Assumes the instances to be new to be able to apply insertion optimizations. Use
 * the returned instances for further operations as the save operation might have changed the entity instances
 * completely. Prefer using {@link #save(Object)} instead to avoid the usage of store-specific API.
 *
 * @param entities must not be {@literal null}.
 * @return the saved entities
 */
<S extends T> Flux<S> insert(Publisher<S> entities);
/* (non-Javadoc)
 * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example)
 */
<S extends T> Flux<S> findAll(Example<S> example);
/* (non-Javadoc)
 * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example, org.springframework.data.domain.Sort)
 */
<S extends T> Flux<S> findAll(Example<S> example, Sort sort);
}

View File

@@ -0,0 +1,141 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.config;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.context.annotation.ComponentScan.Filter;
import org.springframework.context.annotation.Import;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean;
import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactoryBean;
import org.springframework.data.repository.config.DefaultRepositoryBaseClass;
import org.springframework.data.repository.query.QueryLookupStrategy;
import org.springframework.data.repository.query.QueryLookupStrategy.Key;
/**
* Annotation to activate reactive MongoDB repositories. If no base package is configured through either
* {@link #value()}, {@link #basePackages()} or {@link #basePackageClasses()} it will trigger scanning of the package of
* annotated class.
*
* @author Mark Paluch
* @since 2.0
*/
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Inherited
@Import(ReactiveMongoRepositoriesRegistrar.class)
public @interface EnableReactiveMongoRepositories {

	/**
	 * Alias for the {@link #basePackages()} attribute. Allows for more concise annotation declarations e.g.:
	 * {@code @EnableReactiveMongoRepositories("org.my.pkg")} instead of
	 * {@code @EnableReactiveMongoRepositories(basePackages="org.my.pkg")}.
	 */
	String[] value() default {};

	/**
	 * Base packages to scan for annotated components. {@link #value()} is an alias for (and mutually exclusive with) this
	 * attribute. Use {@link #basePackageClasses()} for a type-safe alternative to String-based package names.
	 */
	String[] basePackages() default {};

	/**
	 * Type-safe alternative to {@link #basePackages()} for specifying the packages to scan for annotated components. The
	 * package of each class specified will be scanned. Consider creating a special no-op marker class or interface in
	 * each package that serves no purpose other than being referenced by this attribute.
	 */
	Class<?>[] basePackageClasses() default {};

	/**
	 * Specifies which types are eligible for component scanning. Further narrows the set of candidate components from
	 * everything in {@link #basePackages()} to everything in the base packages that matches the given filter or filters.
	 */
	Filter[] includeFilters() default {};

	/**
	 * Specifies which types are not eligible for component scanning.
	 */
	Filter[] excludeFilters() default {};

	/**
	 * Returns the postfix to be used when looking up custom repository implementations. Defaults to {@literal Impl}. So
	 * for a repository named {@code PersonRepository} the corresponding implementation class will be looked up scanning
	 * for {@code PersonRepositoryImpl}.
	 *
	 * @return the implementation postfix, {@literal Impl} by default.
	 */
	String repositoryImplementationPostfix() default "Impl";

	/**
	 * Configures the location of where to find the Spring Data named queries properties file. Will default to
	 * {@code META-INF/mongo-named-queries.properties}.
	 *
	 * @return an explicit location, or the empty String to use the default location.
	 */
	String namedQueriesLocation() default "";

	/**
	 * Returns the key of the {@link QueryLookupStrategy} to be used for lookup queries for query methods. Defaults to
	 * {@link Key#CREATE_IF_NOT_FOUND}.
	 *
	 * @return the lookup strategy key.
	 */
	Key queryLookupStrategy() default Key.CREATE_IF_NOT_FOUND;

	/**
	 * Returns the {@link FactoryBean} class to be used for each repository instance. Defaults to
	 * {@link ReactiveMongoRepositoryFactoryBean}.
	 *
	 * @return the factory bean class to create repository proxies.
	 */
	Class<?> repositoryFactoryBeanClass() default ReactiveMongoRepositoryFactoryBean.class;

	/**
	 * Configure the repository base class to be used to create repository proxies for this particular configuration.
	 *
	 * @return the base class, {@link DefaultRepositoryBaseClass} meaning "use the module default".
	 */
	Class<?> repositoryBaseClass() default DefaultRepositoryBaseClass.class;

	/**
	 * Configures the name of the reactive {@code MongoTemplate} bean to be used with the repositories detected. Defaults
	 * to {@literal reactiveMongoTemplate}.
	 *
	 * @return the bean name of the reactive template to wire.
	 */
	String reactiveMongoTemplateRef() default "reactiveMongoTemplate";

	/**
	 * Whether to automatically create indexes for query methods defined in the repository interface.
	 *
	 * @return {@literal false} by default.
	 */
	boolean createIndexesForQueryMethods() default false;

	/**
	 * Configures whether nested repository-interfaces (e.g. defined as inner classes) should be discovered by the
	 * repositories infrastructure.
	 */
	boolean considerNestedRepositories() default false;
}

View File

@@ -18,23 +18,34 @@ package org.springframework.data.mongodb.repository.config;
import java.lang.annotation.Annotation;
import java.util.Collection;
import java.util.Collections;
import java.util.stream.Collectors;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.core.annotation.AnnotationAttributes;
import org.springframework.core.io.ResourceLoader;
import org.springframework.data.config.ParsingUtils;
import org.springframework.data.mongodb.config.BeanNames;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean;
import org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource;
import org.springframework.data.repository.config.RepositoryConfiguration;
import org.springframework.data.repository.config.RepositoryConfigurationExtension;
import org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport;
import org.springframework.data.repository.config.RepositoryConfigurationSource;
import org.springframework.data.repository.config.XmlRepositoryConfigurationSource;
import org.springframework.data.repository.util.ReactiveWrappers;
import org.w3c.dom.Element;
/**
* {@link RepositoryConfigurationExtension} for MongoDB.
*
* @author Oliver Gierke
* @author Mark Paluch
*/
public class MongoRepositoryConfigurationExtension extends RepositoryConfigurationExtensionSupport {
@@ -73,7 +84,7 @@ public class MongoRepositoryConfigurationExtension extends RepositoryConfigurati
*/
@Override
protected Collection<Class<? extends Annotation>> getIdentifyingAnnotations() {
return Collections.<Class<? extends Annotation>>singleton(Document.class);
return Collections.singleton(Document.class);
}
/*
@@ -82,7 +93,7 @@ public class MongoRepositoryConfigurationExtension extends RepositoryConfigurati
*/
@Override
protected Collection<Class<?>> getIdentifyingTypes() {
return Collections.<Class<?>>singleton(MongoRepository.class);
return Collections.singleton(MongoRepository.class);
}
/*
@@ -110,4 +121,43 @@ public class MongoRepositoryConfigurationExtension extends RepositoryConfigurati
builder.addPropertyReference("mongoOperations", attributes.getString("mongoTemplateRef"));
builder.addPropertyValue("createIndexesForQueryMethods", attributes.getBoolean("createIndexesForQueryMethods"));
}
/*
 * (non-Javadoc)
 * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#registerBeansForRoot(org.springframework.beans.factory.support.BeanDefinitionRegistry, org.springframework.data.repository.config.RepositoryConfigurationSource)
 */
@Override
public void registerBeansForRoot(BeanDefinitionRegistry registry, RepositoryConfigurationSource configurationSource) {
	super.registerBeansForRoot(registry, configurationSource);
	// Register a fallback MongoMappingContext only if the user has not declared one under the
	// well-known bean name; marked as infrastructure so it is ignored by component scanning tooling.
	if (!registry.containsBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME)) {
		RootBeanDefinition definition = new RootBeanDefinition(MongoMappingContext.class);
		definition.setRole(AbstractBeanDefinition.ROLE_INFRASTRUCTURE);
		definition.setSource(configurationSource.getSource());
		registry.registerBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME, definition);
	}
}
/**
 * Filters the detected repository configurations so this (imperative) extension does not pick up
 * reactive repository interfaces: when reactive wrapper libraries are present, any interface that
 * {@code RepositoryType.isReactiveRepository} classifies as reactive is excluded and left to the
 * reactive configuration extension instead. Without reactive libraries on the classpath the
 * configurations are returned unfiltered.
 */
@Override
public <T extends RepositoryConfigurationSource> Collection<RepositoryConfiguration<T>> getRepositoryConfigurations(
		T configSource, ResourceLoader loader, boolean strictMatchesOnly) {
	Collection<RepositoryConfiguration<T>> repositoryConfigurations = super.getRepositoryConfigurations(configSource,
			loader, strictMatchesOnly);
	if (ReactiveWrappers.isAvailable()) {
		return repositoryConfigurations.stream().filter(configuration -> {
			Class<?> repositoryInterface = super.loadRepositoryInterface(configuration, loader);
			return !RepositoryType.isReactiveRepository(repositoryInterface);
		}).collect(Collectors.toList());
	}
	return repositoryConfigurations;
}
}

View File

@@ -0,0 +1,49 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.config;
import java.lang.annotation.Annotation;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport;
import org.springframework.data.repository.config.RepositoryConfigurationExtension;
/**
* Mongo-specific {@link ImportBeanDefinitionRegistrar}.
*
* @author Mark Paluch
*/
class ReactiveMongoRepositoriesRegistrar extends RepositoryBeanDefinitionRegistrarSupport {

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport#getAnnotation()
	 */
	@Override
	protected Class<? extends Annotation> getAnnotation() {
		// The enabling annotation this registrar reacts to.
		return EnableReactiveMongoRepositories.class;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport#getExtension()
	 */
	@Override
	protected RepositoryConfigurationExtension getExtension() {
		// Store-specific extension that contributes the reactive MongoDB bean definitions.
		return new ReactiveMongoRepositoryConfigurationExtension();
	}
}

View File

@@ -0,0 +1,171 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.config;
import java.lang.annotation.Annotation;
import java.util.Collection;
import java.util.Collections;
import java.util.stream.Collectors;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.core.annotation.AnnotationAttributes;
import org.springframework.core.io.ResourceLoader;
import org.springframework.data.config.ParsingUtils;
import org.springframework.data.mongodb.config.BeanNames;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.repository.ReactiveMongoRepository;
import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactoryBean;
import org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource;
import org.springframework.data.repository.config.RepositoryConfiguration;
import org.springframework.data.repository.config.RepositoryConfigurationExtension;
import org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport;
import org.springframework.data.repository.config.RepositoryConfigurationSource;
import org.springframework.data.repository.config.XmlRepositoryConfigurationSource;
import org.w3c.dom.Element;
/**
* Reactive {@link RepositoryConfigurationExtension} for MongoDB.
*
* @author Mark Paluch
*/
public class ReactiveMongoRepositoryConfigurationExtension extends RepositoryConfigurationExtensionSupport {

	/** XML attribute referencing the reactive template bean to wire into repository factory beans. */
	private static final String MONGO_TEMPLATE_REF = "reactive-mongo-template-ref";

	/** XML attribute toggling index creation for derived query methods. */
	private static final String CREATE_QUERY_INDEXES = "create-query-indexes";

	/**
	 * Tracks whether {@link #registerBeansForRoot(BeanDefinitionRegistry, RepositoryConfigurationSource)} had to register
	 * a fallback {@link MongoMappingContext} so {@link #postProcess(BeanDefinitionBuilder, RepositoryConfigurationSource)}
	 * can wire it into the repository factory beans.
	 */
	private boolean fallbackMappingContextCreated = false;

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getModuleName()
	 */
	@Override
	public String getModuleName() {
		return "Reactive MongoDB";
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getModulePrefix()
	 */
	@Override
	protected String getModulePrefix() {
		return "mongo";
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.config.RepositoryConfigurationExtension#getRepositoryFactoryClassName()
	 */
	@Override
	public String getRepositoryFactoryClassName() {
		return ReactiveMongoRepositoryFactoryBean.class.getName();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getIdentifyingAnnotations()
	 */
	@Override
	protected Collection<Class<? extends Annotation>> getIdentifyingAnnotations() {
		return Collections.singleton(Document.class);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getIdentifyingTypes()
	 */
	@Override
	protected Collection<Class<?>> getIdentifyingTypes() {
		return Collections.singleton(ReactiveMongoRepository.class);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.RepositoryConfigurationSource)
	 */
	@Override
	public void postProcess(BeanDefinitionBuilder builder, RepositoryConfigurationSource source) {
		// Only wire the mapping context reference if we registered the fallback bean ourselves;
		// a user-provided context is picked up through regular bean wiring.
		if (fallbackMappingContextCreated) {
			builder.addPropertyReference("mappingContext", BeanNames.MAPPING_CONTEXT_BEAN_NAME);
		}
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.XmlRepositoryConfigurationSource)
	 */
	@Override
	public void postProcess(BeanDefinitionBuilder builder, XmlRepositoryConfigurationSource config) {
		Element element = config.getElement();
		ParsingUtils.setPropertyReference(builder, element, MONGO_TEMPLATE_REF, "reactiveMongoOperations");
		ParsingUtils.setPropertyValue(builder, element, CREATE_QUERY_INDEXES, "createIndexesForQueryMethods");
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource)
	 */
	@Override
	public void postProcess(BeanDefinitionBuilder builder, AnnotationRepositoryConfigurationSource config) {
		AnnotationAttributes attributes = config.getAttributes();
		builder.addPropertyReference("reactiveMongoOperations", attributes.getString("reactiveMongoTemplateRef"));
		builder.addPropertyValue("createIndexesForQueryMethods", attributes.getBoolean("createIndexesForQueryMethods"));
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#registerBeansForRoot(org.springframework.beans.factory.support.BeanDefinitionRegistry, org.springframework.data.repository.config.RepositoryConfigurationSource)
	 */
	@Override
	public void registerBeansForRoot(BeanDefinitionRegistry registry, RepositoryConfigurationSource configurationSource) {
		super.registerBeansForRoot(registry, configurationSource);
		if (!registry.containsBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME)) {
			// Fix: record that the fallback context was created. Previously this flag was never set,
			// so postProcess(BeanDefinitionBuilder, RepositoryConfigurationSource) never wired the
			// "mappingContext" property reference into the factory beans.
			fallbackMappingContextCreated = true;
			RootBeanDefinition definition = new RootBeanDefinition(MongoMappingContext.class);
			definition.setRole(AbstractBeanDefinition.ROLE_INFRASTRUCTURE);
			definition.setSource(configurationSource.getSource());
			registry.registerBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME, definition);
		}
	}

	/**
	 * Restricts the detected configurations to repository interfaces that actually use reactive wrapper
	 * types, leaving imperative interfaces to {@code MongoRepositoryConfigurationExtension}.
	 */
	@Override
	public <T extends RepositoryConfigurationSource> Collection<RepositoryConfiguration<T>> getRepositoryConfigurations(
			T configSource, ResourceLoader loader, boolean strictMatchesOnly) {
		Collection<RepositoryConfiguration<T>> repositoryConfigurations = super.getRepositoryConfigurations(configSource,
				loader, strictMatchesOnly);
		return repositoryConfigurations.stream().filter(configuration -> {
			Class<?> repositoryInterface = super.loadRepositoryInterface(configuration, loader);
			return RepositoryType.isReactiveRepository(repositoryInterface);
		}).collect(Collectors.toList());
	}
}

View File

@@ -0,0 +1,70 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.config;
import java.lang.reflect.Method;
import org.springframework.data.repository.util.ReactiveWrappers;
import lombok.experimental.UtilityClass;
/**
* Utility class to discover whether a repository interface uses reactive wrapper types.
*
* @author Mark Paluch
*/
@UtilityClass
class RepositoryType {

	/**
	 * Inspects the given repository interface for usage of reactive wrapper types on any of its
	 * (public) methods, either as return type or as a method argument.
	 *
	 * @param repositoryInterface the repository interface to inspect.
	 * @return {@literal true} if the {@code repositoryInterface} uses reactive wrapper types.
	 * @see ReactiveWrappers
	 * @see ReactiveWrappers#isAvailable()
	 */
	public static boolean isReactiveRepository(Class<?> repositoryInterface) {

		// No reactive library on the classpath means no interface can be reactive.
		if (!ReactiveWrappers.isAvailable()) {
			return false;
		}

		for (Method candidate : repositoryInterface.getMethods()) {
			if (declaresReactiveWrapper(candidate)) {
				return true;
			}
		}

		return false;
	}

	/**
	 * Returns whether the method's return type or any of its parameter types is a supported
	 * reactive wrapper.
	 */
	private static boolean declaresReactiveWrapper(Method candidate) {

		boolean reactive = ReactiveWrappers.supports(candidate.getReturnType());

		for (Class<?> argumentType : candidate.getParameterTypes()) {

			if (reactive) {
				break;
			}

			reactive = ReactiveWrappers.supports(argumentType);
		}

		return reactive;
	}
}

View File

@@ -0,0 +1,212 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.query;
import org.reactivestreams.Publisher;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.convert.EntityInstantiators;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.CollectionExecution;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.DeleteExecution;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.GeoNearExecution;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.PagedExecution;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.ResultProcessingConverter;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.ResultProcessingExecution;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.SingleEntityExecution;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.SlicedExecution;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.TailExecution;
import org.springframework.data.repository.query.ParameterAccessor;
import org.springframework.data.repository.query.RepositoryQuery;
import org.springframework.data.repository.query.ResultProcessor;
import org.springframework.data.repository.util.ReactiveWrapperConverters;
import org.springframework.util.Assert;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/**
* Base class for reactive {@link RepositoryQuery} implementations for MongoDB.
*
* @author Mark Paluch
*/
public abstract class AbstractReactiveMongoQuery implements RepositoryQuery {

	private final MongoQueryMethod method;
	private final ReactiveMongoOperations operations;
	private final EntityInstantiators instantiators;

	/**
	 * Creates a new {@link AbstractReactiveMongoQuery} from the given {@link MongoQueryMethod} and
	 * {@link MongoOperations}.
	 *
	 * @param method must not be {@literal null}.
	 * @param operations must not be {@literal null}.
	 * @param conversionService must not be {@literal null}.
	 */
	public AbstractReactiveMongoQuery(MongoQueryMethod method, ReactiveMongoOperations operations,
			ConversionService conversionService) {

		Assert.notNull(method, "MongoQueryMethod must not be null!");
		Assert.notNull(operations, "ReactiveMongoOperations must not be null!");

		// NOTE(review): conversionService is documented as mandatory but is neither validated nor
		// stored here — confirm whether the parameter can be dropped or should be asserted/retained.
		this.method = method;
		this.operations = operations;
		this.instantiators = new EntityInstantiators();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.query.RepositoryQuery#getQueryMethod()
	 */
	public MongoQueryMethod getQueryMethod() {
		return method;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.query.RepositoryQuery#execute(java.lang.Object[])
	 */
	public Object execute(Object[] parameters) {
		// Reactive wrapper arguments must be resolved before the query can be built, so execution
		// is deferred into the resulting publisher's subscription in that case.
		boolean hasReactiveParameters = hasReactiveWrapperParameter();
		if (hasReactiveParameters) {
			return executeDeferred(parameters);
		}
		return execute(new MongoParametersParameterAccessor(method, parameters));
	}

	// Wraps the actual execution in Flux.defer/Mono.defer so parameter resolution and query
	// execution only happen upon subscription, matching the shape of the declared return type.
	@SuppressWarnings("unchecked")
	private Object executeDeferred(Object[] parameters) {
		ReactiveMongoParameterAccessor parameterAccessor = new ReactiveMongoParameterAccessor(method, parameters);
		if (getQueryMethod().isCollectionQuery()) {
			return Flux.defer(() -> (Publisher<Object>) execute(parameterAccessor));
		}
		return Mono.defer(() -> (Mono<Object>) execute(parameterAccessor));
	}

	// Builds the query from the accessor, applies meta attributes, and dispatches to the
	// appropriate execution strategy for the method's return type and annotations.
	private Object execute(MongoParameterAccessor parameterAccessor) {
		Query query = createQuery(new ConvertingParameterAccessor(operations.getConverter(), parameterAccessor));
		applyQueryMetaAttributesWhenPresent(query);
		ResultProcessor processor = method.getResultProcessor().withDynamicProjection(parameterAccessor);
		String collection = method.getEntityInformation().getCollectionName();
		ReactiveMongoQueryExecution execution = getExecution(query, parameterAccessor,
				new ResultProcessingConverter(processor, operations, instantiators));
		return execution.execute(query, processor.getReturnedType().getDomainType(), collection);
	}

	// Returns whether any declared method parameter is a reactive wrapper type.
	private boolean hasReactiveWrapperParameter() {
		for (MongoParameters.MongoParameter mongoParameter : method.getParameters()) {
			if (ReactiveWrapperConverters.supports(mongoParameter.getType())) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Returns the execution instance to use, wrapped so the raw result is post-processed (projection,
	 * type conversion) before being handed back to the caller.
	 *
	 * @param query must not be {@literal null}.
	 * @param accessor must not be {@literal null}.
	 * @param resultProcessing must not be {@literal null}.
	 * @return the wrapped execution strategy.
	 */
	private ReactiveMongoQueryExecution getExecution(Query query, MongoParameterAccessor accessor,
			Converter<Object, Object> resultProcessing) {
		return new ResultProcessingExecution(getExecutionToWrap(accessor), resultProcessing);
	}

	// Chooses the execution strategy. NOTE(review): the order of these checks matters — slice,
	// tailable and geo-near are checked before the generic collection-query case, presumably
	// because such methods may also report isCollectionQuery(); confirm before reordering.
	private ReactiveMongoQueryExecution getExecutionToWrap(MongoParameterAccessor accessor) {
		if (isDeleteQuery()) {
			return new DeleteExecution(operations, method);
		} else if (method.isGeoNearQuery()) {
			return new GeoNearExecution(operations, accessor, method.getReturnType());
		} else if (method.isSliceQuery()) {
			return new SlicedExecution(operations, accessor.getPageable());
		} else if (isInfiniteStream(method)) {
			return new TailExecution(operations, accessor.getPageable());
		} else if (method.isCollectionQuery()) {
			return new CollectionExecution(operations, accessor.getPageable());
		} else if (method.isPageQuery()) {
			return new PagedExecution(operations, accessor.getPageable());
		} else {
			return new SingleEntityExecution(operations, isCountQuery());
		}
	}

	// A method annotated with @InfiniteStream is executed against a tailable cursor.
	private boolean isInfiniteStream(MongoQueryMethod method) {
		return method.getInfiniteStreamAnnotation() != null;
	}

	// Copies the method's @Meta attributes (if any) onto the query; returns the same instance.
	Query applyQueryMetaAttributesWhenPresent(Query query) {
		if (method.hasQueryMetaAttributes()) {
			query.setMeta(method.getQueryMetaAttributes());
		}
		return query;
	}

	/**
	 * Creates a {@link Query} instance using the given {@link ConvertingParameterAccessor}. Will delegate to
	 * {@link #createQuery(ConvertingParameterAccessor)} by default but allows customization of the count query to be
	 * triggered.
	 *
	 * @param accessor must not be {@literal null}.
	 * @return the count query with meta attributes applied.
	 */
	protected Query createCountQuery(ConvertingParameterAccessor accessor) {
		return applyQueryMetaAttributesWhenPresent(createQuery(accessor));
	}

	/**
	 * Creates a {@link Query} instance using the given {@link ParameterAccessor}
	 *
	 * @param accessor must not be {@literal null}.
	 * @return the query to execute.
	 */
	protected abstract Query createQuery(ConvertingParameterAccessor accessor);

	/**
	 * Returns whether the query should get a count projection applied.
	 *
	 * @return {@literal true} for count queries.
	 */
	protected abstract boolean isCountQuery();

	/**
	 * Returns whether the query should delete matching documents.
	 *
	 * @return {@literal true} for delete queries.
	 * @since 1.5
	 */
	protected abstract boolean isDeleteQuery();
}

View File

@@ -66,7 +66,7 @@ interface MongoQueryExecution {
* @author Oliver Gierke
*/
@RequiredArgsConstructor
static final class CollectionExecution implements MongoQueryExecution {
final class CollectionExecution implements MongoQueryExecution {
private final @NonNull MongoOperations operations;
private final Pageable pageable;
@@ -89,7 +89,7 @@ interface MongoQueryExecution {
* @since 1.5
*/
@RequiredArgsConstructor
static final class SlicedExecution implements MongoQueryExecution {
final class SlicedExecution implements MongoQueryExecution {
private final @NonNull MongoOperations operations;
private final @NonNull Pageable pageable;
@@ -121,7 +121,7 @@ interface MongoQueryExecution {
* @author Mark Paluch
*/
@RequiredArgsConstructor
static final class PagedExecution implements MongoQueryExecution {
final class PagedExecution implements MongoQueryExecution {
private final @NonNull MongoOperations operations;
private final @NonNull Pageable pageable;
@@ -161,7 +161,7 @@ interface MongoQueryExecution {
* @author Oliver Gierke
*/
@RequiredArgsConstructor
static final class SingleEntityExecution implements MongoQueryExecution {
final class SingleEntityExecution implements MongoQueryExecution {
private final MongoOperations operations;
private final boolean countProjection;
@@ -182,7 +182,7 @@ interface MongoQueryExecution {
* @author Oliver Gierke
*/
@RequiredArgsConstructor
static class GeoNearExecution implements MongoQueryExecution {
class GeoNearExecution implements MongoQueryExecution {
private final MongoOperations operations;
private final MongoParameterAccessor accessor;
@@ -248,7 +248,7 @@ interface MongoQueryExecution {
* @author Oliver Gierke
* @author Mark Paluch
*/
static final class PagingGeoNearExecution extends GeoNearExecution {
final class PagingGeoNearExecution extends GeoNearExecution {
private final MongoOperations operations;
private final MongoParameterAccessor accessor;
@@ -299,7 +299,7 @@ interface MongoQueryExecution {
* @since 1.5
*/
@RequiredArgsConstructor
static final class DeleteExecution implements MongoQueryExecution {
final class DeleteExecution implements MongoQueryExecution {
private final MongoOperations operations;
private final MongoQueryMethod method;
@@ -325,7 +325,7 @@ interface MongoQueryExecution {
* @since 1.7
*/
@RequiredArgsConstructor
static final class StreamExecution implements MongoQueryExecution {
final class StreamExecution implements MongoQueryExecution {
private final @NonNull MongoOperations operations;
private final @NonNull Converter<Object, Object> resultProcessing;
@@ -356,7 +356,7 @@ interface MongoQueryExecution {
* @since 1.9
*/
@RequiredArgsConstructor
static final class ResultProcessingExecution implements MongoQueryExecution {
final class ResultProcessingExecution implements MongoQueryExecution {
private final @NonNull MongoQueryExecution delegate;
private final @NonNull Converter<Object, Object> converter;
@@ -378,7 +378,7 @@ interface MongoQueryExecution {
* @since 1.9
*/
@RequiredArgsConstructor
static final class ResultProcessingConverter implements Converter<Object, Object> {
final class ResultProcessingConverter implements Converter<Object, Object> {
private final @NonNull ResultProcessor processor;
private final @NonNull MongoOperations operations;

View File

@@ -28,6 +28,7 @@ import org.springframework.data.geo.GeoResults;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.repository.InfiniteStream;
import org.springframework.data.mongodb.repository.Meta;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.data.projection.ProjectionFactory;
@@ -219,6 +220,16 @@ public class MongoQueryMethod extends QueryMethod {
return AnnotatedElementUtils.findMergedAnnotation(method, Meta.class);
}
/**
 * Returns the {@link InfiniteStream} annotation that is applied to the method or {@code null} if not available.
 * Uses merged-annotation lookup, so {@link InfiniteStream} is also found when used as a meta-annotation.
 *
 * @return the annotation instance, or {@code null} if the method is not annotated.
 * @since 2.0
 */
InfiniteStream getInfiniteStreamAnnotation() {
	return AnnotatedElementUtils.findMergedAnnotation(method, InfiniteStream.class);
}
/**
* Returns the {@link org.springframework.data.mongodb.core.query.Meta} attributes to be applied.
*

View File

@@ -0,0 +1,96 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.query;
import org.springframework.data.repository.util.ReactiveWrapperConverters;
import org.springframework.data.repository.util.ReactiveWrappers;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.MonoProcessor;
/**
* Reactive {@link org.springframework.data.repository.query.ParametersParameterAccessor} implementation that subscribes
 * to reactive parameter wrapper types upon creation. This class performs synchronization when accessing parameters.
*
* @author Mark Paluch
*/
class ReactiveMongoParameterAccessor extends MongoParametersParameterAccessor {

	private final Object[] values;

	// One eager subscription per reactive argument; null entries mean "plain value, resolved by super".
	private final MonoProcessor<?>[] subscriptions;

	/**
	 * Creates a new {@link ReactiveMongoParameterAccessor} and eagerly subscribes to every reactive
	 * wrapper argument so subsequent parameter access can block for the resolved value(s).
	 *
	 * @param method the invoked query method.
	 * @param values the raw invocation arguments.
	 */
	public ReactiveMongoParameterAccessor(MongoQueryMethod method, Object[] values) {
		super(method, values);
		this.values = values;
		this.subscriptions = new MonoProcessor<?>[values.length];
		for (int i = 0; i < values.length; i++) {
			Object value = values[i];
			// Plain (non-reactive) arguments are resolved by the parent accessor as-is.
			if (value == null || !ReactiveWrappers.supports(value.getClass())) {
				continue;
			}
			if (ReactiveWrappers.isSingleValueType(value.getClass())) {
				subscriptions[i] = ReactiveWrapperConverters.toWrapper(value, Mono.class).subscribe();
			} else {
				// Multi-element wrappers are materialized into a List of all emitted elements.
				subscriptions[i] = ReactiveWrapperConverters.toWrapper(value, Flux.class).collectList().subscribe();
			}
		}
	}

	/* (non-Javadoc)
	 * @see org.springframework.data.repository.query.ParametersParameterAccessor#getValue(int)
	 */
	@SuppressWarnings("unchecked")
	@Override
	protected <T> T getValue(int index) {
		// Reactive arguments block until the upstream publisher completes; others pass through.
		if (subscriptions[index] != null) {
			return (T) subscriptions[index].block();
		}
		return super.getValue(index);
	}

	/* (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.MongoParametersParameterAccessor#getValues()
	 */
	@Override
	public Object[] getValues() {
		Object[] result = new Object[values.length];
		for (int i = 0; i < result.length; i++) {
			result[i] = getValue(i);
		}
		return result;
	}

	/* (non-Javadoc)
	 * @see org.springframework.data.repository.query.ParameterAccessor#getBindableValue(int)
	 */
	@Override // Fix: was missing @Override despite the override javadoc; declared on ParameterAccessor.
	public Object getBindableValue(int index) {
		return getValue(getParameters().getBindableParameter(index).getIndex());
	}
}

View File

@@ -0,0 +1,301 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.query;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.convert.EntityInstantiators;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Range;
import org.springframework.data.domain.Slice;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.GeoResult;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.repository.support.ReactivePageImpl;
import org.springframework.data.mongodb.repository.support.ReactiveSliceImpl;
import org.springframework.data.repository.query.ResultProcessor;
import org.springframework.data.repository.query.ReturnedType;
import org.springframework.data.repository.util.ReactiveWrappers;
import org.springframework.data.util.TypeInformation;
import org.springframework.util.ClassUtils;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import com.mongodb.client.result.DeleteResult;
/**
 * Set of classes to contain query execution strategies. Depending (mostly) on the return type of a
 * {@link org.springframework.data.repository.query.QueryMethod} a {@link AbstractReactiveMongoQuery} can be executed in various
 * flavors.
 *
 * @author Mark Paluch
 * @since 2.0
 */
interface ReactiveMongoQueryExecution {

	/**
	 * Executes the given {@link Query} against the given domain {@code type} in {@code collection}.
	 *
	 * @param query the query to execute.
	 * @param type the domain type to map results to.
	 * @param collection the collection to run the query against.
	 * @return a reactive wrapper type ({@link Mono}/{@link Flux}) holding the execution result.
	 */
	Object execute(Query query, Class<?> type, String collection);

	/**
	 * {@link ReactiveMongoQueryExecution} for collection returning queries.
	 *
	 * @author Mark Paluch
	 */
	@RequiredArgsConstructor
	final class CollectionExecution implements ReactiveMongoQueryExecution {

		private final @NonNull ReactiveMongoOperations operations;
		// Deliberately not @NonNull — presumably Query.with(Pageable) tolerates null; TODO confirm.
		private final Pageable pageable;

		@Override
		public Object execute(Query query, Class<?> type, String collection) {
			return operations.find(query.with(pageable), type, collection);
		}
	}

	/**
	 * {@link ReactiveMongoQueryExecution} for collection returning queries using tailable cursors.
	 *
	 * @author Mark Paluch
	 */
	@RequiredArgsConstructor
	final class TailExecution implements ReactiveMongoQueryExecution {

		private final @NonNull ReactiveMongoOperations operations;
		private final Pageable pageable;

		@Override
		public Object execute(Query query, Class<?> type, String collection) {
			// Tailable cursor: the resulting Flux keeps emitting as new documents arrive.
			return operations.tail(query.with(pageable), type, collection);
		}
	}

	/**
	 * {@link ReactiveMongoQueryExecution} for {@link Slice} query methods.
	 *
	 * @author Mark Paluch
	 */
	@RequiredArgsConstructor
	final class SlicedExecution implements ReactiveMongoQueryExecution {

		private final @NonNull ReactiveMongoOperations operations;
		private final @NonNull Pageable pageable;

		@Override
		public Object execute(Query query, Class<?> type, String collection) {

			int pageSize = pageable.getPageSize();

			// Apply Pageable but tweak limit to peek into next page: fetching pageSize + 1 elements
			// lets the Slice determine whether a next page exists without a count query.
			Query modifiedQuery = query.with(pageable).limit(pageSize + 1);
			Flux<?> flux = operations.find(modifiedQuery, type, collection);

			return Mono.fromSupplier(() -> new ReactiveSliceImpl<>(flux, pageable));
		}
	}

	/**
	 * {@link ReactiveMongoQueryExecution} for pagination queries.
	 *
	 * @author Mark Paluch
	 */
	@RequiredArgsConstructor
	final class PagedExecution implements ReactiveMongoQueryExecution {

		private final @NonNull ReactiveMongoOperations operations;
		private final @NonNull Pageable pageable;

		@Override
		public Object execute(Query query, Class<?> type, String collection) {

			int overallLimit = query.getLimit();

			// Count is derived from the query before pagination is applied to it below.
			Mono<Long> count = operations.count(query, type, collection);

			// Apply raw pagination
			query = query.with(pageable);

			// Adjust limit if page would exceed the overall limit
			if (overallLimit != 0 && pageable.getOffset() + pageable.getPageSize() > overallLimit) {
				query.limit(overallLimit - pageable.getOffset());
			}

			Flux<?> flux = operations.find(query, type, collection);

			return Mono.fromSupplier(() -> new ReactivePageImpl<>(flux, pageable, count));
		}
	}

	/**
	 * {@link ReactiveMongoQueryExecution} to return a single entity.
	 *
	 * @author Mark Paluch
	 */
	@RequiredArgsConstructor
	final class SingleEntityExecution implements ReactiveMongoQueryExecution {

		private final ReactiveMongoOperations operations;
		// When true, the execution emits the match count instead of the entity itself.
		private final boolean countProjection;

		@Override
		public Object execute(Query query, Class<?> type, String collection) {
			return countProjection ? operations.count(query, type, collection) : operations.findOne(query, type, collection);
		}
	}

	/**
	 * {@link ReactiveMongoQueryExecution} to execute geo-near queries.
	 *
	 * @author Mark Paluch
	 */
	@RequiredArgsConstructor
	class GeoNearExecution implements ReactiveMongoQueryExecution {

		private final ReactiveMongoOperations operations;
		private final MongoParameterAccessor accessor;
		private final TypeInformation<?> returnType;

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
		 */
		@Override
		public Object execute(Query query, Class<?> type, String collection) {

			Flux<GeoResult<Object>> results = doExecuteQuery(query, type, collection);

			// Unwrap the GeoResult content unless the query method explicitly declares a stream of GeoResult.
			return isStreamOfGeoResult() ? results : results.map(GeoResult::getContent);
		}

		/**
		 * Builds a {@link NearQuery} from the accessor's geo-near location, distance range and {@link Pageable}
		 * and executes it.
		 */
		@SuppressWarnings("unchecked")
		protected Flux<GeoResult<Object>> doExecuteQuery(Query query, Class<?> type, String collection) {

			Point nearLocation = accessor.getGeoNearLocation();
			NearQuery nearQuery = NearQuery.near(nearLocation);

			if (query != null) {
				nearQuery.query(query);
			}

			Range<Distance> distances = accessor.getDistanceRange();
			Distance maxDistance = distances.getUpperBound();

			if (maxDistance != null) {
				nearQuery.maxDistance(maxDistance).in(maxDistance.getMetric());
			}

			Distance minDistance = distances.getLowerBound();

			if (minDistance != null) {
				nearQuery.minDistance(minDistance).in(minDistance.getMetric());
			}

			Pageable pageable = accessor.getPageable();

			if (pageable != null) {
				nearQuery.with(pageable);
			}

			return (Flux) operations.geoNear(nearQuery, type, collection);
		}

		/**
		 * Returns whether the query method declares a reactive wrapper whose component type is {@link GeoResult}.
		 */
		private boolean isStreamOfGeoResult() {

			if (!ReactiveWrappers.supports(returnType.getType())) {
				return false;
			}

			TypeInformation<?> componentType = returnType.getComponentType();
			return componentType != null && GeoResult.class.equals(componentType.getType());
		}
	}

	/**
	 * {@link ReactiveMongoQueryExecution} removing documents matching the query.
	 *
	 * @author Mark Paluch
	 */
	@RequiredArgsConstructor
	final class DeleteExecution implements ReactiveMongoQueryExecution {

		private final ReactiveMongoOperations operations;
		private final MongoQueryMethod method;

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
		 */
		@Override
		public Object execute(Query query, Class<?> type, String collection) {

			// Collection query methods emit the removed documents; otherwise the number of removed documents is emitted.
			if (method.isCollectionQuery()) {
				return operations.findAllAndRemove(query, type, collection);
			}

			return operations.remove(query, type, collection).map(DeleteResult::getDeletedCount);
		}
	}

	/**
	 * An {@link ReactiveMongoQueryExecution} that wraps the results of the given delegate with the given result
	 * processing.
	 */
	@RequiredArgsConstructor
	final class ResultProcessingExecution implements ReactiveMongoQueryExecution {

		private final @NonNull ReactiveMongoQueryExecution delegate;
		private final @NonNull Converter<Object, Object> converter;

		@Override
		public Object execute(Query query, Class<?> type, String collection) {
			return converter.convert(delegate.execute(query, type, collection));
		}
	}

	/**
	 * A {@link Converter} to post-process all source objects using the given {@link ResultProcessor}.
	 *
	 * @author Mark Paluch
	 */
	@RequiredArgsConstructor
	final class ResultProcessingConverter implements Converter<Object, Object> {

		private final @NonNull ResultProcessor processor;
		private final @NonNull ReactiveMongoOperations operations;
		private final @NonNull EntityInstantiators instantiators;

		/*
		 * (non-Javadoc)
		 * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
		 */
		@Override
		public Object convert(Object source) {

			ReturnedType returnedType = processor.getReturnedType();

			// Primitives and their wrappers need no projection/DTO conversion.
			if (ClassUtils.isPrimitiveOrWrapper(returnedType.getReturnedType())) {
				return source;
			}

			Converter<Object, Object> converter = new DtoInstantiatingConverter(returnedType.getReturnedType(),
					operations.getConverter().getMappingContext(), instantiators);

			return processor.processResult(source, converter);
		}
	}
}

View File

@@ -0,0 +1,150 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.query;
import static org.springframework.data.repository.util.ClassUtils.*;
import java.lang.reflect.Method;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.domain.Sort;
import org.springframework.data.geo.GeoResult;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.data.repository.core.RepositoryMetadata;
import org.springframework.data.repository.util.ReactiveWrappers;
import org.springframework.data.util.ClassTypeInformation;
import org.springframework.data.util.TypeInformation;
/**
 * Reactive specific implementation of {@link MongoQueryMethod}.
 *
 * @author Mark Paluch
 * @since 2.0
 */
public class ReactiveMongoQueryMethod extends MongoQueryMethod {

	private static final ClassTypeInformation<Page> PAGE_TYPE = ClassTypeInformation.from(Page.class);
	private static final ClassTypeInformation<Slice> SLICE_TYPE = ClassTypeInformation.from(Slice.class);

	private final Method method;

	/**
	 * Creates a new {@link ReactiveMongoQueryMethod} from the given {@link Method}.
	 *
	 * @param method must not be {@literal null}.
	 * @param metadata must not be {@literal null}.
	 * @param projectionFactory must not be {@literal null}.
	 * @param mappingContext must not be {@literal null}.
	 * @throws IllegalStateException in case the method declares a {@link Pageable} parameter along with an unsupported
	 *           return type, or both {@link Pageable} and {@link Sort} parameters.
	 */
	public ReactiveMongoQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory projectionFactory,
			MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

		super(method, metadata, projectionFactory, mappingContext);

		if (hasParameterOfType(method, Pageable.class)) {

			TypeInformation<?> returnType = ClassTypeInformation.fromReturnTypeOf(method);

			// Paging requires either a multi-element wrapper (e.g. Flux<T>) or a single-element wrapper
			// around a Page/Slice (e.g. Mono<Page<T>>).
			boolean multiWrapper = ReactiveWrappers.isMultiValueType(returnType.getType());
			boolean singleWrapperWithWrappedPageableResult = ReactiveWrappers.isSingleValueType(returnType.getType())
					&& (PAGE_TYPE.isAssignableFrom(returnType.getComponentType())
							|| SLICE_TYPE.isAssignableFrom(returnType.getComponentType()));

			if (!multiWrapper && !singleWrapperWithWrappedPageableResult) {
				throw new IllegalStateException(String.format(
						"Method has to use either a multi-item reactive wrapper return type or a wrapped Page/Slice type. Offending method: %s",
						method.toString()));
			}

			if (hasParameterOfType(method, Sort.class)) {
				throw new IllegalStateException(String.format("Method must not have Pageable *and* Sort parameter. "
						+ "Use sorting capabilities on Pageable instead! Offending method: %s", method.toString()));
			}
		}

		this.method = method;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.MongoQueryMethod#createParameters(java.lang.reflect.Method)
	 */
	@Override
	protected MongoParameters createParameters(Method method) {
		return new MongoParameters(method, isGeoNearQuery(method));
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.query.QueryMethod#isCollectionQuery()
	 */
	@Override
	public boolean isCollectionQuery() {
		return !(isPageQuery() || isSliceQuery()) && ReactiveWrappers.isMultiValueType(method.getReturnType());
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.MongoQueryMethod#isGeoNearQuery()
	 */
	@Override
	public boolean isGeoNearQuery() {
		return isGeoNearQuery(method);
	}

	/**
	 * Returns whether the given {@link Method} declares a reactive wrapper of {@link GeoResult} elements.
	 *
	 * @param method must not be {@literal null}.
	 */
	private boolean isGeoNearQuery(Method method) {

		if (ReactiveWrappers.supports(method.getReturnType())) {

			TypeInformation<?> componentType = ClassTypeInformation.fromReturnTypeOf(method).getComponentType();

			// Guard against wrapper types without a resolvable component type (e.g. a raw Flux).
			return componentType != null && GeoResult.class.equals(componentType.getType());
		}

		return false;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.query.QueryMethod#isModifyingQuery()
	 */
	@Override
	public boolean isModifyingQuery() {
		return super.isModifyingQuery();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.query.QueryMethod#isQueryForEntity()
	 */
	@Override
	public boolean isQueryForEntity() {
		return super.isQueryForEntity();
	}

	/*
	 * All reactive query methods are streaming queries.
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.query.QueryMethod#isStreamQuery()
	 */
	@Override
	public boolean isStreamQuery() {
		return true;
	}
}

View File

@@ -0,0 +1,150 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.query;
import org.springframework.core.convert.ConversionService;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Field;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.TextCriteria;
import org.springframework.data.repository.query.QueryMethod;
import org.springframework.data.repository.query.RepositoryQuery;
import org.springframework.data.repository.query.ResultProcessor;
import org.springframework.data.repository.query.ReturnedType;
import org.springframework.data.repository.query.parser.PartTree;
import org.springframework.util.StringUtils;
import com.mongodb.util.JSONParseException;
/**
 * Reactive PartTree {@link RepositoryQuery} implementation for Mongo.
 *
 * @author Mark Paluch
 */
public class ReactivePartTreeMongoQuery extends AbstractReactiveMongoQuery {

	private final PartTree tree;
	private final boolean isGeoNearQuery;
	private final MappingContext<?, MongoPersistentProperty> context;
	private final ResultProcessor processor;

	/**
	 * Creates a new {@link ReactivePartTreeMongoQuery} from the given {@link QueryMethod} and {@link MongoTemplate}.
	 *
	 * @param method must not be {@literal null}.
	 * @param mongoOperations must not be {@literal null}.
	 * @param conversionService must not be {@literal null}.
	 */
	public ReactivePartTreeMongoQuery(MongoQueryMethod method, ReactiveMongoOperations mongoOperations,
			ConversionService conversionService) {

		super(method, mongoOperations, conversionService);

		this.processor = method.getResultProcessor();
		this.tree = new PartTree(method.getName(), processor.getReturnedType().getDomainType());
		this.isGeoNearQuery = method.isGeoNearQuery();
		this.context = mongoOperations.getConverter().getMappingContext();
	}

	/**
	 * Return the {@link PartTree} backing the query.
	 *
	 * @return the tree
	 */
	public PartTree getTree() {
		return tree;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor, boolean)
	 */
	@Override
	protected Query createQuery(ConvertingParameterAccessor accessor) {

		MongoQueryCreator creator = new MongoQueryCreator(tree, accessor, context, isGeoNearQuery);
		Query query = creator.createQuery();

		if (tree.isLimiting()) {
			query.limit(tree.getMaxResults());
		}

		TextCriteria textCriteria = accessor.getFullText();
		if (textCriteria != null) {
			query.addCriteria(textCriteria);
		}

		String fieldSpec = this.getQueryMethod().getFieldSpecification();

		if (!StringUtils.hasText(fieldSpec)) {

			// No explicit field specification — derive included fields from a (potentially dynamic) projection.
			ReturnedType returnedType = processor.withDynamicProjection(accessor).getReturnedType();

			if (returnedType.isProjecting()) {

				Field fields = query.fields();

				for (String field : returnedType.getInputProperties()) {
					fields.include(field);
				}
			}

			return query;
		}

		try {

			BasicQuery result = new BasicQuery(query.getQueryObject().toJson(), fieldSpec);
			result.setSortObject(query.getSortObject());

			// BasicQuery is built from the raw query document and the sort only, so the limit applied above
			// has to be carried over explicitly or it would be lost for limiting queries (e.g. findTop3By…).
			if (tree.isLimiting()) {
				result.limit(tree.getMaxResults());
			}

			return result;
		} catch (JSONParseException o_O) {
			throw new IllegalStateException(String.format("Invalid query or field specification in %s!", getQueryMethod()),
					o_O);
		}
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createCountQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor)
	 */
	@Override
	protected Query createCountQuery(ConvertingParameterAccessor accessor) {
		// Counting ignores geo-near semantics on purpose.
		return new MongoQueryCreator(tree, accessor, context, false).createQuery();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isCountQuery()
	 */
	@Override
	protected boolean isCountQuery() {
		return tree.isCountProjection();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isDeleteQuery()
	 */
	@Override
	protected boolean isDeleteQuery() {
		return tree.isDelete();
	}
}

View File

@@ -0,0 +1,143 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.query;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.convert.ConversionService;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.repository.query.ExpressionEvaluatingParameterBinder.BindingContext;
import org.springframework.data.mongodb.repository.query.StringBasedMongoQuery.ParameterBinding;
import org.springframework.data.mongodb.repository.query.StringBasedMongoQuery.ParameterBindingParser;
import org.springframework.data.repository.query.EvaluationContextProvider;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.util.Assert;
/**
 * Query to use a plain JSON String to create the {@link Query} to actually execute.
 *
 * @author Mark Paluch
 */
public class ReactiveStringBasedMongoQuery extends AbstractReactiveMongoQuery {

	private static final String COUNT_AND_DELETE = "Manually defined query for %s cannot be both a count and delete query at the same time!";
	private static final Logger LOG = LoggerFactory.getLogger(ReactiveStringBasedMongoQuery.class);
	private static final ParameterBindingParser BINDING_PARSER = ParameterBindingParser.INSTANCE;

	private final String query;
	private final String fieldSpec;
	private final boolean isCountQuery;
	private final boolean isDeleteQuery;
	private final List<ParameterBinding> queryParameterBindings;
	private final List<ParameterBinding> fieldSpecParameterBindings;
	private final ExpressionEvaluatingParameterBinder parameterBinder;

	/**
	 * Creates a new {@link ReactiveStringBasedMongoQuery} for the given {@link MongoQueryMethod} and
	 * {@link MongoOperations}, using the method's annotated query.
	 *
	 * @param method must not be {@literal null}.
	 * @param mongoOperations must not be {@literal null}.
	 * @param expressionParser must not be {@literal null}.
	 * @param evaluationContextProvider must not be {@literal null}.
	 * @param conversionService must not be {@literal null}.
	 */
	public ReactiveStringBasedMongoQuery(MongoQueryMethod method, ReactiveMongoOperations mongoOperations,
			SpelExpressionParser expressionParser, EvaluationContextProvider evaluationContextProvider,
			ConversionService conversionService) {
		this(method.getAnnotatedQuery(), method, mongoOperations, expressionParser, evaluationContextProvider,
				conversionService);
	}

	/**
	 * Creates a new {@link ReactiveStringBasedMongoQuery} for the given {@link String}, {@link MongoQueryMethod},
	 * {@link MongoOperations}, {@link SpelExpressionParser} and {@link EvaluationContextProvider}.
	 *
	 * @param query must not be {@literal null}.
	 * @param method must not be {@literal null}.
	 * @param mongoOperations must not be {@literal null}.
	 * @param expressionParser must not be {@literal null}.
	 * @param evaluationContextProvider must not be {@literal null}.
	 * @param conversionService must not be {@literal null}.
	 * @throws IllegalArgumentException in case the query is declared to be both a count and a delete query.
	 */
	public ReactiveStringBasedMongoQuery(String query, MongoQueryMethod method, ReactiveMongoOperations mongoOperations,
			SpelExpressionParser expressionParser, EvaluationContextProvider evaluationContextProvider,
			ConversionService conversionService) {

		super(method, mongoOperations, conversionService);

		Assert.notNull(query, "Query must not be null!");
		Assert.notNull(expressionParser, "SpelExpressionParser must not be null!");

		// Parsing strips placeholder syntax from the query/field documents and records the bindings for later use.
		this.queryParameterBindings = new ArrayList<ParameterBinding>();
		this.query = BINDING_PARSER.parseAndCollectParameterBindingsFromQueryIntoBindings(query,
				this.queryParameterBindings);

		this.fieldSpecParameterBindings = new ArrayList<ParameterBinding>();
		this.fieldSpec = BINDING_PARSER.parseAndCollectParameterBindingsFromQueryIntoBindings(
				method.getFieldSpecification(), this.fieldSpecParameterBindings);

		this.isCountQuery = method.hasAnnotatedQuery() ? method.getQueryAnnotation().count() : false;
		this.isDeleteQuery = method.hasAnnotatedQuery() ? method.getQueryAnnotation().delete() : false;

		if (isCountQuery && isDeleteQuery) {
			throw new IllegalArgumentException(String.format(COUNT_AND_DELETE, method));
		}

		this.parameterBinder = new ExpressionEvaluatingParameterBinder(expressionParser, evaluationContextProvider);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor)
	 */
	@Override
	protected Query createQuery(ConvertingParameterAccessor accessor) {

		String queryString = parameterBinder.bind(this.query, accessor,
				new BindingContext(getQueryMethod().getParameters(), queryParameterBindings));
		String fieldsString = parameterBinder.bind(this.fieldSpec, accessor,
				new BindingContext(getQueryMethod().getParameters(), fieldSpecParameterBindings));

		Query query = new BasicQuery(queryString, fieldsString).with(accessor.getSort());

		if (LOG.isDebugEnabled()) {
			LOG.debug(String.format("Created query %s for %s fields.", query.getQueryObject(), query.getFieldsObject()));
		}

		return query;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isCountQuery()
	 */
	@Override
	protected boolean isCountQuery() {
		return isCountQuery;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isDeleteQuery()
	 */
	@Override
	protected boolean isDeleteQuery() {
		return this.isDeleteQuery;
	}
}

View File

@@ -150,7 +150,7 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
*
* @author Thomas Darimont
*/
private static enum ParameterBindingParser {
static enum ParameterBindingParser {
INSTANCE;

View File

@@ -23,6 +23,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.IndexOperationsProvider;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.repository.query.MongoEntityMetadata;
@@ -38,23 +39,24 @@ import org.springframework.util.Assert;
* refers to.
*
* @author Oliver Gierke
* @author Mark Paluch
*/
class IndexEnsuringQueryCreationListener implements QueryCreationListener<PartTreeMongoQuery> {
private static final Set<Type> GEOSPATIAL_TYPES = new HashSet<Type>(Arrays.asList(Type.NEAR, Type.WITHIN));
private static final Logger LOG = LoggerFactory.getLogger(IndexEnsuringQueryCreationListener.class);
private final MongoOperations operations;
private final IndexOperationsProvider indexOperationsProvider;
/**
* Creates a new {@link IndexEnsuringQueryCreationListener} using the given {@link MongoOperations}.
*
* @param operations must not be {@literal null}.
* @param indexOperationsProvider must not be {@literal null}.
*/
public IndexEnsuringQueryCreationListener(MongoOperations operations) {
public IndexEnsuringQueryCreationListener(IndexOperationsProvider indexOperationsProvider) {
Assert.notNull(operations);
this.operations = operations;
Assert.notNull(indexOperationsProvider);
this.indexOperationsProvider = indexOperationsProvider;
}
/*
@@ -85,7 +87,7 @@ class IndexEnsuringQueryCreationListener implements QueryCreationListener<PartTr
}
MongoEntityMetadata<?> metadata = query.getQueryMethod().getEntityInformation();
operations.indexOps(metadata.getCollectionName()).ensureIndex(index);
indexOperationsProvider.indexOps(metadata.getCollectionName()).ensureIndex(index);
LOG.debug(String.format("Created %s!", index));
}

View File

@@ -40,8 +40,12 @@ import org.springframework.data.repository.query.EvaluationContextProvider;
import org.springframework.data.repository.query.QueryLookupStrategy;
import org.springframework.data.repository.query.QueryLookupStrategy.Key;
import org.springframework.data.repository.query.RepositoryQuery;
import org.springframework.data.repository.reactive.ReactiveCrudRepository;
import org.springframework.data.repository.reactive.RxJavaCrudRepository;
import org.springframework.data.repository.util.QueryExecutionConverters;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
/**
* Factory to create {@link MongoRepository} instances.
@@ -52,6 +56,11 @@ import org.springframework.util.Assert;
*/
public class MongoRepositoryFactory extends RepositoryFactorySupport {
private static final boolean PROJECT_REACTOR_PRESENT = ClassUtils.isPresent("reactor.core.publisher.Flux",
QueryExecutionConverters.class.getClassLoader());
private static final boolean RXJAVA_OBSERVABLE_PRESENT = ClassUtils.isPresent("rx.Observable",
QueryExecutionConverters.class.getClassLoader());
private static final SpelExpressionParser EXPRESSION_PARSER = new SpelExpressionParser();
private final MongoOperations operations;
@@ -77,6 +86,14 @@ public class MongoRepositoryFactory extends RepositoryFactorySupport {
@Override
protected Class<?> getRepositoryBaseClass(RepositoryMetadata metadata) {
boolean isReactiveRepository = (PROJECT_REACTOR_PRESENT && ReactiveCrudRepository.class.isAssignableFrom(metadata.getRepositoryInterface())) || (
RXJAVA_OBSERVABLE_PRESENT && RxJavaCrudRepository.class.isAssignableFrom(metadata.getRepositoryInterface()));
if (isReactiveRepository) {
return SimpleReactiveMongoRepository.class;
}
boolean isQueryDslRepository = QUERY_DSL_PRESENT
&& QueryDslPredicateExecutor.class.isAssignableFrom(metadata.getRepositoryInterface());

View File

@@ -79,7 +79,7 @@ public class MongoRepositoryFactoryBean<T extends Repository<S, ID>, S, ID exten
RepositoryFactorySupport factory = getFactoryInstance(operations);
if (createIndexesForQueryMethods) {
factory.addQueryCreationListener(new IndexEnsuringQueryCreationListener(operations));
factory.addQueryCreationListener(new IndexEnsuringQueryCreationListener(collectionName -> operations.indexOps(collectionName)));
}
return factory;

View File

@@ -0,0 +1,253 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.support;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.domain.Sort;
import org.springframework.util.Assert;
import reactor.core.publisher.Flux;
import reactor.core.publisher.MonoProcessor;
/**
* A reactive chunk of data restricted by the configured {@link Pageable}.
*
* @author Mark Paluch
*/
abstract class ReactiveChunk<T> implements Slice<T>, Serializable {
private static final long serialVersionUID = 867755909294344406L;
private final Flux<T> content;
private final MonoProcessor<List<T>> processor;
private volatile List<T> contentCache;
private final Pageable pageable;
/**
* Creates a new {@link ReactiveChunk} with the given content and the given governing {@link Pageable}.
*
* @param content must not be {@literal null}.
* @param pageable can be {@literal null}.
*/
public ReactiveChunk(Flux<? extends T> content, Pageable pageable) {
Assert.notNull(content, "Content must not be null!");
this.content = (Flux) content;
this.pageable = pageable;
this.processor = this.content.collectList().doOnSuccess(list -> {
if (list.size() > pageable.getPageSize()) {
contentCache = list.subList(0, pageable.getPageSize());
} else {
contentCache = list;
}
}).subscribe();
}
/*
* (non-Javadoc)
* @see org.springframework.data.domain.Slice#getNumber()
*/
public int getNumber() {
return pageable == null ? 0 : pageable.getPageNumber();
}
/*
* (non-Javadoc)
* @see org.springframework.data.domain.Slice#getSize()
*/
public int getSize() {
return pageable == null ? 0 : pageable.getPageSize();
}
/*
* (non-Javadoc)
* @see org.springframework.data.domain.Slice#getNumberOfElements()
*/
public int getNumberOfElements() {
return getContent0().size();
}
/*
* (non-Javadoc)
* @see org.springframework.data.domain.Slice#hasPrevious()
*/
public boolean hasPrevious() {
return getNumber() > 0;
}
/*
* (non-Javadoc)
* @see org.springframework.data.domain.Slice#isFirst()
*/
public boolean isFirst() {
return !hasPrevious();
}
/*
* (non-Javadoc)
* @see org.springframework.data.domain.Slice#isLast()
*/
public boolean isLast() {
return !hasNext();
}
/*
* (non-Javadoc)
* @see org.springframework.data.domain.Slice#nextPageable()
*/
public Pageable nextPageable() {
return hasNext() ? pageable.next() : null;
}
/*
* (non-Javadoc)
* @see org.springframework.data.domain.Slice#previousPageable()
*/
public Pageable previousPageable() {
if (hasPrevious()) {
return pageable.previousOrFirst();
}
return null;
}
/*
* (non-Javadoc)
* @see org.springframework.data.domain.Slice#hasContent()
*/
public boolean hasContent() {
return !getContent0().isEmpty();
}
/*
* (non-Javadoc)
* @see org.springframework.data.domain.Slice#getContent()
*/
public List<T> getContent() {
return Collections.unmodifiableList(getContent0());
}
/*
* (non-Javadoc)
* @see org.springframework.data.domain.Slice#getSort()
*/
public Sort getSort() {
return pageable == null ? null : pageable.getSort();
}
/*
* (non-Javadoc)
* @see java.lang.Iterable#iterator()
*/
public Iterator<T> iterator() {
return getContent0().iterator();
}
/**
* Applies the given {@link Converter} to the content of the {@link ReactiveChunk}.
*
* @param converter must not be {@literal null}.
* @return
*/
protected <S> List<S> getConvertedContent(Converter<? super T, ? extends S> converter) {
Assert.notNull(converter, "Converter must not be null!");
List<S> result = new ArrayList<S>(getContent0().size());
for (T element : this) {
result.add(converter.convert(element));
}
return result;
}
protected List<T> getContent0() {
if (contentCache != null) {
return contentCache;
}
List<T> list = processor.block();
if (list.size() > pageable.getPageSize()) {
return list.subList(0, pageable.getPageSize());
}
return list;
}
/**
* Returns whether the returned list contains more elements than specified by {@link Pageable#getPageSize()}.
*
* @return
*/
protected boolean containsMore() {
List<T> list = processor.block();
return list.size() > pageable.getPageSize();
}
/*
* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof ReactiveChunk<?>)) {
return false;
}
ReactiveChunk<?> that = (ReactiveChunk<?>) obj;
boolean pageableEqual = this.pageable == null ? that.pageable == null : this.pageable.equals(that.pageable);
return pageableEqual;
}
/*
* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
int result = 17;
result += 31 * (pageable == null ? 0 : pageable.hashCode());
return result;
}
}

View File

@@ -0,0 +1,230 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.support;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.Arrays;
import org.reactivestreams.Publisher;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.model.MappingException;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
import org.springframework.data.mongodb.repository.query.MongoQueryMethod;
import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryMethod;
import org.springframework.data.mongodb.repository.query.ReactivePartTreeMongoQuery;
import org.springframework.data.mongodb.repository.query.ReactiveStringBasedMongoQuery;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.data.repository.core.NamedQueries;
import org.springframework.data.repository.core.RepositoryInformation;
import org.springframework.data.repository.core.RepositoryMetadata;
import org.springframework.data.repository.core.support.RepositoryFactorySupport;
import org.springframework.data.repository.query.EvaluationContextProvider;
import org.springframework.data.repository.query.QueryLookupStrategy;
import org.springframework.data.repository.query.QueryLookupStrategy.Key;
import org.springframework.data.repository.query.RepositoryQuery;
import org.springframework.data.repository.util.QueryExecutionConverters;
import org.springframework.data.repository.util.ReactiveWrapperConverters;
import org.springframework.data.repository.util.ReactiveWrappers;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
/**
 * Factory to create {@link org.springframework.data.mongodb.repository.ReactiveMongoRepository} instances.
 *
 * @author Mark Paluch
 * @since 2.0
 */
public class ReactiveMongoRepositoryFactory extends RepositoryFactorySupport {

	private static final SpelExpressionParser EXPRESSION_PARSER = new SpelExpressionParser();

	private final ReactiveMongoOperations operations;
	private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
	private final ConversionService conversionService;

	/**
	 * Creates a new {@link ReactiveMongoRepositoryFactory} with the given {@link ReactiveMongoOperations}.
	 *
	 * @param mongoOperations must not be {@literal null}.
	 */
	public ReactiveMongoRepositoryFactory(ReactiveMongoOperations mongoOperations) {

		Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null!");

		this.operations = mongoOperations;
		this.mappingContext = mongoOperations.getConverter().getMappingContext();

		// Register wrapper-type converters (e.g. for reactive/async return types) so query execution
		// can adapt results to the declared repository method signature.
		DefaultConversionService conversionService = new DefaultConversionService();
		QueryExecutionConverters.registerConvertersIn(conversionService);

		this.conversionService = conversionService;
		setConversionService(conversionService);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getRepositoryBaseClass(org.springframework.data.repository.core.RepositoryMetadata)
	 */
	@Override
	protected Class<?> getRepositoryBaseClass(RepositoryMetadata metadata) {
		return SimpleReactiveMongoRepository.class;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getTargetRepository(org.springframework.data.repository.core.RepositoryInformation)
	 */
	@Override
	protected Object getTargetRepository(RepositoryInformation information) {

		MongoEntityInformation<?, Serializable> entityInformation = getEntityInformation(information.getDomainType(),
				information);
		return getTargetRepositoryViaReflection(information, entityInformation, operations);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getQueryLookupStrategy(org.springframework.data.repository.query.QueryLookupStrategy.Key, org.springframework.data.repository.query.EvaluationContextProvider)
	 */
	@Override
	protected QueryLookupStrategy getQueryLookupStrategy(Key key, EvaluationContextProvider evaluationContextProvider) {
		return new MongoQueryLookupStrategy(operations, evaluationContextProvider, mappingContext, conversionService);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getEntityInformation(java.lang.Class)
	 */
	@Override
	public <T, ID extends Serializable> MongoEntityInformation<T, ID> getEntityInformation(Class<T> domainClass) {
		return getEntityInformation(domainClass, null);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#validate(org.springframework.data.repository.core.RepositoryMetadata)
	 */
	@Override
	protected void validate(RepositoryMetadata repositoryMetadata) {

		// Reactive repositories cannot be implemented at all without a reactive composition library present.
		if (!ReactiveWrappers.isAvailable()) {
			throw new InvalidDataAccessApiUsageException(
					String.format("Cannot implement Repository %s without reactive library support.",
							repositoryMetadata.getRepositoryInterface().getName()));
		}

		Arrays.stream(repositoryMetadata.getRepositoryInterface().getMethods())
				.forEach(ReactiveMongoRepositoryFactory::validate);
	}

	/**
	 * Reactive MongoDB support requires reactive wrapper support. If return type/parameters are reactive wrapper types,
	 * then it's required to be able to convert these into Publisher.
	 *
	 * @param method the method to validate.
	 * @throws InvalidDataAccessApiUsageException if a reactive wrapper type is used for which no converter to
	 *           {@link Publisher} is available.
	 */
	private static void validate(Method method) {

		if (ReactiveWrappers.supports(method.getReturnType())
				&& !ClassUtils.isAssignable(Publisher.class, method.getReturnType())) {

			if (!ReactiveWrapperConverters.supports(method.getReturnType())) {

				throw new InvalidDataAccessApiUsageException(
						String.format("No reactive type converter found for type %s used in %s, method %s.",
								method.getReturnType().getName(), method.getDeclaringClass().getName(), method));
			}
		}

		Arrays.stream(method.getParameterTypes()) //
				.filter(ReactiveWrappers::supports) //
				.filter(parameterType -> !ClassUtils.isAssignable(Publisher.class, parameterType)) //
				.filter(parameterType -> !ReactiveWrapperConverters.supports(parameterType)) //
				.forEach(parameterType -> {
					throw new InvalidDataAccessApiUsageException(
							String.format("No reactive type converter found for type %s used in %s, method %s.",
									parameterType.getName(), method.getDeclaringClass().getName(), method));
				});
	}

	/**
	 * Looks up the {@link MongoEntityInformation} for the given domain class, optionally carrying the id type
	 * from the given {@link RepositoryInformation}.
	 *
	 * @throws MappingException if no persistent entity metadata is available for the domain class.
	 */
	@SuppressWarnings("unchecked")
	private <T, ID extends Serializable> MongoEntityInformation<T, ID> getEntityInformation(Class<T> domainClass,
			RepositoryInformation information) {

		MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(domainClass);

		if (entity == null) {
			throw new MappingException(
					String.format("Could not lookup mapping metadata for domain class %s!", domainClass.getName()));
		}

		return new MappingMongoEntityInformation<T, ID>((MongoPersistentEntity<T>) entity,
				information != null ? (Class<ID>) information.getIdType() : null);
	}

	/**
	 * {@link QueryLookupStrategy} to create {@link PartTreeMongoQuery} instances.
	 *
	 * @author Mark Paluch
	 */
	private static class MongoQueryLookupStrategy implements QueryLookupStrategy {

		private final ReactiveMongoOperations operations;
		private final EvaluationContextProvider evaluationContextProvider;
		private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
		private final ConversionService conversionService;

		MongoQueryLookupStrategy(ReactiveMongoOperations operations, EvaluationContextProvider evaluationContextProvider,
				MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
				ConversionService conversionService) {

			this.operations = operations;
			this.evaluationContextProvider = evaluationContextProvider;
			this.mappingContext = mappingContext;
			this.conversionService = conversionService;
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.repository.query.QueryLookupStrategy#resolveQuery(java.lang.reflect.Method, org.springframework.data.repository.core.RepositoryMetadata, org.springframework.data.projection.ProjectionFactory, org.springframework.data.repository.core.NamedQueries)
		 */
		@Override
		public RepositoryQuery resolveQuery(Method method, RepositoryMetadata metadata, ProjectionFactory factory,
				NamedQueries namedQueries) {

			MongoQueryMethod queryMethod = new ReactiveMongoQueryMethod(method, metadata, factory, mappingContext);
			String namedQueryName = queryMethod.getNamedQueryName();

			// Resolution precedence: named query, then @Query annotation, then derived (part-tree) query.
			if (namedQueries.hasQuery(namedQueryName)) {
				String namedQuery = namedQueries.getQuery(namedQueryName);
				return new ReactiveStringBasedMongoQuery(namedQuery, queryMethod, operations, EXPRESSION_PARSER,
						evaluationContextProvider, conversionService);
			} else if (queryMethod.hasAnnotatedQuery()) {
				return new ReactiveStringBasedMongoQuery(queryMethod, operations, EXPRESSION_PARSER, evaluationContextProvider,
						conversionService);
			} else {
				return new ReactivePartTreeMongoQuery(queryMethod, operations, conversionService);
			}
		}
	}
}

View File

@@ -0,0 +1,120 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.support;
import java.io.Serializable;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.repository.Repository;
import org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport;
import org.springframework.data.repository.core.support.RepositoryFactorySupport;
import org.springframework.util.Assert;
/**
 * {@link org.springframework.beans.factory.FactoryBean} to create
 * {@link org.springframework.data.mongodb.repository.ReactiveMongoRepository} instances.
 *
 * @author Mark Paluch
 * @since 2.0
 * @see org.springframework.data.repository.reactive.ReactivePagingAndSortingRepository
 * @see org.springframework.data.repository.reactive.RxJavaPagingAndSortingRepository
 */
public class ReactiveMongoRepositoryFactoryBean<T extends Repository<S, ID>, S, ID extends Serializable>
		extends RepositoryFactoryBeanSupport<T, S, ID> {

	private ReactiveMongoOperations operations;
	private boolean createIndexesForQueryMethods = false;
	private boolean mappingContextConfigured = false;

	/**
	 * Configures the {@link ReactiveMongoOperations} to be used.
	 *
	 * @param operations the operations to set
	 */
	public void setReactiveMongoOperations(ReactiveMongoOperations operations) {
		this.operations = operations;
	}

	/**
	 * Configures whether to automatically create indexes for the properties referenced in a query method.
	 *
	 * @param createIndexesForQueryMethods the createIndexesForQueryMethods to set
	 */
	public void setCreateIndexesForQueryMethods(boolean createIndexesForQueryMethods) {
		this.createIndexesForQueryMethods = createIndexesForQueryMethods;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport#setMappingContext(org.springframework.data.mapping.context.MappingContext)
	 */
	@Override
	protected void setMappingContext(MappingContext<?, ?> mappingContext) {

		super.setMappingContext(mappingContext);

		// Remember that a context was supplied externally so afterPropertiesSet() does not
		// overwrite it with the one derived from the configured operations.
		this.mappingContextConfigured = true;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * org.springframework.data.repository.support.RepositoryFactoryBeanSupport
	 * #createRepositoryFactory()
	 */
	@Override
	protected final RepositoryFactorySupport createRepositoryFactory() {

		RepositoryFactorySupport factory = getFactoryInstance(operations);

		if (createIndexesForQueryMethods) {
			factory.addQueryCreationListener(new IndexEnsuringQueryCreationListener(operations::indexOps));
		}

		return factory;
	}

	/**
	 * Creates and initializes a {@link RepositoryFactorySupport} instance.
	 *
	 * @param operations
	 * @return
	 */
	protected RepositoryFactorySupport getFactoryInstance(ReactiveMongoOperations operations) {
		return new ReactiveMongoRepositoryFactory(operations);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * org.springframework.data.repository.support.RepositoryFactoryBeanSupport
	 * #afterPropertiesSet()
	 */
	@Override
	public void afterPropertiesSet() {

		super.afterPropertiesSet();

		Assert.notNull(operations, "ReactiveMongoOperations must not be null!");

		if (mappingContextConfigured) {
			return;
		}

		// No context was set explicitly, so fall back to the one known to the configured operations.
		setMappingContext(operations.getConverter().getMappingContext());
	}
}

View File

@@ -0,0 +1,169 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.support;
import java.util.List;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.MonoProcessor;
/**
 * Reactive {@code Page} implementation. The total count {@link Mono} is subscribed eagerly on construction;
 * total-dependent accessors block on it (and on the inherited content) when first invoked and cache the result.
 *
 * @param <T> the type of which the page consists.
 * @author Mark Paluch
 * @since 2.0
 */
public class ReactivePageImpl<T> extends ReactiveChunk<T> implements Page<T> {

	private static final long serialVersionUID = 867755909294344406L;

	// NOTE(review): relies on this Reactor snapshot's Mono.subscribe() returning a MonoProcessor —
	// confirm against the reactor-core version in use.
	private final MonoProcessor<Long> totalMono;

	// Lazily computed total; volatile for safe publication, null until first access via getTotal0().
	private volatile Long totalValueCache;

	// Kept in addition to the superclass copy because ReactiveChunk does not expose its Pageable.
	private final Pageable pageable;

	/**
	 * Constructor of {@code PageImpl}.
	 *
	 * @param content the content of this page, must not be {@literal null}.
	 * @param pageable the paging information, can be {@literal null}.
	 * @param totalMono the total amount of items available. The total might be adapted considering the length of the
	 *          content given, if it is going to be the content of the last page. This is in place to mitigate
	 *          inconsistencies
	 */
	public ReactivePageImpl(Flux<? extends T> content, Pageable pageable, Mono<Long> totalMono) {
		super(content, pageable);
		this.pageable = pageable;
		this.totalMono = totalMono.subscribe();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.domain.Page#getTotalPages()
	 */
	@Override
	public int getTotalPages() {
		// A page size of 0 (e.g. null Pageable) is reported as a single page.
		return getSize() == 0 ? 1 : (int) Math.ceil((double) getTotal0() / (double) getSize());
	}

	/**
	 * Blocks for the total count on first access and caches it. If the content belongs to the last page, the
	 * total is adapted to {@code offset + content size} to mitigate count/content inconsistencies.
	 */
	private long getTotal0() {
		if (totalValueCache == null) {
			long total = totalMono.block();
			List<T> content = getContent();
			this.totalValueCache = !content.isEmpty() && pageable != null
					&& pageable.getOffset() + pageable.getPageSize() > total ? pageable.getOffset() + content.size() : total;
		}
		return totalValueCache;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.domain.Page#getTotalElements()
	 */
	@Override
	public long getTotalElements() {
		return getTotal0();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.domain.Slice#hasNext()
	 */
	@Override
	public boolean hasNext() {
		return getNumber() + 1 < getTotalPages();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.domain.Slice#isLast()
	 */
	@Override
	public boolean isLast() {
		return !hasNext();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.domain.Page#map(org.springframework.core.convert.converter.Converter)
	 */
	@Override
	public <S> Page<S> map(Converter<? super T, ? extends S> converter) {
		// Conversion materializes the content eagerly (blocks); the result wraps already-resolved values.
		return new ReactivePageImpl<S>(Flux.fromIterable(getConvertedContent(converter)), pageable, Mono.just(getTotal0()));
	}

	/*
	 * (non-Javadoc)
	 * @see java.lang.Object#toString()
	 */
	@Override
	public String toString() {
		String contentType = "UNKNOWN";
		List<T> content = getContent();
		if (content.size() > 0) {
			contentType = content.get(0).getClass().getName();
		}
		return String.format("Page %s of %d containing %s instances", getNumber() + 1, getTotalPages(), contentType);
	}

	/*
	 * (non-Javadoc)
	 * @see java.lang.Object#equals(java.lang.Object)
	 */
	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (!(obj instanceof ReactivePageImpl<?>)) {
			return false;
		}
		ReactivePageImpl<?> that = (ReactivePageImpl<?>) obj;
		// Note: comparing totals forces a blocking materialization on both sides.
		return getTotal0() == that.getTotal0() && super.equals(obj);
	}

	/*
	 * (non-Javadoc)
	 * @see java.lang.Object#hashCode()
	 */
	@Override
	public int hashCode() {
		int result = 17;
		result += 31 * (int) (getTotal0() ^ getTotal0() >>> 32);
		result += 31 * super.hashCode();
		return result;
	}
}

View File

@@ -0,0 +1,66 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.support;
import java.util.List;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.domain.SliceImpl;
import reactor.core.publisher.Flux;
/**
* Reactive {@code Page} implementation.
*
* @param <T> the type of which the page consists.
* @author Mark Paluch
* @since 2.0
*/
public class ReactiveSliceImpl<T> extends ReactiveChunk<T> {
private static final long serialVersionUID = 867755909294344406L;
private final Pageable pageable;
public ReactiveSliceImpl(Flux<T> content, Pageable pageable) {
super(content, pageable);
this.pageable = pageable;
}
public boolean hasNext() {
return containsMore();
}
public <S> Slice<S> map(Converter<? super T, ? extends S> converter) {
return new SliceImpl<>(this.getConvertedContent(converter), pageable, this.hasNext());
}
public String toString() {
String contentType = "UNKNOWN";
List content = this.getContent();
if (content.size() > 0) {
contentType = content.get(0).getClass().getName();
}
return String.format("Slice %d containing %s instances",
this.getNumber(), contentType);
}
}

View File

@@ -0,0 +1,352 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.support;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.reactivestreams.Publisher;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.repository.ReactiveMongoRepository;
import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
import org.springframework.util.Assert;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/**
* Reactive repository base implementation for Mongo.
*
* @author Mark Paluch
* @since 2.0
*/
public class SimpleReactiveMongoRepository<T, ID extends Serializable> implements ReactiveMongoRepository<T, ID> {
private final ReactiveMongoOperations mongoOperations;
private final MongoEntityInformation<T, ID> entityInformation;
/**
* Creates a new {@link SimpleReactiveMongoRepository} for the given {@link MongoEntityInformation} and
* {@link ReactiveMongoOperations}.
*
* @param metadata must not be {@literal null}.
* @param mongoOperations must not be {@literal null}.
*/
public SimpleReactiveMongoRepository(MongoEntityInformation<T, ID> metadata,
ReactiveMongoOperations mongoOperations) {
Assert.notNull(mongoOperations);
Assert.notNull(metadata);
this.entityInformation = metadata;
this.mongoOperations = mongoOperations;
}
public Mono<T> findOne(ID id) {
Assert.notNull(id, "The given id must not be null!");
return mongoOperations.findById(id, entityInformation.getJavaType(), entityInformation.getCollectionName());
}
public Mono<T> findOne(Mono<ID> mono) {
Assert.notNull(mono, "The given id must not be null!");
return mono.then(
id -> mongoOperations.findById(id, entityInformation.getJavaType(), entityInformation.getCollectionName()));
}
public <S extends T> Mono<S> findOne(Example<S> example) {
Assert.notNull(example, "Sample must not be null!");
Query q = new Query(new Criteria().alike(example));
return mongoOperations.findOne(q, example.getProbeType(), entityInformation.getCollectionName());
}
public Mono<Boolean> exists(ID id) {
Assert.notNull(id, "The given id must not be null!");
return mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(),
entityInformation.getCollectionName());
}
public Mono<Boolean> exists(Mono<ID> mono) {
Assert.notNull(mono, "The given id must not be null!");
return mono.then(id -> mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(),
entityInformation.getCollectionName()));
}
public <S extends T> Mono<Boolean> exists(Example<S> example) {
Assert.notNull(example, "Sample must not be null!");
Query q = new Query(new Criteria().alike(example));
return mongoOperations.exists(q, example.getProbeType(), entityInformation.getCollectionName());
}
@Override
public Flux<T> findAll() {
return findAll(new Query());
}
@Override
public Flux<T> findAll(Iterable<ID> ids) {
Assert.notNull(ids, "The given Iterable of Id's must not be null!");
Set<ID> parameters = new HashSet<ID>(tryDetermineRealSizeOrReturn(ids, 10));
for (ID id : ids) {
parameters.add(id);
}
return findAll(new Query(new Criteria(entityInformation.getIdAttribute()).in(parameters)));
}
@Override
public Flux<T> findAll(Publisher<ID> idStream) {
Assert.notNull(idStream, "The given Publisher of Id's must not be null!");
return Flux.from(idStream).buffer().flatMap(this::findAll);
}
@Override
public Mono<Page<T>> findAll(Pageable pageable) {
Assert.notNull(pageable, "The given Pageable must not be null!");
Mono<Long> count = count();
Flux<T> content = findAll(new Query().with(pageable));
return Mono.fromCallable(() -> new ReactivePageImpl<>(content, pageable, count));
}
@Override
public Flux<T> findAll(Sort sort) {
return findAll(new Query().with(sort));
}
@Override
public <S extends T> Flux<S> findAll(Example<S> example, Sort sort) {
Assert.notNull(example, "Sample must not be null!");
Query q = new Query(new Criteria().alike(example));
if (sort != null) {
q.with(sort);
}
return mongoOperations.find(q, example.getProbeType(), entityInformation.getCollectionName());
}
@Override
public <S extends T> Flux<S> findAll(Example<S> example) {
return findAll(example, null);
}
public Mono<Long> count() {
return mongoOperations.count(new Query(), entityInformation.getCollectionName());
}
public <S extends T> Mono<Long> count(Example<S> example) {
Assert.notNull(example, "Sample must not be null!");
Query q = new Query(new Criteria().alike(example));
return mongoOperations.count(q, example.getProbeType(), entityInformation.getCollectionName());
}
@Override
public <S extends T> Mono<S> insert(S entity) {
Assert.notNull(entity, "Entity must not be null!");
return mongoOperations.insert(entity, entityInformation.getCollectionName());
}
@Override
public <S extends T> Flux<S> insert(Iterable<S> entities) {
Assert.notNull(entities, "The given Iterable of entities must not be null!");
List<S> list = convertIterableToList(entities);
if (list.isEmpty()) {
return Flux.empty();
}
return Flux.from(mongoOperations.insertAll(list));
}
@Override
public <S extends T> Flux<S> insert(Publisher<S> entities) {
Assert.notNull(entities, "The given Publisher of entities must not be null!");
return Flux.from(entities).flatMap(entity -> {
return mongoOperations.insert(entity, entityInformation.getCollectionName());
});
}
public <S extends T> Mono<S> save(S entity) {
Assert.notNull(entity, "Entity must not be null!");
if (entityInformation.isNew(entity)) {
return mongoOperations.insert(entity, entityInformation.getCollectionName());
}
return mongoOperations.save(entity, entityInformation.getCollectionName());
}
public <S extends T> Flux<S> save(Iterable<S> entities) {
Assert.notNull(entities, "The given Iterable of entities must not be null!");
List<S> result = convertIterableToList(entities);
boolean allNew = true;
for (S entity : entities) {
if (allNew && !entityInformation.isNew(entity)) {
allNew = false;
}
}
if (allNew) {
return Flux.from(mongoOperations.insertAll(result));
}
List<Mono<S>> monos = new ArrayList<>();
for (S entity : result) {
monos.add(save(entity));
}
return Flux.merge(monos);
}
@Override
public <S extends T> Flux<S> save(Publisher<S> entityStream) {
Assert.notNull(entityStream, "The given Publisher of entities must not be null!");
return Flux.from(entityStream).flatMap(entity -> {
if (entityInformation.isNew(entity)) {
return mongoOperations.insert(entity, entityInformation.getCollectionName()).then(aVoid -> Mono.just(entity));
}
return mongoOperations.save(entity, entityInformation.getCollectionName()).then(aVoid -> Mono.just(entity));
});
}
public Mono<Void> delete(ID id) {
Assert.notNull(id, "The given id must not be null!");
return mongoOperations
.remove(getIdQuery(id), entityInformation.getJavaType(), entityInformation.getCollectionName())
.then();
}
public Mono<Void> delete(T entity) {
Assert.notNull(entity, "The given entity must not be null!");
return delete(entityInformation.getId(entity));
}
public Mono<Void> delete(Iterable<? extends T> entities) {
Assert.notNull(entities, "The given Iterable of entities must not be null!");
return Flux.fromIterable(entities).flatMap(entity -> delete(entityInformation.getId(entity))).then();
}
@Override
public Mono<Void> delete(Publisher<? extends T> entityStream) {
Assert.notNull(entityStream, "The given Publisher of entities must not be null!");
return Flux.from(entityStream).flatMap(entity -> delete(entityInformation.getId(entity))).then();
}
/**
 * Removes all documents from the entity's collection.
 *
 * @return a {@link Mono} signalling completion of the remove operation.
 */
public Mono<Void> deleteAll() {

	// Use the no-arg then() for consistency with delete(ID); the previous
	// then(deleteResult -> Mono.empty()) ignored its argument anyway and both
	// forms complete empty once the remove finished.
	return mongoOperations.remove(new Query(), entityInformation.getCollectionName()).then();
}
// Builds a Query matching the document whose id attribute equals the given value.
private Query getIdQuery(Object id) {
	Criteria idCriteria = getIdCriteria(id);
	return new Query(idCriteria);
}
// Builds a Criteria comparing the entity's id attribute against the given value.
private Criteria getIdCriteria(Object id) {
	String idAttribute = entityInformation.getIdAttribute();
	return where(idAttribute).is(id);
}
// Executes the given query against the entity's collection; a null query
// short-circuits to an empty Flux.
private Flux<T> findAll(Query query) {
	return query == null //
			? Flux.empty() //
			: mongoOperations.find(query, entityInformation.getJavaType(), entityInformation.getCollectionName());
}
/**
 * Materializes the given {@link Iterable} into a {@link List}. A {@link List} input is
 * returned as-is (no copy), other {@link Collection}s are copied via the
 * {@link ArrayList} copy constructor, and arbitrary iterables are drained element by
 * element. A {@literal null} or empty input yields an immutable empty list.
 *
 * @param entities may be {@literal null}.
 * @return never {@literal null}.
 */
@SuppressWarnings("unchecked")
private static <T> List<T> convertIterableToList(Iterable<T> entities) {

	// Handle null up front instead of re-checking it mid-method.
	if (entities == null) {
		return Collections.emptyList();
	}

	if (entities instanceof List) {
		return (List<T>) entities;
	}

	if (entities instanceof Collection) {
		Collection<T> collection = (Collection<T>) entities;
		return collection.isEmpty() ? Collections.<T> emptyList() : new ArrayList<T>(collection);
	}

	// Plain Iterable of unknown size: drain it manually.
	List<T> list = new ArrayList<T>();
	for (T entity : entities) {
		list.add(entity);
	}
	return list;
}
// Returns the exact size when the iterable is a Collection, 0 for null input,
// and the supplied default for any other Iterable whose size is unknown.
private static int tryDetermineRealSizeOrReturn(Iterable<?> iterable, int defaultSize) {

	if (iterable == null) {
		return 0;
	}

	if (iterable instanceof Collection) {
		return ((Collection<?>) iterable).size();
	}

	return defaultSize;
}
}

View File

@@ -18,11 +18,9 @@ package org.springframework.data.mongodb.config;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import example.first.First;
import example.second.Second;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;
import org.junit.Rule;
@@ -45,11 +43,15 @@ import org.springframework.test.util.ReflectionTestUtils;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
import example.first.First;
import example.second.Second;
/**
* Unit tests for {@link AbstractMongoConfiguration}.
*
* @author Oliver Gierke
* @author Thomas Darimont
* @author Mark Paluch
*/
public class AbstractMongoConfigurationUnitTests {
@@ -63,7 +65,7 @@ public class AbstractMongoConfigurationUnitTests {
AbstractMongoConfiguration configuration = new SampleMongoConfiguration();
assertThat(configuration.getMappingBasePackage(), is(SampleMongoConfiguration.class.getPackage().getName()));
assertThat(configuration.getInitialEntitySet(), hasSize(1));
assertThat(configuration.getInitialEntitySet(), hasSize(2));
assertThat(configuration.getInitialEntitySet(), hasItem(Entity.class));
}
@@ -72,9 +74,7 @@ public class AbstractMongoConfigurationUnitTests {
*/
@Test
public void doesNotScanPackageIfMappingPackageIsNull() throws ClassNotFoundException {
assertScanningDisabled(null);
}
/**
@@ -169,12 +169,12 @@ public class AbstractMongoConfigurationUnitTests {
AbstractMongoConfiguration configuration = new SampleMongoConfiguration() {
@Override
protected String getMappingBasePackage() {
return value;
protected Collection<String> getMappingBasePackages() {
return Collections.singleton(value);
}
};
assertThat(configuration.getMappingBasePackage(), is(value));
assertThat(configuration.getMappingBasePackages(), hasItem(value));
assertThat(configuration.getInitialEntitySet(), hasSize(0));
}

View File

@@ -0,0 +1,69 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;
import static org.assertj.core.api.AssertionsForInterfaceTypes.*;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoClients;
/**
* Integration tests for {@link AbstractReactiveMongoConfiguration}.
*
* @author Mark Paluch
*/
@RunWith(SpringRunner.class)
@ContextConfiguration(classes = AbstractReactiveMongoConfigurationIntegrationTests.ReactiveConfiguration.class)
public class AbstractReactiveMongoConfigurationIntegrationTests {
@Autowired ApplicationContext context;
/**
* @see DATAMONGO-1444
*/
@Test
public void contextShouldContainTemplate() {
assertThat(context.getBean(SimpleReactiveMongoDatabaseFactory.class)).isNotNull();
assertThat(context.getBean(ReactiveMongoOperations.class)).isNotNull();
assertThat(context.getBean(ReactiveMongoTemplate.class)).isNotNull();
}
@Configuration
static class ReactiveConfiguration extends AbstractReactiveMongoConfiguration {
@Override
public MongoClient mongoClient() {
return MongoClients.create();
}
@Override
protected String getDatabaseName() {
return "database";
}
}
}

View File

@@ -0,0 +1,226 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoTypeMapper;
import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.expression.spel.support.StandardEvaluationContext;
import org.springframework.test.util.ReflectionTestUtils;
import com.mongodb.Mongo;
import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoClients;
import example.first.First;
import example.second.Second;
/**
* Unit tests for {@link AbstractReactiveMongoConfiguration}.
*
* @author Mark Paluch
*/
public class AbstractReactiveMongoConfigurationUnitTests {
@Rule public ExpectedException exception = ExpectedException.none();
/**
* @see DATAMONGO-1444
*/
@Test
public void usesConfigClassPackageAsBaseMappingPackage() throws ClassNotFoundException {
AbstractReactiveMongoConfiguration configuration = new SampleMongoConfiguration();
assertThat(configuration.getMappingBasePackages(), hasItem(SampleMongoConfiguration.class.getPackage().getName()));
assertThat(configuration.getInitialEntitySet(), hasSize(2));
assertThat(configuration.getInitialEntitySet(), hasItem(Entity.class));
}
/**
* @see DATAMONGO-1444
*/
@Test
public void doesNotScanPackageIfMappingPackageIsNull() throws ClassNotFoundException {
assertScanningDisabled(null);
}
/**
* @see DATAMONGO-1444
*/
@Test
public void doesNotScanPackageIfMappingPackageIsEmpty() throws ClassNotFoundException {
assertScanningDisabled("");
assertScanningDisabled(" ");
}
/**
* @see DATAMONGO-1444
*/
@Test
public void containsMongoDbFactoryButNoMongoBean() {
AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class);
assertThat(context.getBean(SimpleReactiveMongoDatabaseFactory.class), is(notNullValue()));
exception.expect(NoSuchBeanDefinitionException.class);
context.getBean(Mongo.class);
context.close();
}
/**
* @see DATAMONGO-1444
*/
@Test
public void returnsUninitializedMappingContext() throws Exception {
SampleMongoConfiguration configuration = new SampleMongoConfiguration();
MongoMappingContext context = configuration.mongoMappingContext();
assertThat(context.getPersistentEntities(), is(emptyIterable()));
context.initialize();
assertThat(context.getPersistentEntities(), is(not(emptyIterable())));
}
/**
* @see DATAMONGO-1444
*/
@Test
public void lifecycleCallbacksAreInvokedInAppropriateOrder() {
AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class);
MongoMappingContext mappingContext = context.getBean(MongoMappingContext.class);
BasicMongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(Entity.class);
StandardEvaluationContext spElContext = (StandardEvaluationContext) ReflectionTestUtils.getField(entity, "context");
assertThat(spElContext.getBeanResolver(), is(notNullValue()));
context.close();
}
/**
* @see DATAMONGO-1444
*/
@Test
public void shouldBeAbleToConfigureCustomTypeMapperViaJavaConfig() {
AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class);
MongoTypeMapper typeMapper = context.getBean(CustomMongoTypeMapper.class);
MappingMongoConverter mmc = context.getBean(MappingMongoConverter.class);
assertThat(mmc, is(notNullValue()));
assertThat(mmc.getTypeMapper(), is(typeMapper));
context.close();
}
/**
* @see DATAMONGO-1444
*/
@Test
@SuppressWarnings("unchecked")
public void allowsMultipleEntityBasePackages() throws ClassNotFoundException {
ConfigurationWithMultipleBasePackages config = new ConfigurationWithMultipleBasePackages();
Set<Class<?>> entities = config.getInitialEntitySet();
assertThat(entities, hasSize(2));
assertThat(entities, hasItems(First.class, Second.class));
}
private static void assertScanningDisabled(final String value) throws ClassNotFoundException {
AbstractReactiveMongoConfiguration configuration = new SampleMongoConfiguration() {
@Override
protected Collection<String> getMappingBasePackages() {
return Collections.singleton(value);
}
};
assertThat(configuration.getMappingBasePackages(), hasItem(value));
assertThat(configuration.getInitialEntitySet(), hasSize(0));
}
@Configuration
static class SampleMongoConfiguration extends AbstractReactiveMongoConfiguration {
@Override
protected String getDatabaseName() {
return "database";
}
@Override
public MongoClient mongoClient() {
return MongoClients.create();
}
@Bean
@Override
public MappingMongoConverter mappingMongoConverter() throws Exception {
MappingMongoConverter converter = super.mappingMongoConverter();
converter.setTypeMapper(typeMapper());
return converter;
}
@Bean
public MongoTypeMapper typeMapper() {
return new CustomMongoTypeMapper();
}
}
static class ConfigurationWithMultipleBasePackages extends AbstractReactiveMongoConfiguration {
@Override
protected String getDatabaseName() {
return "test";
}
@Override
public MongoClient mongoClient() {
return MongoClients.create();
}
@Override
protected Collection<String> getMappingBasePackages() {
return Arrays.asList("example.first", "example.second");
}
}
@Document
static class Entity {}
}

View File

@@ -54,8 +54,7 @@ public class DefaultIndexOperationsIntegrationTests {
this.collection = this.template.getDb().getCollection(collectionName, Document.class);
this.collection.dropIndexes();
this.indexOps = new DefaultIndexOperations(template, collectionName);
this.indexOps = new DefaultIndexOperations(template.getMongoDbFactory(), collectionName);
}
/**

View File

@@ -291,7 +291,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
GenericApplicationContext applicationContext = new GenericApplicationContext();
applicationContext.getBeanFactory().registerSingleton("foo",
new MongoPersistentEntityIndexCreator(new MongoMappingContext(), factory));
new MongoPersistentEntityIndexCreator(new MongoMappingContext(), template));
applicationContext.refresh();
GenericApplicationContext spy = spy(applicationContext);

View File

@@ -49,6 +49,7 @@ import com.mongodb.client.FindIterable;
public class QueryCursorPreparerUnitTests {
@Mock MongoDbFactory factory;
@Mock MongoExceptionTranslator exceptionTranslatorMock;
@Mock FindIterable<Document> cursor;
@Mock FindIterable<Document> cursorToUse;
@@ -56,6 +57,7 @@ public class QueryCursorPreparerUnitTests {
@Before
public void setUp() {
when(factory.getExceptionTranslator()).thenReturn(exceptionTranslatorMock);
when(cursor.batchSize(anyInt())).thenReturn(cursor);
when(cursor.filter(any(Document.class))).thenReturn(cursor);
when(cursor.limit(anyInt())).thenReturn(cursor);

View File

@@ -0,0 +1,242 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import static com.sun.prism.impl.Disposer.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.junit.Assume.*;
import java.util.List;
import org.bson.Document;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import com.mongodb.MongoException;
import com.mongodb.ReadPreference;
import com.mongodb.reactivestreams.client.MongoDatabase;
import reactor.core.publisher.Flux;
import reactor.test.TestSubscriber;
/**
* Integration test for {@link ReactiveMongoTemplate} execute methods.
*
* @author Mark Paluch
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:reactive-infrastructure.xml")
public class ReactiveMongoTemplateExecuteTests {
private static final org.springframework.data.util.Version THREE = org.springframework.data.util.Version.parse("3.0");
@Autowired SimpleReactiveMongoDatabaseFactory factory;
@Autowired ReactiveMongoOperations operations;
@Rule public ExpectedException thrown = ExpectedException.none();
org.springframework.data.util.Version mongoVersion;
@Before
public void setUp() {

	// Drop the test collections directly. The previous call to cleanUp() was resolved
	// through an accidental static import of the JavaFX-internal
	// com.sun.prism.impl.Disposer.cleanUp and never touched MongoDB at all.
	tearDown();

	// Cache the server version once; buildInfo is cheap but there is no need to
	// re-query it for every test method.
	if (mongoVersion == null) {
		org.bson.Document result = operations.executeCommand("{ buildInfo: 1 }").block();
		mongoVersion = org.springframework.data.util.Version.parse(result.get("version").toString());
	}
}
@After
public void tearDown() {
operations.dropCollection("person").block();
operations.dropCollection(Person.class).block();
operations.dropCollection("execute_test").block();
operations.dropCollection("execute_test1").block();
operations.dropCollection("execute_test2").block();
operations.dropCollection("execute_index_test").block();
}
/**
* @see DATAMONGO-1444
*/
@Test
public void executeCommandJsonCommandShouldReturnSingleResponse() throws Exception {
Document document = operations.executeCommand("{ buildInfo: 1 }").block();
assertThat(document, hasKey("version"));
}
/**
* @see DATAMONGO-1444
*/
@Test
public void executeCommandDocumentCommandShouldReturnSingleResponse() throws Exception {
Document document = operations.executeCommand(new Document("buildInfo", 1)).block();
assertThat(document, hasKey("version"));
}
/**
* @see DATAMONGO-1444
*/
@Test
public void executeCommandJsonCommandShouldReturnMultipleResponses() throws Exception {
assumeTrue(mongoVersion.isGreaterThan(THREE));
operations.executeCommand("{ insert: 'execute_test', documents: [{},{},{}]}").block();
TestSubscriber<Document> subscriber = TestSubscriber.create();
operations.executeCommand("{ find: 'execute_test'}").subscribe(subscriber);
subscriber.awaitAndAssertNextValueCount(1);
subscriber.assertValuesWith(document -> {
assertThat(document, hasKey("waitedMS"));
assertThat(document, hasKey("cursor"));
});
}
/**
* @see DATAMONGO-1444
*/
@Test
public void executeCommandJsonCommandShouldTranslateExceptions() throws Exception {
TestSubscriber<Document> testSubscriber = TestSubscriber.subscribe(operations.executeCommand("{ unknown: 1 }"));
testSubscriber.await().assertError(InvalidDataAccessApiUsageException.class);
}
/**
* @see DATAMONGO-1444
*/
@Test
public void executeCommandDocumentCommandShouldTranslateExceptions() throws Exception {
TestSubscriber<Document> testSubscriber = TestSubscriber
.subscribe(operations.executeCommand(new Document("unknown", 1)));
testSubscriber.await().assertError(InvalidDataAccessApiUsageException.class);
}
/**
* @see DATAMONGO-1444
*/
@Test
public void executeCommandWithReadPreferenceCommandShouldTranslateExceptions() throws Exception {
TestSubscriber<Document> testSubscriber = TestSubscriber
.subscribe(operations.executeCommand(new Document("unknown", 1), ReadPreference.nearest()));
testSubscriber.await().assertError(InvalidDataAccessApiUsageException.class);
}
/**
* @see DATAMONGO-1444
*/
@Test
public void executeOnDatabaseShouldExecuteCommand() throws Exception {
operations.executeCommand("{ insert: 'execute_test', documents: [{},{},{}]}").block();
operations.executeCommand("{ insert: 'execute_test1', documents: [{},{},{}]}").block();
operations.executeCommand("{ insert: 'execute_test2', documents: [{},{},{}]}").block();
Flux<Document> execute = operations.execute(MongoDatabase::listCollections);
List<Document> documents = execute.filter(document -> document.getString("name").startsWith("execute_test"))
.collectList().block();
assertThat(documents, hasSize(3));
}
/**
* @see DATAMONGO-1444
*/
@Test
public void executeOnDatabaseShouldDeferExecution() throws Exception {
operations.execute(db -> {
throw new MongoException(50, "hi there");
});
// the assertion here is that the exception is not thrown
}
/**
* @see DATAMONGO-1444
*/
@Test
public void executeOnDatabaseShouldShouldTranslateExceptions() throws Exception {
TestSubscriber<Document> testSubscriber = TestSubscriber.create();
Flux<Document> execute = operations.execute(db -> {
throw new MongoException(50, "hi there");
});
execute.subscribe(testSubscriber);
testSubscriber.await().assertError(UncategorizedMongoDbException.class);
}
/**
* @see DATAMONGO-1444
*/
@Test
public void executeOnCollectionWithTypeShouldReturnFindResults() throws Exception {
operations.executeCommand("{ insert: 'person', documents: [{},{},{}]}").block();
TestSubscriber<Document> testSubscriber = TestSubscriber.create();
Flux<Document> execute = operations.execute(Person.class, collection -> collection.find());
execute.subscribe(testSubscriber);
testSubscriber.awaitAndAssertNextValueCount(3).assertComplete();
}
/**
* @see DATAMONGO-1444
*/
@Test
public void executeOnCollectionWithNameShouldReturnFindResults() throws Exception {
operations.executeCommand("{ insert: 'execute_test', documents: [{},{},{}]}").block();
TestSubscriber<Document> testSubscriber = TestSubscriber.create();
Flux<Document> execute = operations.execute("execute_test", collection -> collection.find());
execute.subscribe(testSubscriber);
testSubscriber.awaitAndAssertNextValueCount(3).assertComplete();
}
}

View File

@@ -0,0 +1,210 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.util.List;
import org.bson.Document;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.annotation.Id;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.Index.Duplicates;
import org.springframework.data.mongodb.core.index.IndexField;
import org.springframework.data.mongodb.core.index.IndexInfo;
import org.springframework.data.util.Version;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import com.mongodb.reactivestreams.client.ListIndexesPublisher;
import com.mongodb.reactivestreams.client.MongoCollection;
import lombok.Data;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.TestSubscriber;
/**
 * Integration test for {@link ReactiveMongoTemplate} index operations.
*
* @author Mark Paluch
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:reactive-infrastructure.xml")
public class ReactiveMongoTemplateIndexTests {
private static final org.springframework.data.util.Version TWO_DOT_EIGHT = org.springframework.data.util.Version
.parse("2.8");
@Autowired SimpleReactiveMongoDatabaseFactory factory;
@Autowired ReactiveMongoTemplate template;
@Rule public ExpectedException thrown = ExpectedException.none();
Version mongoVersion;
@Before
public void setUp() {
cleanDb();
queryMongoVersionIfNecessary();
}
@After
public void cleanUp() {}
private void queryMongoVersionIfNecessary() {
if (mongoVersion == null) {
org.bson.Document result = template.executeCommand("{ buildInfo: 1 }").block();
mongoVersion = Version.parse(result.get("version").toString());
}
}
private void cleanDb() {
template.dropCollection(Person.class).block();
}
/**
* @see DATAMONGO-1444
*/
@Test
@SuppressWarnings("deprecation")
public void testEnsureIndexShouldCreateIndex() {

	Person p1 = new Person("Oliver");
	p1.setAge(25);
	// block(): a reactive insert does nothing until subscribed — without it the
	// documents were never written.
	template.insert(p1).block();
	Person p2 = new Person("Sven");
	p2.setAge(40);
	template.insert(p2).block();

	template.reactiveIndexOps(Person.class).ensureIndex(new Index().on("age", Direction.DESC).unique())
			.block();

	MongoCollection<Document> coll = template.getCollection(template.getCollectionName(Person.class));
	List<Document> indexInfo = Flux.from(coll.listIndexes()).collectList().block();

	// _id index plus the newly created age index.
	assertThat(indexInfo.size(), is(2));

	Object indexKey = null;
	boolean unique = false;
	for (org.bson.Document ix : indexInfo) {
		if ("age_-1".equals(ix.get("name"))) {
			indexKey = ix.get("key");
			unique = (Boolean) ix.get("unique");
		}
	}

	assertThat(((org.bson.Document) indexKey), hasEntry("age", -1));
	assertThat(unique, is(true));
}
/**
* @see DATAMONGO-1444
*/
@Test
@SuppressWarnings("deprecation")
public void getIndexInfoShouldReturnCorrectIndex() {
Person p1 = new Person("Oliver");
p1.setAge(25);
template.insert(p1).block();
template.reactiveIndexOps(Person.class).ensureIndex(new Index().on("age", Direction.DESC).unique())
.block();
List<IndexInfo> indexInfoList = Flux.from(template.reactiveIndexOps(Person.class).getIndexInfo()).collectList()
.block();
assertThat(indexInfoList.size(), is(2));
IndexInfo ii = indexInfoList.get(1);
assertThat(ii.isUnique(), is(true));
assertThat(ii.isDropDuplicates(), is(false));
assertThat(ii.isSparse(), is(false));
List<IndexField> indexFields = ii.getIndexFields();
IndexField field = indexFields.get(0);
assertThat(field, is(IndexField.create("age", Direction.DESC)));
}
/**
* @see DATAMONGO-1444
*/
@Test
public void testReadIndexInfoForIndicesCreatedViaMongoShellCommands() {
String command = "db." + template.getCollectionName(Person.class)
+ ".createIndex({'age':-1}, {'unique':true, 'sparse':true}), 1";
template.reactiveIndexOps(Person.class).dropAllIndexes().block();
TestSubscriber<IndexInfo> subscriber = TestSubscriber
.subscribe(template.reactiveIndexOps(Person.class).getIndexInfo());
subscriber.await().assertComplete().assertNoValues();
Mono.from(factory.getMongoDatabase().runCommand(new org.bson.Document("eval", command))).block();
ListIndexesPublisher<Document> listIndexesPublisher = template
.getCollection(template.getCollectionName(Person.class)).listIndexes();
List<Document> indexInfo = Flux.from(listIndexesPublisher).collectList().block();
org.bson.Document indexKey = null;
boolean unique = false;
for (Document document : indexInfo) {
if ("age_-1".equals(document.get("name"))) {
indexKey = (org.bson.Document) document.get("key");
unique = (Boolean) document.get("unique");
}
}
assertThat(indexKey, hasEntry("age", -1D));
assertThat(unique, is(true));
List<IndexInfo> indexInfos = template.reactiveIndexOps(Person.class).getIndexInfo().collectList().block();
IndexInfo info = indexInfos.get(1);
assertThat(info.isUnique(), is(true));
assertThat(info.isSparse(), is(true));
List<IndexField> indexFields = info.getIndexFields();
IndexField field = indexFields.get(0);
assertThat(field, is(IndexField.create("age", Direction.DESC)));
}
@Data
static class Sample {
@Id String id;
String field;
public Sample() {}
public Sample(String id, String field) {
this.id = id;
this.field = field;
}
}
}

View File

@@ -0,0 +1,86 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.test.util.ReflectionTestUtils;
import com.mongodb.reactivestreams.client.MongoClient;
/**
* Unit tests for {@link ReactiveMongoTemplate}.
*
* @author Mark Paluch
*/
@RunWith(MockitoJUnitRunner.class)
public class ReactiveMongoTemplateUnitTests {
ReactiveMongoTemplate template;
@Mock SimpleReactiveMongoDatabaseFactory factory;
@Mock MongoClient mongoClient;
MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator();
MappingMongoConverter converter;
MongoMappingContext mappingContext;
@Before
public void setUp() {
when(factory.getExceptionTranslator()).thenReturn(exceptionTranslator);
this.mappingContext = new MongoMappingContext();
this.converter = new MappingMongoConverter(new NoOpDbRefResolver(), mappingContext);
this.template = new ReactiveMongoTemplate(factory, converter);
}
/**
* @see DATAMONGO-1444
*/
@Test(expected = IllegalArgumentException.class)
public void rejectsNullDatabaseName() throws Exception {
new ReactiveMongoTemplate(mongoClient, null);
}
/**
* @see DATAMONGO-1444
*/
@Test(expected = IllegalArgumentException.class)
public void rejectsNullMongo() throws Exception {
new ReactiveMongoTemplate(null, "database");
}
/**
* @see DATAMONGO-1444
*/
@Test
public void defaultsConverterToMappingMongoConverter() throws Exception {
ReactiveMongoTemplate template = new ReactiveMongoTemplate(mongoClient, "database");
assertTrue(ReflectionTestUtils.getField(template, "mongoConverter") instanceof MappingMongoConverter);
}
}

View File

@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.when;
import org.bson.Document;
import org.junit.Before;
@@ -40,12 +41,15 @@ import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
public class UnwrapAndReadDocumentCallbackUnitTests {
@Mock MongoDbFactory factory;
@Mock MongoExceptionTranslator exceptionTranslatorMock;
UnwrapAndReadDocumentCallback<Target> callback;
@Before
public void setUp() {
when(factory.getExceptionTranslator()).thenReturn(exceptionTranslatorMock);
MongoTemplate template = new MongoTemplate(factory);
MappingMongoConverter converter = new MappingMongoConverter(new DefaultDbRefResolver(factory),
new MongoMappingContext());

View File

@@ -259,7 +259,7 @@ public class MappingMongoConverterUnitTests {
List<Object> enums = (List<Object>) result.get("enums");
assertThat(enums.size(), is(1));
assertThat((String) enums.get(0), is("FIRST"));
assertThat(enums.get(0), is("FIRST"));
}
/**
@@ -387,7 +387,7 @@ public class MappingMongoConverterUnitTests {
Object localeField = document.get("locale");
assertThat(localeField, is(instanceOf(String.class)));
assertThat((String) localeField, is("en_US"));
assertThat(localeField, is("en_US"));
LocaleWrapper read = converter.read(LocaleWrapper.class, document);
assertThat(read.locale, is(Locale.US));
@@ -584,7 +584,7 @@ public class MappingMongoConverterUnitTests {
public void convertsObjectsIfNecessary() {
ObjectId id = new ObjectId();
assertThat(converter.convertToMongoType(id), is((Object) id));
assertThat(converter.convertToMongoType(id), is(id));
}
/**
@@ -608,7 +608,7 @@ public class MappingMongoConverterUnitTests {
List<Object> value = (List<Object>) foo;
assertThat(value.size(), is(1));
assertThat((String) value.get(0), is("Bar"));
assertThat(value.get(0), is("Bar"));
}
/**
@@ -647,7 +647,7 @@ public class MappingMongoConverterUnitTests {
BasicDBList value = (BasicDBList) foo;
assertThat(value.size(), is(1));
assertThat((String) value.get(0), is("Bar"));
assertThat(value.get(0), is("Bar"));
}
/**
@@ -677,7 +677,7 @@ public class MappingMongoConverterUnitTests {
ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, source);
Object firstObjectInFoo = ((List<?>) result.mapOfObjects.get("Foo")).get(0);
assertThat(firstObjectInFoo, is(instanceOf(Map.class)));
assertThat((String) ((Map<?, ?>) firstObjectInFoo).get("Hello"), is(equalTo("World")));
assertThat(((Map<?, ?>) firstObjectInFoo).get("Hello"), is(equalTo("World")));
}
/**
@@ -697,7 +697,7 @@ public class MappingMongoConverterUnitTests {
assertThat(foo, is(instanceOf(Map.class)));
Object doublyNestedObject = ((Map<?, ?>) foo).get("nested");
assertThat(doublyNestedObject, is(instanceOf(Map.class)));
assertThat((String) ((Map<?, ?>) doublyNestedObject).get("Hello"), is(equalTo("World")));
assertThat(((Map<?, ?>) doublyNestedObject).get("Hello"), is(equalTo("World")));
}
/**
@@ -719,7 +719,7 @@ public class MappingMongoConverterUnitTests {
assertThat(firstObjectInFoo, is(instanceOf(Map.class)));
Object doublyNestedObject = ((Map<?, ?>) firstObjectInFoo).get("nested");
assertThat(doublyNestedObject, is(instanceOf(Map.class)));
assertThat((String) ((Map<?, ?>) doublyNestedObject).get("Hello"), is(equalTo("World")));
assertThat(((Map<?, ?>) doublyNestedObject).get("Hello"), is(equalTo("World")));
}
/**
@@ -784,7 +784,7 @@ public class MappingMongoConverterUnitTests {
BasicDBList list = (BasicDBList) result.get("Foo");
assertThat(list.size(), is(1));
assertThat(list.get(0), is((Object) Locale.US.toString()));
assertThat(list.get(0), is(Locale.US.toString()));
}
/**
@@ -886,17 +886,17 @@ public class MappingMongoConverterUnitTests {
converter.write(a, result);
assertThat((String) result.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(A.class.getName()));
assertThat((String) result.get("valueType"), is(HashMap.class.getName()));
assertThat(result.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(A.class.getName()));
assertThat(result.get("valueType"), is(HashMap.class.getName()));
org.bson.Document object = (org.bson.Document) result.get("value");
assertThat(object, is(notNullValue()));
org.bson.Document inner = (org.bson.Document) object.get("test");
assertThat(inner, is(notNullValue()));
assertThat((String) inner.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(A.class.getName()));
assertThat((String) inner.get("valueType"), is(String.class.getName()));
assertThat((String) inner.get("value"), is("testValue"));
assertThat(inner.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(A.class.getName()));
assertThat(inner.get("valueType"), is(String.class.getName()));
assertThat(inner.get("value"), is("testValue"));
}
@Test
@@ -908,7 +908,7 @@ public class MappingMongoConverterUnitTests {
org.bson.Document result = new org.bson.Document();
converter.write(value, result);
assertThat(result.get("_id"), is((Object) 5));
assertThat(result.get("_id"), is(5));
}
/**
@@ -919,7 +919,7 @@ public class MappingMongoConverterUnitTests {
public void writesNullValuesForCollection() {
CollectionWrapper wrapper = new CollectionWrapper();
wrapper.contacts = Arrays.<Contact> asList(new Person(), null);
wrapper.contacts = Arrays.asList(new Person(), null);
org.bson.Document result = new org.bson.Document();
converter.write(wrapper, result);
@@ -1161,7 +1161,7 @@ public class MappingMongoConverterUnitTests {
converter.write(wrapper, sink);
assertThat(sink.get("url"), is((Object) "http://springsource.org"));
assertThat(sink.get("url"), is("http://springsource.org"));
}
/**
@@ -1192,7 +1192,7 @@ public class MappingMongoConverterUnitTests {
Object idField = document.get("_id");
assertThat(idField, is(notNullValue()));
assertThat(idField, is(instanceOf(org.bson.Document.class)));
assertThat(((org.bson.Document) idField).get("innerId"), is((Object) 4711L));
assertThat(((org.bson.Document) idField).get("innerId"), is(4711L));
}
/**
@@ -1544,7 +1544,7 @@ public class MappingMongoConverterUnitTests {
org.bson.Document map = getAsDocument(result, "treeMapOfPersons");
org.bson.Document entry = getAsDocument(map, "key");
assertThat(entry.get("foo"), is((Object) "Dave"));
assertThat(entry.get("foo"), is("Dave"));
}
/**
@@ -1771,7 +1771,7 @@ public class MappingMongoConverterUnitTests {
ClassWithGeoShape result = converter.read(ClassWithGeoShape.class, document);
assertThat(result, is(notNullValue()));
assertThat(result.shape, is((Shape) sphere));
assertThat(result.shape, is(sphere));
}
/**
@@ -1883,8 +1883,8 @@ public class MappingMongoConverterUnitTests {
org.bson.Document sink = new org.bson.Document();
converter.write(source, sink);
assertThat((String) sink.get("_id"), is("rootId"));
assertThat((org.bson.Document) sink.get("nested"), is(new org.bson.Document().append("id", "nestedId")));
assertThat(sink.get("_id"), is("rootId"));
assertThat(sink.get("nested"), is(new org.bson.Document().append("id", "nestedId")));
}
/**
@@ -1973,7 +1973,7 @@ public class MappingMongoConverterUnitTests {
converter.write(type, result);
assertThat(getAsDocument(result, "string"), is((org.bson.Document) new org.bson.Document()));
assertThat(getAsDocument(result, "string"), is(new org.bson.Document()));
org.bson.Document localDateTime = getAsDocument(result, "localDateTime");
assertThat(localDateTime.get("value"), is(instanceOf(Date.class)));
@@ -2103,7 +2103,7 @@ public class MappingMongoConverterUnitTests {
EnumMap<SampleEnum, String> enumMap;
}
static enum SampleEnum {
enum SampleEnum {
FIRST {
@Override
void method() {}
@@ -2118,7 +2118,7 @@ public class MappingMongoConverterUnitTests {
abstract void method();
}
static interface InterfaceType {
interface InterfaceType {
}
@@ -2396,8 +2396,8 @@ public class MappingMongoConverterUnitTests {
static class ClassWithMapUsingEnumAsKey {
static enum FooBarEnum {
FOO, BAR;
enum FooBarEnum {
FOO, BAR
}
Map<FooBarEnum, String> map;

View File

@@ -36,6 +36,7 @@ import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.data.annotation.Id;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
import org.springframework.data.mongodb.core.mapping.Document;
@@ -107,8 +108,10 @@ public class MongoPersistentEntityIndexCreatorIntegrationTests {
expectedException.expectMessage("lastname");
expectedException.expectCause(IsInstanceOf.<Throwable> instanceOf(MongoCommandException.class));
MongoTemplate mongoTemplate = new MongoTemplate(new MongoClient(), "issue");
MongoPersistentEntityIndexCreator indexCreator = new MongoPersistentEntityIndexCreator(new MongoMappingContext(),
new SimpleMongoDbFactory(new MongoClient(), "issue"));
mongoTemplate);
indexCreator.createIndex(new IndexDefinitionHolder("dalinar.kohlin", new Index().named("stormlight")
.on("lastname", Direction.ASC).unique(), "datamongo-1125"));

View File

@@ -38,7 +38,9 @@ import org.springframework.dao.DataAccessException;
import org.springframework.data.geo.Point;
import org.springframework.data.mapping.context.MappingContextEvent;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.DefaultIndexOperations;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
@@ -58,6 +60,7 @@ import com.mongodb.client.model.IndexOptions;
* @author Johno Crawford
* @author Christoph Strobl
* @author Thomas Darimont
* @author Mark Paluch
*/
@RunWith(MockitoJUnitRunner.class)
public class MongoPersistentEntityIndexCreatorUnitTests {
@@ -65,7 +68,8 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
private @Mock MongoDbFactory factory;
private @Mock ApplicationContext context;
private @Mock MongoDatabase db;
private @Mock MongoCollection<Document> collection;
private @Mock MongoCollection<org.bson.Document> collection;
private MongoTemplate mongoTemplate;
ArgumentCaptor<org.bson.Document> keysCaptor;
ArgumentCaptor<IndexOptions> optionsCaptor;
@@ -79,7 +83,10 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
collectionCaptor = ArgumentCaptor.forClass(String.class);
when(factory.getDb()).thenReturn(db);
when(db.getCollection(collectionCaptor.capture(), eq(Document.class))).thenReturn(collection);
when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator());
when(db.getCollection(collectionCaptor.capture())).thenReturn(collection);
mongoTemplate = new MongoTemplate(factory);
when(collection.createIndex(keysCaptor.capture(), optionsCaptor.capture())).thenReturn("OK");
}
@@ -89,7 +96,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
MongoMappingContext mappingContext = prepareMappingContext(Person.class);
new MongoPersistentEntityIndexCreator(mappingContext, factory);
new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate);
assertThat(keysCaptor.getValue(), is(notNullValue()));
assertThat(keysCaptor.getValue().keySet(), hasItem("fieldname"));
@@ -104,7 +111,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
MongoMappingContext mappingContext = new MongoMappingContext();
MongoMappingContext personMappingContext = prepareMappingContext(Person.class);
MongoPersistentEntityIndexCreator creator = new MongoPersistentEntityIndexCreator(mappingContext, factory);
MongoPersistentEntityIndexCreator creator = new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate);
MongoPersistentEntity<?> entity = personMappingContext.getPersistentEntity(Person.class);
MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty> event = new MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>(
@@ -124,7 +131,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
MongoMappingContext mappingContext = new MongoMappingContext();
mappingContext.initialize();
MongoPersistentEntityIndexCreator creator = new MongoPersistentEntityIndexCreator(mappingContext, factory);
MongoPersistentEntityIndexCreator creator = new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate);
assertThat(creator.isIndexCreatorFor(mappingContext), is(true));
assertThat(creator.isIndexCreatorFor(new MongoMappingContext()), is(false));
}
@@ -136,7 +143,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
public void triggersBackgroundIndexingIfConfigured() {
MongoMappingContext mappingContext = prepareMappingContext(AnotherPerson.class);
new MongoPersistentEntityIndexCreator(mappingContext, factory);
new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate);
assertThat(keysCaptor.getValue(), is(notNullValue()));
assertThat(keysCaptor.getValue().keySet(), hasItem("lastname"));
@@ -152,7 +159,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
public void expireAfterSecondsIfConfigured() {
MongoMappingContext mappingContext = prepareMappingContext(Milk.class);
new MongoPersistentEntityIndexCreator(mappingContext, factory);
new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate);
assertThat(keysCaptor.getValue(), is(notNullValue()));
assertThat(keysCaptor.getValue().keySet(), hasItem("expiry"));
@@ -166,7 +173,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
public void createsNotNestedGeoSpatialIndexCorrectly() {
MongoMappingContext mappingContext = prepareMappingContext(Wrapper.class);
new MongoPersistentEntityIndexCreator(mappingContext, factory);
new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate);
assertThat(keysCaptor.getValue(), equalTo(new org.bson.Document().append("company.address.location", "2d")));
@@ -184,7 +191,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
public void autoGeneratedIndexNameShouldGenerateNoName() {
MongoMappingContext mappingContext = prepareMappingContext(EntityWithGeneratedIndexName.class);
new MongoPersistentEntityIndexCreator(mappingContext, factory);
new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate);
assertThat(keysCaptor.getValue().containsKey("name"), is(false));
assertThat(keysCaptor.getValue().keySet(), hasItem("lastname"));
@@ -199,11 +206,11 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
public void indexCreationShouldNotCreateNewCollectionForNestedGeoSpatialIndexStructures() {
MongoMappingContext mappingContext = prepareMappingContext(Wrapper.class);
new MongoPersistentEntityIndexCreator(mappingContext, factory);
new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate);
ArgumentCaptor<String> collectionNameCapturer = ArgumentCaptor.forClass(String.class);
verify(db, times(1)).getCollection(collectionNameCapturer.capture(), eq(Document.class));
verify(db, times(1)).getCollection(collectionNameCapturer.capture());
assertThat(collectionNameCapturer.getValue(), equalTo("wrapper"));
}
@@ -214,11 +221,11 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
public void indexCreationShouldNotCreateNewCollectionForNestedIndexStructures() {
MongoMappingContext mappingContext = prepareMappingContext(IndexedDocumentWrapper.class);
new MongoPersistentEntityIndexCreator(mappingContext, factory);
new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate);
ArgumentCaptor<String> collectionNameCapturer = ArgumentCaptor.forClass(String.class);
verify(db, times(1)).getCollection(collectionNameCapturer.capture(), eq(Document.class));
verify(db, times(1)).getCollection(collectionNameCapturer.capture());
assertThat(collectionNameCapturer.getValue(), equalTo("indexedDocumentWrapper"));
}
@@ -234,7 +241,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
MongoMappingContext mappingContext = prepareMappingContext(Person.class);
new MongoPersistentEntityIndexCreator(mappingContext, factory);
new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate);
}
/**
@@ -249,7 +256,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests {
MongoMappingContext mappingContext = prepareMappingContext(Person.class);
new MongoPersistentEntityIndexCreator(mappingContext, factory);
new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate);
}
private static MongoMappingContext prepareMappingContext(Class<?> type) {

View File

@@ -0,0 +1,995 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.performance;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;
import static org.springframework.util.Assert.*;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.junit.Before;
import org.junit.Test;
import org.springframework.core.Constants;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.data.annotation.PersistenceConstructor;
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.convert.DbRefProxyHandler;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DbRefResolverCallback;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.repository.ReactiveMongoRepository;
import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory;
import org.springframework.util.StopWatch;
import org.springframework.util.StringUtils;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBRef;
import com.mongodb.WriteConcern;
import com.mongodb.client.model.CreateCollectionOptions;
import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoClients;
import com.mongodb.reactivestreams.client.MongoCollection;
import com.mongodb.reactivestreams.client.MongoDatabase;
/**
* Test class to execute performance tests for plain Reactive Streams MongoDB driver usage, {@link ReactiveMongoOperations} and the repositories
* abstraction.
*
* @author Mark Paluch
*/
public class ReactivePerformanceTests {
private static final String DATABASE_NAME = "performance";
private static final int NUMBER_OF_PERSONS = 300;
private static final int ITERATIONS = 50;
private static final StopWatch watch = new StopWatch();
private static final Collection<String> IGNORED_WRITE_CONCERNS = Arrays.asList("MAJORITY", "REPLICAS_SAFE",
"FSYNC_SAFE", "FSYNCED", "JOURNAL_SAFE", "JOURNALED", "REPLICA_ACKNOWLEDGED");
private static final int COLLECTION_SIZE = 1024 * 1024 * 256; // 256 MB
private static final Collection<String> COLLECTION_NAMES = Arrays.asList("template", "driver", "person");
MongoClient mongo;
ReactiveMongoTemplate operations;
ReactivePersonRepository repository;
MongoConverter converter;
@Before
public void setUp() throws Exception {

	// Connect to a local MongoDB instance through the Reactive Streams driver.
	this.mongo = MongoClients.create();

	SimpleReactiveMongoDatabaseFactory mongoDbFactory = new SimpleReactiveMongoDatabaseFactory(this.mongo, DATABASE_NAME);

	// Pre-register the Person entity so mapping metadata is available up front.
	MongoMappingContext context = new MongoMappingContext();
	context.setInitialEntitySet(Collections.singleton(Person.class));
	context.afterPropertiesSet();

	// No-op DbRefResolver: these performance tests never follow DBRefs, so every
	// resolution callback simply returns null.
	this.converter = new MappingMongoConverter(new DbRefResolver() {
		@Override
		public Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback, DbRefProxyHandler proxyHandler) {
			return null;
		}

		@Override
		public DBRef createDbRef(org.springframework.data.mongodb.core.mapping.DBRef annotation, MongoPersistentEntity<?> entity, Object id) {
			return null;
		}

		@Override
		public Document fetch(DBRef dbRef) {
			return null;
		}

		@Override
		public List<Document> bulkFetch(List<DBRef> dbRefs) {
			return null;
		}
	}, context);

	this.operations = new ReactiveMongoTemplate(mongoDbFactory, converter);

	// Build the reactive repository proxy used by the repository benchmarks.
	ReactiveMongoRepositoryFactory factory = new ReactiveMongoRepositoryFactory(operations);
	factory.setConversionService(new GenericConversionService());
	this.repository = factory.getRepository(ReactivePersonRepository.class);
}
/**
* @see DATAMONGO-1444
*/
@Test
public void writeWithWriteConcerns() {

	// Runs the full write benchmark matrix (driver/template/repository,
	// blocking and async) once per non-ignored WriteConcern constant.
	executeWithWriteConcerns(new WriteConcernCallback() {
		public void doWithWriteConcern(String constantName, WriteConcern concern) {
			writeHeadline("WriteConcern: " + constantName);
			System.out.println(String.format("Writing %s objects using plain driver took %sms", NUMBER_OF_PERSONS,
					writingObjectsUsingPlainDriver(NUMBER_OF_PERSONS, concern)));
			System.out.println(String.format("Writing %s objects using template took %sms", NUMBER_OF_PERSONS,
					writingObjectsUsingMongoTemplate(NUMBER_OF_PERSONS, concern)));
			System.out.println(String.format("Writing %s objects using repository took %sms", NUMBER_OF_PERSONS,
					writingObjectsUsingRepositories(NUMBER_OF_PERSONS, concern)));
			System.out.println(String.format("Writing %s objects async using plain driver took %sms", NUMBER_OF_PERSONS,
					writingAsyncObjectsUsingPlainDriver(NUMBER_OF_PERSONS, concern)));
			System.out.println(String.format("Writing %s objects async using template took %sms", NUMBER_OF_PERSONS,
					writingAsyncObjectsUsingMongoTemplate(NUMBER_OF_PERSONS, concern)));
			System.out.println(String.format("Writing %s objects async using repository took %sms", NUMBER_OF_PERSONS,
					writingAsyncObjectsUsingRepositories(NUMBER_OF_PERSONS, concern)));
			writeFooter();
		}
	});
}
@Test
public void plainConversion() throws InterruptedException {

	// Compare hand-written document mapping against the MappingMongoConverter
	// on a large, fixed set of person documents.
	int personCount = NUMBER_OF_PERSONS * 100;
	Statistics stats = new Statistics("Plain conversion of " + personCount + " persons - After %s iterations");
	List<Document> sources = getPersonDocuments(personCount);

	for (int iteration = 0; iteration < ITERATIONS; iteration++) {
		stats.registerTime(Api.DIRECT, Mode.READ, convertDirectly(sources));
		stats.registerTime(Api.CONVERTER, Mode.READ, convertUsingConverter(sources));
	}

	stats.printResults(ITERATIONS);
}
private long convertDirectly(final List<Document> dbObjects) {

	executeWatched(new WatchCallback<List<Person>>() {
		@Override
		public List<Person> doInWatch() {
			// Hand-written mapping: copy each document and convert it manually.
			return dbObjects.stream() //
					.map(source -> Person.from(new Document(source))) //
					.collect(Collectors.toList());
		}
	});

	return watch.getLastTaskTimeMillis();
}

private long convertUsingConverter(final List<Document> dbObjects) {

	executeWatched(new WatchCallback<List<Person>>() {
		@Override
		public List<Person> doInWatch() {
			// Mapping via the configured MappingMongoConverter.
			return dbObjects.stream() //
					.map(source -> converter.read(Person.class, source)) //
					.collect(Collectors.toList());
		}
	});

	return watch.getLastTaskTimeMillis();
}
/**
* @see DATAMONGO-1444
*/
@Test
public void writeAndRead() throws Exception {
	readsAndWrites(NUMBER_OF_PERSONS, ITERATIONS, WriteConcern.SAFE);
}

/**
 * Runs the full read/write benchmark matrix for the given number of persons,
 * re-creating the collections before every iteration and printing intermediate
 * results roughly every 10% of the run.
 *
 * @param numberOfPersons number of {@link Person} instances written per API.
 * @param iterations number of benchmark iterations.
 * @param concern write concern applied to all write benchmarks.
 */
private void readsAndWrites(int numberOfPersons, int iterations, WriteConcern concern) {

	Statistics statistics = new Statistics("Reading " + numberOfPersons + " - After %s iterations");

	// Guard against iterations < 10: (iterations / 10) would be 0 and
	// i % 0 would throw an ArithmeticException.
	int reportEvery = Math.max(1, iterations / 10);

	for (int i = 0; i < iterations; i++) {

		setupCollections();

		statistics.registerTime(Api.DRIVER, Mode.WRITE, writingObjectsUsingPlainDriver(numberOfPersons, concern));
		statistics.registerTime(Api.TEMPLATE, Mode.WRITE, writingObjectsUsingMongoTemplate(numberOfPersons, concern));
		statistics.registerTime(Api.REPOSITORY, Mode.WRITE, writingObjectsUsingRepositories(numberOfPersons, concern));
		statistics.registerTime(Api.DRIVER, Mode.WRITE_ASYNC, writingAsyncObjectsUsingPlainDriver(numberOfPersons, concern));
		statistics.registerTime(Api.TEMPLATE, Mode.WRITE_ASYNC, writingAsyncObjectsUsingMongoTemplate(numberOfPersons, concern));
		statistics.registerTime(Api.REPOSITORY, Mode.WRITE_ASYNC, writingAsyncObjectsUsingRepositories(numberOfPersons, concern));
		statistics.registerTime(Api.DRIVER, Mode.READ, readingUsingPlainDriver());
		statistics.registerTime(Api.TEMPLATE, Mode.READ, readingUsingTemplate());
		statistics.registerTime(Api.REPOSITORY, Mode.READ, readingUsingRepository());
		statistics.registerTime(Api.DRIVER, Mode.QUERY, queryUsingPlainDriver());
		statistics.registerTime(Api.TEMPLATE, Mode.QUERY, queryUsingTemplate());
		statistics.registerTime(Api.REPOSITORY, Mode.QUERY, queryUsingRepository());

		if (i > 0 && i % reportEvery == 0) {
			statistics.printResults(i);
		}
	}

	statistics.printResults(iterations);
}
// Prints a section headline followed by an underline of matching length.
// createUnderline is declared elsewhere in this class.
private void writeHeadline(String headline) {
	System.out.println(headline);
	System.out.println(createUnderline(headline));
}

// Terminates a benchmark section with a blank line.
private void writeFooter() {
	System.out.println();
}

// Times a regex query ("addresses.zipCode" containing '1') through the
// reactive template, blocking until all results arrive.
private long queryUsingTemplate() {
	executeWatched(new WatchCallback<List<Person>>() {
		public List<Person> doInWatch() {
			Query query = query(where("addresses.zipCode").regex(".*1.*"));
			return operations.find(query, Person.class, "template").collectList().block();
		}
	});

	return watch.getLastTaskTimeMillis();
}

// Times the equivalent derived query through the reactive repository.
private long queryUsingRepository() {
	executeWatched(new WatchCallback<List<Person>>() {
		public List<Person> doInWatch() {
			return repository.findByAddressesZipCodeContaining("1").collectList().block();
		}
	});

	return watch.getLastTaskTimeMillis();
}
/**
 * Invokes the given callback once for every {@link WriteConcern} constant that
 * is not listed in {@link #IGNORED_WRITE_CONCERNS}, re-creating the benchmark
 * collections before each invocation.
 */
private void executeWithWriteConcerns(WriteConcernCallback callback) {

	Constants constants = new Constants(WriteConcern.class);

	for (String name : constants.getNames(null)) {

		if (!IGNORED_WRITE_CONCERNS.contains(name)) {
			WriteConcern concern = (WriteConcern) constants.asObject(name);
			setupCollections();
			callback.doWithWriteConcern(name, concern);
		}
	}
}
/**
 * Drops and re-creates the benchmark collections and their indexes.
 * <p>
 * All Reactive Streams publishers returned by the driver are cold: nothing
 * happens until they are subscribed. The original code called
 * {@code createIndex} without subscribing, so the indexes were never actually
 * created; each operation is now blocked upon so the collections are fully
 * prepared before any timing starts.
 */
private void setupCollections() {

	MongoDatabase db = this.mongo.getDatabase(DATABASE_NAME);

	for (String collectionName : COLLECTION_NAMES) {

		MongoCollection<Document> collection = db.getCollection(collectionName);
		Mono.from(collection.drop()).block();
		Mono.from(db.createCollection(collectionName, getCreateCollectionOptions())).block();

		// Subscribe (block) so the index creation is actually executed.
		Mono.from(collection.createIndex(new BasicDBObject("firstname", -1))).block();
		Mono.from(collection.createIndex(new BasicDBObject("lastname", -1))).block();
	}
}
/**
 * Options for the benchmark collections: fixed pre-allocated size, not capped,
 * so file allocation does not distort the timing measurements.
 */
private CreateCollectionOptions getCreateCollectionOptions() {
	return new CreateCollectionOptions().sizeInBytes(COLLECTION_SIZE).capped(false);
}
// Writes persons one by one through the plain Reactive Streams driver,
// blocking on each insert. Returns elapsed milliseconds.
private long writingObjectsUsingPlainDriver(int numberOfPersons, WriteConcern concern) {

	final MongoCollection<Document> collection = mongo.getDatabase(DATABASE_NAME).getCollection("driver").withWriteConcern(concern);
	final List<Person> persons = getPersonObjects(numberOfPersons);

	executeWatched(new WatchCallback<Void>() {
		public Void doInWatch() {
			for (Person person : persons) {
				Mono.from(collection.insertOne(new Document(person.toDocument()))).block();
			}
			return null;
		}
	});

	return watch.getLastTaskTimeMillis();
}

// Writes persons one by one through the reactive repository, blocking on
// each save. Returns elapsed milliseconds.
private long writingObjectsUsingRepositories(int numberOfPersons, WriteConcern concern) {

	final List<Person> persons = getPersonObjects(numberOfPersons);

	// NOTE: the write concern is applied to the template backing the
	// repository, outside the timed section.
	operations.setWriteConcern(concern);
	executeWatched(new WatchCallback<Void>() {
		public Void doInWatch() {
			for (Person person : persons) {
				repository.save(person).block();
			}
			return null;
		}
	});

	return watch.getLastTaskTimeMillis();
}

// Writes persons one by one through the reactive template, blocking on each
// save. Returns elapsed milliseconds.
private long writingObjectsUsingMongoTemplate(int numberOfPersons, WriteConcern concern) {

	final List<Person> persons = getPersonObjects(numberOfPersons);

	executeWatched(new WatchCallback<Void>() {
		public Void doInWatch() {
			operations.setWriteConcern(concern);
			for (Person person : persons) {
				Mono.from(operations.save(person, "template")).block();
			}
			return null;
		}
	});

	return watch.getLastTaskTimeMillis();
}

// Bulk-inserts all persons in a single insertMany through the plain driver,
// blocking only for overall completion. Returns elapsed milliseconds.
private long writingAsyncObjectsUsingPlainDriver(int numberOfPersons, WriteConcern concern) {

	final MongoCollection<Document> collection = mongo.getDatabase(DATABASE_NAME).getCollection("driver").withWriteConcern(concern);
	final List<Person> persons = getPersonObjects(numberOfPersons);

	executeWatched(new WatchCallback<Void>() {
		public Void doInWatch() {
			Flux.from(collection.insertMany(persons.stream().map(person -> new Document(person.toDocument())).collect(Collectors.toList()))).then().block();
			return null;
		}
	});

	return watch.getLastTaskTimeMillis();
}

// Saves all persons through the repository's bulk save, awaiting completion
// of the whole stream. Returns elapsed milliseconds.
private long writingAsyncObjectsUsingRepositories(int numberOfPersons, WriteConcern concern) {

	final List<Person> persons = getPersonObjects(numberOfPersons);

	operations.setWriteConcern(concern);
	executeWatched(new WatchCallback<Void>() {
		public Void doInWatch() {
			repository.save(persons).then().block();
			return null;
		}
	});

	return watch.getLastTaskTimeMillis();
}

// Inserts all persons through the template's insertAll, awaiting completion
// of the whole stream. Returns elapsed milliseconds.
private long writingAsyncObjectsUsingMongoTemplate(int numberOfPersons, WriteConcern concern) {

	final List<Person> persons = getPersonObjects(numberOfPersons);

	executeWatched(new WatchCallback<Void>() {
		public Void doInWatch() {
			operations.setWriteConcern(concern);
			Flux.from(operations.insertAll(persons)).then().block();
			return null;
		}
	});

	return watch.getLastTaskTimeMillis();
}
// Reads the whole "driver" collection through the plain driver, mapping each
// document by hand. Returns elapsed milliseconds.
private long readingUsingPlainDriver() {

	executeWatched(new WatchCallback<List<Person>>() {
		public List<Person> doInWatch() {
			return Flux.from(mongo.getDatabase(DATABASE_NAME).getCollection("driver").find()).map(Person::from).collectList().block();
		}
	});

	return watch.getLastTaskTimeMillis();
}

// Reads the whole "template" collection through the reactive template.
// Returns elapsed milliseconds.
private long readingUsingTemplate() {

	executeWatched(new WatchCallback<List<Person>>() {
		public List<Person> doInWatch() {
			return operations.findAll(Person.class, "template").collectList().block();
		}
	});

	return watch.getLastTaskTimeMillis();
}

// Reads all persons through the reactive repository.
// Returns elapsed milliseconds.
private long readingUsingRepository() {

	executeWatched(new WatchCallback<List<Person>>() {
		public List<Person> doInWatch() {
			return repository.findAll().collectList().block();
		}
	});

	return watch.getLastTaskTimeMillis();
}

// Runs the zip-code regex query through the plain driver, mapping results by
// hand. Returns elapsed milliseconds.
private long queryUsingPlainDriver() {

	executeWatched(new WatchCallback<List<Person>>() {
		public List<Person> doInWatch() {

			MongoCollection<Document> collection = mongo.getDatabase(DATABASE_NAME).getCollection("driver");

			Document regex = new Document("$regex", Pattern.compile(".*1.*"));
			Document query = new Document("addresses.zipCode", regex);
			return Flux.from(collection.find(query)).map(Person::from).collectList().block();
		}
	});

	return watch.getLastTaskTimeMillis();
}
/**
 * Builds the benchmark fixture: each person gets five addresses and ten
 * orders with randomly generated line items.
 */
private List<Person> getPersonObjects(int numberOfPersons) {

	List<Person> persons = new ArrayList<Person>(numberOfPersons);

	for (int index = 0; index < numberOfPersons; index++) {

		List<Address> addresses = new ArrayList<Address>(5);
		for (int addressIndex = 0; addressIndex < 5; addressIndex++) {
			addresses.add(new Address("zip" + addressIndex, "city" + addressIndex));
		}

		Person person = new Person("Firstname" + index, "Lastname" + index, addresses);
		for (int orderIndex = 0; orderIndex < 10; orderIndex++) {
			person.orders.add(new Order(LineItem.generate()));
		}

		persons.add(person);
	}

	return persons;
}

/**
 * Serializes the generated fixture persons into raw documents.
 */
private List<Document> getPersonDocuments(int numberOfPersons) {

	List<Document> documents = new ArrayList<Document>(numberOfPersons);

	for (Person person : getPersonObjects(numberOfPersons)) {
		documents.add(person.toDocument());
	}

	return documents;
}
// Runs the callback under the shared StopWatch; the elapsed time is read
// afterwards via watch.getLastTaskTimeMillis(). The watch is stopped even if
// the callback throws.
private <T> T executeWatched(WatchCallback<T> callback) {

	watch.start();

	try {
		return callback.doInWatch();
	} finally {
		watch.stop();
	}
}
// Benchmark entity written/read by all three APIs. Mutable on purpose; the
// id is assigned by the mapping layer on save.
static class Person {

	ObjectId id;
	String firstname, lastname;
	List<Address> addresses;
	Set<Order> orders;

	public Person(String firstname, String lastname, List<Address> addresses) {
		this.firstname = firstname;
		this.lastname = lastname;
		this.addresses = addresses;
		this.orders = new HashSet<Order>();
	}

	// Hand-written deserialization counterpart of toDocument(). The unchecked
	// casts assume the document was produced by toDocument() (or carries the
	// same structure) — lists of sub-documents under "addresses"/"orders".
	public static Person from(Document source) {

		List<Document> addressesSource = (List<Document>) source.get("addresses");

		List<Address> addresses = new ArrayList<Address>(addressesSource.size());
		for (Object addressSource : addressesSource) {
			addresses.add(Address.from((Document) addressSource));
		}

		List<Document> ordersSource = (List<Document>) source.get("orders");

		Set<Order> orders = new HashSet<Order>(ordersSource.size());
		for (Object orderSource : ordersSource) {
			orders.add(Order.from((Document) orderSource));
		}

		Person person = new Person((String) source.get("firstname"), (String) source.get("lastname"), addresses);
		person.orders.addAll(orders);
		return person;
	}

	// Hand-written serialization. writeAll is declared elsewhere in this
	// class — presumably serializes each Convertible element; verify there.
	public Document toDocument() {

		Document dbObject = new Document();
		dbObject.put("firstname", firstname);
		dbObject.put("lastname", lastname);
		dbObject.put("addresses", writeAll(addresses));
		dbObject.put("orders", writeAll(orders));
		return dbObject;
	}
}
// Immutable value object embedded in Person. The two-arg constructor picks a
// random subset of AddressType values (pickRandomNumerOfItemsFrom is declared
// elsewhere in this class).
static class Address implements Convertible {

	final String zipCode;
	final String city;
	final Set<AddressType> types;

	public Address(String zipCode, String city) {
		this(zipCode, city, new HashSet<AddressType>(pickRandomNumerOfItemsFrom(Arrays.asList(AddressType.values()))));
	}

	// Constructor used by the mapping layer when reading documents.
	@PersistenceConstructor
	public Address(String zipCode, String city, Set<AddressType> types) {

		this.zipCode = zipCode;
		this.city = city;
		this.types = types;
	}

	// Hand-written deserialization counterpart of toDocument(); "types" is
	// stored as a list of enum names.
	public static Address from(Document source) {

		String zipCode = (String) source.get("zipCode");
		String city = (String) source.get("city");
		List types = (List) source.get("types");
		return new Address(zipCode, city, new HashSet<AddressType>(fromList(types, AddressType.class)));
	}

	// Hand-written serialization of this address.
	public Document toDocument() {

		Document dbObject = new Document();
		dbObject.put("zipCode", zipCode);
		dbObject.put("city", city);
		dbObject.put("types", toList(types));
		return dbObject;
	}
}
/**
 * Converts a list of raw enum-name values back into enum constants.
 *
 * @param source list whose elements' {@code toString()} values are constant
 *          names of {@code type}; widened from a raw {@code List} to
 *          {@code List<?>} (source-compatible for all existing callers).
 * @param type the target enum type.
 * @return the resolved constants, in source order.
 * @throws IllegalArgumentException if an element is not a constant of
 *           {@code type} (behavior of {@link Enum#valueOf}).
 */
private static <T extends Enum<T>> List<T> fromList(List<?> source, Class<T> type) {

	List<T> result = new ArrayList<T>(source.size());
	for (Object element : source) {
		result.add(Enum.valueOf(type, element.toString()));
	}
	return result;
}
/**
 * Converts enum constants into their {@code String} names for storage.
 *
 * @param enums constants to convert; must not be {@literal null}.
 * @return the {@code toString()} representation of each constant, in iteration order.
 */
private static <T extends Enum<T>> List<String> toList(Collection<T> enums) {
	// Declare the concrete element type instead of a raw List and pre-size the
	// target list; callers (Address.toDocument) only hand the result to Document.put.
	List<String> result = new ArrayList<String>(enums.size());
	for (T element : enums) {
		result.add(element.toString());
	}
	return result;
}
/**
 * Order aggregate holding line items, creation date and processing status.
 */
static class Order implements Convertible {

	enum Status {
		ORDERED, PAYED, SHIPPED
	}

	Date createdAt;
	List<LineItem> lineItems;
	Status status;

	public Order(List<LineItem> lineItems) {
		this(lineItems, new Date());
	}

	public Order(List<LineItem> lineItems, Date createdAt) {
		// Freshly created orders always start out in ORDERED state.
		this(lineItems, createdAt, Status.ORDERED);
	}

	@PersistenceConstructor
	public Order(List<LineItem> lineItems, Date createdAt, Status status) {

		this.lineItems = lineItems;
		this.createdAt = createdAt;
		this.status = status;
	}

	/**
	 * Deserializes an {@code Order} from its MongoDB representation.
	 */
	public static Order from(Document source) {

		List rawItems = (List) source.get("lineItems");
		List<LineItem> lineItems = new ArrayList<LineItem>(rawItems.size());

		for (Object rawItem : rawItems) {
			lineItems.add(LineItem.from((Document) rawItem));
		}

		return new Order(lineItems, (Date) source.get("createdAt"), Status.valueOf((String) source.get("status")));
	}

	public Document toDocument() {

		Document document = new Document();

		document.put("createdAt", createdAt);
		document.put("lineItems", writeAll(lineItems));
		document.put("status", status.toString());

		return document;
	}
}
/**
 * Single order position with description, amount and price.
 */
static class LineItem implements Convertible {

	String description;
	double price;
	int amount;

	public LineItem(String description, int amount, double price) {

		this.description = description;
		this.amount = amount;
		this.price = price;
	}

	/**
	 * Returns a random, non-empty selection of the three sample line items.
	 */
	public static List<LineItem> generate() {

		LineItem iPad = new LineItem("iPad", 1, 649);
		LineItem iPhone = new LineItem("iPhone", 1, 499);
		LineItem macBook = new LineItem("MacBook", 2, 1299);

		return pickRandomNumerOfItemsFrom(Arrays.asList(iPad, iPhone, macBook));
	}

	/**
	 * Deserializes a {@code LineItem} from its MongoDB representation.
	 */
	public static LineItem from(Document source) {
		return new LineItem((String) source.get("description"), (Integer) source.get("amount"),
				(Double) source.get("price"));
	}

	public Document toDocument() {

		Document document = new Document();

		document.put("description", description);
		document.put("price", price);
		document.put("amount", amount);

		return document;
	}
}
/**
 * Picks a random, non-empty subset from the given items.
 * <p>
 * The subset size is at least one and — for sources with more than one element — derived from
 * {@code Random.nextInt(size)}, matching the original sampling bounds.
 *
 * @param source candidate items, must not be empty.
 * @return a randomly selected list of items from {@code source}.
 */
private static <T> List<T> pickRandomNumerOfItemsFrom(List<T> source) {

	isTrue(!source.isEmpty());

	// Random.nextInt(size) yields [0, size - 1]; promote 0 to 1 so the result is never empty.
	int numberOfItems = Math.max(1, new Random().nextInt(source.size()));

	// Shuffle a copy and take a prefix instead of rejection-sampling random indexes. The old
	// contains()-based retry loop never terminated when the source held equal duplicates
	// (fewer distinct elements than numberOfItems) and was quadratic in the worst case.
	List<T> shuffled = new ArrayList<T>(source);
	Collections.shuffle(shuffled);

	return new ArrayList<T>(shuffled.subList(0, numberOfItems));
}
/** Kinds of addresses a person can have; a random subset is assigned per {@link Address}. */
enum AddressType {
SHIPPING, BILLING
}
/** Callback invoked once per {@code WriteConcern} constant by the benchmark harness (caller not visible in this excerpt). */
private interface WriteConcernCallback {
void doWithWriteConcern(String constantName, WriteConcern concern);
}
/** Unit of work whose execution is measured; presumably timed by a stopwatch helper defined outside this excerpt. */
private interface WatchCallback<T> {
T doInWatch();
}
/** Reactive repository used for the repository-based benchmark scenarios. */
private interface ReactivePersonRepository extends ReactiveMongoRepository<Person, ObjectId> {

/** Derived query: persons having an address whose zip code contains the given fragment. */
Flux<Person> findByAddressesZipCodeContaining(String parameter);
}
/** Marker for test entities that can serialize themselves into a MongoDB {@code Document}. */
private interface Convertible {
Document toDocument();
}
/**
 * Serializes all given convertibles into a {@link BasicDBList}.
 */
private static BasicDBList writeAll(Collection<? extends Convertible> convertibles) {

	BasicDBList documents = new BasicDBList();

	for (Convertible convertible : convertibles) {
		documents.add(convertible.toDocument());
	}

	return documents;
}
/** APIs benchmarked against each other. */
enum Api {
DRIVER, TEMPLATE, REPOSITORY, DIRECT, CONVERTER
}
/** Operation categories a benchmark run is grouped by. */
enum Mode {
WRITE, READ, QUERY,
WRITE_ASYNC
}
/**
 * Collects timing samples per {@link Mode} and {@link Api} and renders a console report.
 */
private static class Statistics {

	private final String headline;
	private final Map<Mode, ModeTimes> times;

	public Statistics(String headline) {

		this.headline = headline;
		this.times = new HashMap<Mode, ModeTimes>();

		// Pre-create one bucket per mode so registerTime(…) never encounters a missing key.
		for (Mode mode : Mode.values()) {
			this.times.put(mode, new ModeTimes(mode));
		}
	}

	public void registerTime(Api api, Mode mode, double time) {
		times.get(mode).add(api, time);
	}

	/**
	 * Prints the headline (with the iteration count substituted), an underline and the
	 * per-mode timing reports, skipping modes without samples.
	 */
	public void printResults(int iterations) {

		String title = String.format(headline, iterations);

		System.out.println(title);
		System.out.println(createUnderline(title));

		StringBuilder report = new StringBuilder();

		for (Mode mode : Mode.values()) {

			String modeReport = times.get(mode).print();

			if (!modeReport.isEmpty()) {
				report.append(modeReport).append('\n');
			}
		}

		System.out.println(report.toString());
	}

	@Override
	public String toString() {

		StringBuilder result = new StringBuilder(times.size());

		for (ModeTimes modeTimes : this.times.values()) {
			result.append(modeTimes.toString());
		}

		return result.toString();
	}
}
/**
 * Returns a dash-only string of the same length as the input, used to underline headlines.
 */
private static String createUnderline(String input) {

	char[] dashes = new char[input.length()];
	Arrays.fill(dashes, '-');

	return new String(dashes);
}
/**
 * Timing samples of a single {@link Api} within one {@link Mode}, reporting
 * average/median values and deviations from a reference measurement.
 */
static class ApiTimes {

	private static final String TIME_TEMPLATE = "%s %s time -\tAverage: %sms%s,%sMedian: %sms%s";

	private static final DecimalFormat TIME_FORMAT;
	private static final DecimalFormat DEVIATION_FORMAT;

	static {

		TIME_FORMAT = new DecimalFormat("0.00");

		// Deviations render an explicit '+' for positive values.
		DEVIATION_FORMAT = new DecimalFormat("0.00");
		DEVIATION_FORMAT.setPositivePrefix("+");
	}

	private final Api api;
	private final Mode mode;
	private final List<Double> times;

	public ApiTimes(Api api, Mode mode) {

		this.api = api;
		this.mode = mode;
		this.times = new ArrayList<Double>();
	}

	public void add(double time) {
		times.add(time);
	}

	public boolean hasTimes() {
		return !times.isEmpty();
	}

	/**
	 * Returns the arithmetic mean of all samples, {@code 0.0} if none were recorded.
	 */
	public double getAverage() {

		double sum = 0;

		for (Double time : times) {
			sum += time;
		}

		// A zero sum also covers the empty case and avoids dividing by zero.
		return sum == 0.0 ? 0.0 : sum / times.size();
	}

	/**
	 * Returns the median of all samples, {@code 0.0} if none were recorded.
	 */
	public double getMedian() {

		if (times.isEmpty()) {
			return 0.0;
		}

		List<Double> sorted = new ArrayList<Double>(times);
		Collections.sort(sorted);

		int size = sorted.size();
		int middle = size / 2;

		// Even sample count: mean of the two middle values; odd: the middle value itself.
		return size % 2 == 0 ? (sorted.get(middle - 1) + sorted.get(middle)) / 2 : sorted.get(middle);
	}

	private double getDeviationFrom(double otherAverage) {
		return getAverage() * 100 / otherAverage - 100;
	}

	private double getMedianDeviationFrom(double otherMedian) {
		return getMedian() * 100 / otherMedian - 100;
	}

	/**
	 * Renders this measurement without deviation columns (used for the reference API).
	 */
	public String print() {
		return times.isEmpty() ? "" : basicPrint("", "\t\t", "") + '\n';
	}

	/**
	 * Renders this measurement including percentage deviations from the given reference values.
	 */
	public String print(double referenceAverage, double referenceMedian) {

		if (times.isEmpty()) {
			return "";
		}

		return basicPrint(String.format(" %s%%", DEVIATION_FORMAT.format(getDeviationFrom(referenceAverage))), "\t",
				String.format(" %s%%", DEVIATION_FORMAT.format(getMedianDeviationFrom(referenceMedian)))) + '\n';
	}

	private String basicPrint(String extension, String middle, String foo) {
		return String.format(TIME_TEMPLATE, api, mode, TIME_FORMAT.format(getAverage()), extension, middle,
				TIME_FORMAT.format(getMedian()), foo);
	}

	/*
	 * (non-Javadoc)
	 * @see java.lang.Object#toString()
	 */
	@Override
	public String toString() {
		return times.isEmpty() ? ""
				: String.format("%s, %s: %s", api, mode, StringUtils.collectionToCommaDelimitedString(times)) + '\n';
	}
}
/**
 * Timing samples of all {@link Api}s within one {@link Mode}.
 */
static class ModeTimes {

	private final Map<Api, ApiTimes> times;

	public ModeTimes(Mode mode) {

		this.times = new HashMap<Api, ApiTimes>();

		// Pre-create one bucket per API so add(…) never encounters a missing key.
		for (Api api : Api.values()) {
			this.times.put(api, new ApiTimes(api, mode));
		}
	}

	public void add(Api api, double time) {
		times.get(api).add(time);
	}

	/**
	 * Renders all recorded API timings. The first API carrying samples is printed as-is and
	 * becomes the reference; subsequent APIs are printed with their deviation from it.
	 */
	@SuppressWarnings("null")
	public String print() {

		if (times.isEmpty()) {
			return "";
		}

		StringBuilder report = new StringBuilder();
		Double referenceAverage = null;
		Double referenceMedian = null;

		for (Api api : Api.values()) {

			ApiTimes apiTimes = times.get(api);

			if (!apiTimes.hasTimes()) {
				continue;
			}

			if (referenceAverage == null) {
				report.append(apiTimes.print());
				referenceAverage = apiTimes.getAverage();
				referenceMedian = apiTimes.getMedian();
			} else {
				report.append(apiTimes.print(referenceAverage, referenceMedian));
			}
		}

		return report.toString();
	}

	/*
	 * (non-Javadoc)
	 * @see java.lang.Object#toString()
	 */
	@Override
	public String toString() {

		StringBuilder result = new StringBuilder(times.size());

		for (ApiTimes apiTimes : this.times.values()) {
			result.append(apiTimes.toString());
		}

		return result.toString();
	}
}
}

View File

@@ -0,0 +1,293 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.util.Arrays;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.reactivestreams.Publisher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.ComponentScan.Filter;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.ImportResource;
import org.springframework.data.annotation.Id;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.repository.config.EnableReactiveMongoRepositories;
import org.springframework.data.repository.RepositoryDefinition;
import org.springframework.data.repository.reactive.ReactivePagingAndSortingRepository;
import org.springframework.data.repository.reactive.RxJavaPagingAndSortingRepository;
import org.springframework.stereotype.Repository;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import lombok.Data;
import lombok.NoArgsConstructor;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.TestSubscriber;
import rx.Observable;
import rx.Single;
/**
* Test for {@link ReactiveMongoRepository} using reactive wrapper type conversion.
*
* @author Mark Paluch
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = ConvertingReactiveMongoRepositoryTests.Config.class)
public class ConvertingReactiveMongoRepositoryTests {
// Picks up the @Repository-annotated nested interfaces below; the MongoDB/reactive
// infrastructure beans come from the imported XML file.
@EnableReactiveMongoRepositories(includeFilters = @Filter(value = Repository.class), considerNestedRepositories = true)
@ImportResource("classpath:reactive-infrastructure.xml")
static class Config {}
@Autowired ReactiveMongoTemplate template;
@Autowired MixedReactivePersonRepostitory reactiveRepository;
@Autowired ReactivePersonRepostitory reactivePersonRepostitory;
@Autowired RxJavaPersonRepostitory rxJavaPersonRepostitory;
// Fixture entities re-created before every test.
ReactivePerson dave, oliver, carter, boyd, stefan, leroi, alicia;
@Before
public void setUp() throws Exception {
// Wipe the collection, then insert a fixed set of persons and block until the save completes.
reactiveRepository.deleteAll().block();
dave = new ReactivePerson("Dave", "Matthews", 42);
oliver = new ReactivePerson("Oliver August", "Matthews", 4);
carter = new ReactivePerson("Carter", "Beauford", 49);
boyd = new ReactivePerson("Boyd", "Tinsley", 45);
stefan = new ReactivePerson("Stefan", "Lessard", 34);
leroi = new ReactivePerson("Leroi", "Moore", 41);
alicia = new ReactivePerson("Alicia", "Keys", 30);
TestSubscriber<ReactivePerson> subscriber = TestSubscriber.create();
reactiveRepository.save(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)).subscribe(subscriber);
subscriber.await().assertComplete().assertNoError();
}
/**
 * CRUD method declared with Reactive Streams types should be invocable.
 *
 * @see DATAMONGO-1444
 */
@Test
public void reactiveStreamsMethodsShouldWork() throws Exception {
TestSubscriber<Boolean> subscriber = TestSubscriber.subscribe(reactivePersonRepostitory.exists(dave.getId()));
subscriber.awaitAndAssertNextValueCount(1).assertValues(true);
}
/**
 * Derived query returning a Reactive Streams {@code Publisher} should be invocable.
 *
 * @see DATAMONGO-1444
 */
@Test
public void reactiveStreamsQueryMethodsShouldWork() throws Exception {
TestSubscriber<ReactivePerson> subscriber = TestSubscriber
.subscribe(reactivePersonRepostitory.findByLastname(boyd.getLastname()));
subscriber.awaitAndAssertNextValueCount(1).assertValues(boyd);
}
/**
 * CRUD method exposed via RxJava wrapper types should be invocable.
 *
 * @see DATAMONGO-1444
 */
@Test
public void simpleRxJavaMethodsShouldWork() throws Exception {
rx.observers.TestSubscriber<Boolean> subscriber = new rx.observers.TestSubscriber<>();
rxJavaPersonRepostitory.exists(dave.getId()).subscribe(subscriber);
subscriber.awaitTerminalEvent();
subscriber.assertCompleted();
subscriber.assertNoErrors();
subscriber.assertValue(true);
}
/**
 * An RxJava {@code Single} argument should be converted for the CRUD method invocation.
 *
 * @see DATAMONGO-1444
 */
@Test
public void existsWithSingleRxJavaIdMethodsShouldWork() throws Exception {
rx.observers.TestSubscriber<Boolean> subscriber = new rx.observers.TestSubscriber<>();
rxJavaPersonRepostitory.exists(Single.just(dave.getId())).subscribe(subscriber);
subscriber.awaitTerminalEvent();
subscriber.assertCompleted();
subscriber.assertNoErrors();
subscriber.assertValue(true);
}
/**
 * Derived query returning an RxJava {@code Observable} should emit the single match.
 *
 * @see DATAMONGO-1444
 */
@Test
public void singleRxJavaQueryMethodShouldWork() throws Exception {
rx.observers.TestSubscriber<ReactivePerson> subscriber = new rx.observers.TestSubscriber<>();
rxJavaPersonRepostitory.findByFirstnameAndLastname(dave.getFirstname(), dave.getLastname()).subscribe(subscriber);
subscriber.awaitTerminalEvent();
subscriber.assertCompleted();
subscriber.assertNoErrors();
subscriber.assertValue(dave);
}
/**
 * Projection interfaces should work together with RxJava {@code Single} return types.
 *
 * @see DATAMONGO-1444
 */
@Test
public void singleProjectedRxJavaQueryMethodShouldWork() throws Exception {
rx.observers.TestSubscriber<ProjectedPerson> subscriber = new rx.observers.TestSubscriber<>();
rxJavaPersonRepostitory.findProjectedByLastname(carter.getLastname()).subscribe(subscriber);
subscriber.awaitTerminalEvent();
subscriber.assertCompleted();
subscriber.assertNoErrors();
ProjectedPerson projectedPerson = subscriber.getOnNextEvents().get(0);
assertThat(projectedPerson.getFirstname(), is(equalTo(carter.getFirstname())));
}
/**
 * Derived query declared with an {@code Observable} return type should be invocable.
 *
 * @see DATAMONGO-1444
 */
@Test
public void observableRxJavaQueryMethodShouldWork() throws Exception {
rx.observers.TestSubscriber<ReactivePerson> subscriber = new rx.observers.TestSubscriber<>();
rxJavaPersonRepostitory.findByLastname(boyd.getLastname()).subscribe(subscriber);
subscriber.awaitTerminalEvent();
subscriber.assertCompleted();
subscriber.assertNoErrors();
subscriber.assertValue(boyd);
}
/**
 * A repository mixing Reactor and RxJava signatures should convert between the wrapper types.
 *
 * @see DATAMONGO-1444
 */
@Test
public void mixedRepositoryShouldWork() throws Exception {
ReactivePerson value = reactiveRepository.findByLastname(boyd.getLastname()).toBlocking().value();
assertThat(value, is(equalTo(boyd)));
}
/**
 * An RxJava {@code Single} method argument should be usable as deferred query input.
 *
 * @see DATAMONGO-1444
 */
@Test
public void shouldFindOneBySingleOfLastName() throws Exception {
ReactivePerson carter = reactiveRepository.findByLastname(Single.just("Beauford")).block();
assertThat(carter.getFirstname(), is(equalTo("Carter")));
}
/**
 * An RxJava {@code Observable} argument should contribute multiple values to an IN query.
 *
 * @see DATAMONGO-1444
 */
@Test
public void shouldFindByObservableOfLastNameIn() throws Exception {
List<ReactivePerson> persons = reactiveRepository.findByLastnameIn(Observable.just("Beauford", "Matthews"))
.collectList().block();
assertThat(persons, hasItems(carter, dave, oliver));
}
/**
 * A Reactor {@code Flux} argument combined with a plain argument should be convertible
 * into a derived query with an RxJava return type.
 *
 * @see DATAMONGO-1444
 */
@Test
public void shouldFindByPublisherOfLastNameInAndAgeGreater() throws Exception {
List<ReactivePerson> persons = reactiveRepository
.findByLastnameInAndAgeGreaterThan(Flux.just("Beauford", "Matthews"), 41).toList().toBlocking().single();
assertThat(persons, hasItems(carter, dave));
}
// Repository declared purely with Reactive Streams types.
@Repository
interface ReactivePersonRepostitory extends ReactivePagingAndSortingRepository<ReactivePerson, String> {
Publisher<ReactivePerson> findByLastname(String lastname);
}
// Repository declared purely with RxJava types.
@Repository
interface RxJavaPersonRepostitory extends RxJavaPagingAndSortingRepository<ReactivePerson, String> {
Observable<ReactivePerson> findByFirstnameAndLastname(String firstname, String lastname);
Single<ReactivePerson> findByLastname(String lastname);
Single<ProjectedPerson> findProjectedByLastname(String lastname);
}
// Repository mixing Reactor and RxJava types in arguments and return values.
@Repository
interface MixedReactivePersonRepostitory extends ReactiveMongoRepository<ReactivePerson, String> {
Single<ReactivePerson> findByLastname(String lastname);
Mono<ReactivePerson> findByLastname(Single<String> lastname);
Flux<ReactivePerson> findByLastnameIn(Observable<String> lastname);
Flux<ReactivePerson> findByLastname(String lastname, Sort sort);
Observable<ReactivePerson> findByLastnameInAndAgeGreaterThan(Flux<String> lastname, int age);
}
// Test entity; equals/hashCode/getters generated by Lombok's @Data.
@Document
@Data
@NoArgsConstructor
static class ReactivePerson {
@Id String id;
String firstname;
String lastname;
int age;
public ReactivePerson(String firstname, String lastname, int age) {
this.firstname = firstname;
this.lastname = lastname;
this.age = age;
}
}
// Closed projection exposing a subset of ReactivePerson's properties.
interface ProjectedPerson {
String getId();
String getFirstname();
}
}

View File

@@ -0,0 +1,442 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.springframework.data.domain.Sort.Direction.*;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.reactivestreams.Publisher;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanClassLoaderAware;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.BeanFactoryAware;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Order;
import org.springframework.data.geo.Circle;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.GeoResult;
import org.springframework.data.geo.Metrics;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.CollectionOptions;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.repository.Person.Sex;
import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory;
import org.springframework.data.mongodb.repository.support.SimpleReactiveMongoRepository;
import org.springframework.data.repository.Repository;
import org.springframework.data.repository.query.DefaultEvaluationContextProvider;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import lombok.NoArgsConstructor;
import reactor.core.Cancellation;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.TestSubscriber;
/**
* Test for {@link ReactiveMongoRepository} query methods.
*
* @author Mark Paluch
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:reactive-infrastructure.xml")
public class ReactiveMongoRepositoryTests implements BeanClassLoaderAware, BeanFactoryAware {
@Autowired ReactiveMongoTemplate template;
ReactiveMongoRepositoryFactory factory;
private ClassLoader classLoader;
private BeanFactory beanFactory;
private ReactivePersonRepository repository;
private ReactiveCappedCollectionRepository cappedRepository;
// Fixture entities re-created before every test.
Person dave, oliver, carter, boyd, stefan, leroi, alicia;
@Override
public void setBeanClassLoader(ClassLoader classLoader) {
// Fall back to the default class loader when the container passes none.
this.classLoader = classLoader == null ? org.springframework.util.ClassUtils.getDefaultClassLoader() : classLoader;
}
@Override
public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
this.beanFactory = beanFactory;
}
@Before
public void setUp() throws Exception {
// Build the repository factory manually (instead of via @EnableReactiveMongoRepositories)
// so the test controls base class, class loader and SpEL evaluation context.
factory = new ReactiveMongoRepositoryFactory(template);
factory.setRepositoryBaseClass(SimpleReactiveMongoRepository.class);
factory.setBeanClassLoader(classLoader);
factory.setBeanFactory(beanFactory);
factory.setEvaluationContextProvider(DefaultEvaluationContextProvider.INSTANCE);
repository = factory.getRepository(ReactivePersonRepository.class);
cappedRepository = factory.getRepository(ReactiveCappedCollectionRepository.class);
repository.deleteAll().block();
dave = new Person("Dave", "Matthews", 42);
oliver = new Person("Oliver August", "Matthews", 4);
carter = new Person("Carter", "Beauford", 49);
carter.setSkills(Arrays.asList("Drums", "percussion", "vocals"));
Thread.sleep(10);
boyd = new Person("Boyd", "Tinsley", 45);
boyd.setSkills(Arrays.asList("Violin", "Electric Violin", "Viola", "Mandolin", "Vocals", "Guitar"));
stefan = new Person("Stefan", "Lessard", 34);
leroi = new Person("Leroi", "Moore", 41);
alicia = new Person("Alicia", "Keys", 30, Sex.FEMALE);
TestSubscriber<Person> subscriber = TestSubscriber.create();
repository.save(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)).subscribe(subscriber);
subscriber.await().assertComplete().assertNoError();
}
/**
 * @see DATAMONGO-1444
 */
@Test
public void shouldFindByLastName() {
List<Person> list = repository.findByLastname("Matthews").collectList().block();
assertThat(list, hasSize(2));
}
/**
 * Page queries wrapped in a {@code Mono} should compute content and total page count.
 *
 * @see DATAMONGO-1444
 */
@Test
public void shouldFindMonoOfPage() {
Mono<Page<Person>> pageMono = repository.findMonoPageByLastname("Matthews", new PageRequest(0, 1));
Page<Person> persons = pageMono.block();
assertThat(persons.getContent(), hasSize(1));
assertThat(persons.getTotalPages(), is(2));
pageMono = repository.findMonoPageByLastname("Matthews", new PageRequest(0, 100));
persons = pageMono.block();
assertThat(persons.getContent(), hasSize(2));
assertThat(persons.getTotalPages(), is(1));
}
/**
 * Slice queries wrapped in a {@code Mono} should report whether a next slice exists.
 *
 * @see DATAMONGO-1444
 */
@Test
public void shouldFindMonoOfSlice() {
Mono<Slice<Person>> pageMono = repository.findMonoSliceByLastname("Matthews", new PageRequest(0, 1));
Slice<Person> persons = pageMono.block();
assertThat(persons.getContent(), hasSize(1));
assertThat(persons.hasNext(), is(true));
pageMono = repository.findMonoSliceByLastname("Matthews", new PageRequest(0, 100));
persons = pageMono.block();
assertThat(persons.getContent(), hasSize(2));
assertThat(persons.hasNext(), is(false));
}
/**
 * @see DATAMONGO-1444
 */
@Test
public void shouldFindOneByLastName() {
Person carter = repository.findOneByLastname("Beauford").block();
assertThat(carter.getFirstname(), is(equalTo("Carter")));
}
/**
 * A {@code Publisher} argument should be usable as deferred single-value query input.
 *
 * @see DATAMONGO-1444
 */
@Test
public void shouldFindOneByPublisherOfLastName() {
Person carter = repository.findByLastname(Mono.just("Beauford")).block();
assertThat(carter.getFirstname(), is(equalTo("Carter")));
}
/**
 * A {@code Publisher} argument should contribute multiple values to an IN query.
 *
 * @see DATAMONGO-1444
 */
@Test
public void shouldFindByPublisherOfLastNameIn() {
List<Person> persons = repository.findByLastnameIn(Flux.just("Beauford", "Matthews")).collectList().block();
assertThat(persons, hasItems(carter, dave, oliver));
}
/**
 * @see DATAMONGO-1444
 */
@Test
public void shouldFindByPublisherOfLastNameInAndAgeGreater() {
List<Person> persons = repository.findByLastnameInAndAgeGreaterThan(Flux.just("Beauford", "Matthews"), 41)
.collectList().block();
assertThat(persons, hasItems(carter, dave));
}
/**
 * Publisher arguments should also be resolvable inside a string-based {@code @Query}.
 *
 * @see DATAMONGO-1444
 */
@Test
public void shouldFindUsingPublishersInStringQuery() {
List<Person> persons = repository.findStringQuery(Flux.just("Beauford", "Matthews"), Mono.just(41)).collectList()
.block();
assertThat(persons, hasItems(carter, dave));
}
/**
 * @see DATAMONGO-1444
 */
@Test
public void shouldFindByLastNameAndSort() {
List<Person> persons = repository.findByLastname("Matthews", new Sort(new Order(ASC, "age"))).collectList().block();
assertThat(persons, contains(oliver, dave));
persons = repository.findByLastname("Matthews", new Sort(new Order(DESC, "age"))).collectList().block();
assertThat(persons, contains(dave, oliver));
}
/**
 * An @InfiniteStream query on a capped collection should keep emitting documents as
 * they are inserted, without completing.
 *
 * @see DATAMONGO-1444
 */
@Test
public void shouldUseInfiniteStream() throws Exception {
template.dropCollection(Capped.class).block();
template.createCollection(Capped.class, new CollectionOptions(1000, 100, true)).block();
template.insert(new Capped("value", Math.random())).block();
BlockingQueue<Capped> documents = new LinkedBlockingDeque<>(100);
Cancellation cancellation = cappedRepository.findByKey("value").doOnNext(documents::add).subscribe();
assertThat(documents.poll(5, TimeUnit.SECONDS), is(notNullValue()));
// A document inserted after subscription must still be received by the open stream.
template.insert(new Capped("value", Math.random())).block();
assertThat(documents.poll(5, TimeUnit.SECONDS), is(notNullValue()));
assertThat(documents.isEmpty(), is(true));
cancellation.dispose();
}
/**
 * @see DATAMONGO-1444
 */
@Test
public void shouldUseInfiniteStreamWithProjection() throws Exception {
template.dropCollection(Capped.class).block();
template.createCollection(Capped.class, new CollectionOptions(1000, 100, true)).block();
template.insert(new Capped("value", Math.random())).block();
BlockingQueue<CappedProjection> documents = new LinkedBlockingDeque<>(100);
Cancellation cancellation = cappedRepository.findProjectionByKey("value").doOnNext(documents::add).subscribe();
CappedProjection projection1 = documents.poll(5, TimeUnit.SECONDS);
assertThat(projection1, is(notNullValue()));
assertThat(projection1.getRandom(), is(not(0)));
template.insert(new Capped("value", Math.random())).block();
CappedProjection projection2 = documents.poll(5, TimeUnit.SECONDS);
assertThat(projection2, is(notNullValue()));
assertThat(projection2.getRandom(), is(not(0)));
assertThat(documents.isEmpty(), is(true));
cancellation.dispose();
}
/**
 * @see DATAMONGO-1444
 */
@Test
public void findsPeopleByLocationWithinCircle() {
Point point = new Point(-73.99171, 40.738868);
dave.setLocation(point);
repository.save(dave).block();
repository.findByLocationWithin(new Circle(-78.99171, 45.738868, 170)) //
.subscribeWith(TestSubscriber.create()) //
.awaitAndAssertNextValues(dave);
}
/**
 * @see DATAMONGO-1444
 */
@Test
public void findsPeopleByPageableLocationWithinCircle() {
Point point = new Point(-73.99171, 40.738868);
dave.setLocation(point);
repository.save(dave).block();
repository.findByLocationWithin(new Circle(-78.99171, 45.738868, 170), new PageRequest(0, 10)) //
.subscribeWith(TestSubscriber.create()) //
.awaitAndAssertNextValues(dave);
}
/**
 * @see DATAMONGO-1444
 */
@Test
public void findsPeopleGeoresultByLocationWithinBox() {
Point point = new Point(-73.99171, 40.738868);
dave.setLocation(point);
repository.save(dave).block();
repository.findByLocationNear(new Point(-73.99, 40.73), new Distance(2000, Metrics.KILOMETERS)) //
.subscribeWith(TestSubscriber.create()) //
.awaitAndAssertNextValuesWith(personGeoResult -> {
assertThat(personGeoResult.getDistance().getValue(), is(closeTo(1, 1)));
assertThat(personGeoResult.getContent(), is(equalTo(dave)));
});
}
/**
 * @see DATAMONGO-1444
 */
@Test
public void findsPeoplePageableGeoresultByLocationWithinBox() {
Point point = new Point(-73.99171, 40.738868);
dave.setLocation(point);
repository.save(dave).block();
repository.findByLocationNear(new Point(-73.99, 40.73), new Distance(2000, Metrics.KILOMETERS), new PageRequest(0, 10)) //
.subscribeWith(TestSubscriber.create()) //
.awaitAndAssertNextValuesWith(personGeoResult -> {
assertThat(personGeoResult.getDistance().getValue(), is(closeTo(1, 1)));
assertThat(personGeoResult.getContent(), is(equalTo(dave)));
});
}
/**
 * @see DATAMONGO-1444
 */
@Test
public void findsPeopleByLocationWithinBox() {
Point point = new Point(-73.99171, 40.738868);
dave.setLocation(point);
repository.save(dave).block();
repository.findPersonByLocationNear(new Point(-73.99, 40.73), new Distance(2000, Metrics.KILOMETERS)) //
.subscribeWith(TestSubscriber.create()) //
.awaitAndAssertNextValues(dave);
}
// Repository under test covering derived queries, paging/slicing, Publisher arguments,
// string queries and geo-spatial queries.
interface ReactivePersonRepository extends ReactiveMongoRepository<Person, String> {
Flux<Person> findByLastname(String lastname);
Mono<Person> findOneByLastname(String lastname);
Mono<Page<Person>> findMonoPageByLastname(String lastname, Pageable pageRequest);
Mono<Slice<Person>> findMonoSliceByLastname(String lastname, Pageable pageRequest);
Mono<Person> findByLastname(Publisher<String> lastname);
Flux<Person> findByLastnameIn(Publisher<String> lastname);
Flux<Person> findByLastname(String lastname, Sort sort);
Flux<Person> findByLastnameInAndAgeGreaterThan(Flux<String> lastname, int age);
@Query("{ lastname: { $in: ?0 }, age: { $gt : ?1 } }")
Flux<Person> findStringQuery(Flux<String> lastname, Mono<Integer> age);
Flux<Person> findByLocationWithin(Circle circle);
Flux<Person> findByLocationWithin(Circle circle, Pageable pageable);
Flux<GeoResult<Person>> findByLocationNear(Point point, Distance maxDistance);
Flux<GeoResult<Person>> findByLocationNear(Point point, Distance maxDistance, Pageable pageable);
Flux<Person> findPersonByLocationNear(Point point, Distance maxDistance);
}
// Repository over a capped collection; @InfiniteStream queries are expected not to complete.
interface ReactiveCappedCollectionRepository extends Repository<Capped, String> {
@InfiniteStream
Flux<Capped> findByKey(String key);
@InfiniteStream
Flux<CappedProjection> findProjectionByKey(String key);
}
// Entity stored in a capped collection for the infinite-stream tests.
@Document
@NoArgsConstructor
static class Capped {
String id;
String key;
double random;
public Capped(String key, double random) {
this.key = key;
this.random = random;
}
}
// Closed projection exposing only the random value of a Capped document.
interface CappedProjection {
double getRandom();
}
}

View File

@@ -0,0 +1,35 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository;
import reactor.core.publisher.Flux;
/**
* Sample reactive repository managing {@link Person} entities.
*
* @author Mark Paluch
*/
public interface ReactivePersonRepository extends ReactiveMongoRepository<Person, String> {
/**
 * Returns all {@link Person}s with the given lastname.
 *
 * @param lastname must not be {@literal null}.
 * @return a {@link Flux} emitting all matching persons, empty if none match.
 */
Flux<Person> findByLastname(String lastname);
}

View File

@@ -0,0 +1,580 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanClassLoaderAware;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.BeanFactoryAware;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.annotation.Id;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.domain.Sort.Order;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory;
import org.springframework.data.mongodb.repository.support.SimpleReactiveMongoRepository;
import org.springframework.data.repository.query.DefaultEvaluationContextProvider;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import lombok.Data;
import lombok.NoArgsConstructor;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.TestSubscriber;
/**
 * Integration tests for {@link ReactiveMongoRepository} backed by
 * {@link SimpleReactiveMongoRepository}. The repository proxy is created manually via
 * {@link ReactiveMongoRepositoryFactory} so the tests fully control factory configuration.
 * <p>
 * Note: fixes the previously misspelled nested interface name {@code ReactivePersonRepostitory}
 * to {@code ReactivePersonRepository}.
 *
 * @author Mark Paluch
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:reactive-infrastructure.xml")
public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware, BeanFactoryAware {

	@Autowired private ReactiveMongoTemplate template;

	private ReactiveMongoRepositoryFactory factory;
	private ClassLoader classLoader;
	private BeanFactory beanFactory;
	private ReactivePersonRepository repository; // typo fix: was "ReactivePersonRepostitory"

	private ReactivePerson dave, oliver, carter, boyd, stefan, leroi, alicia;

	@Override
	public void setBeanClassLoader(ClassLoader classLoader) {
		// Fall back to the default class loader when the container does not supply one.
		this.classLoader = classLoader == null ? org.springframework.util.ClassUtils.getDefaultClassLoader() : classLoader;
	}

	@Override
	public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
		this.beanFactory = beanFactory;
	}

	@Before
	public void setUp() {

		// Create the repository proxy by hand so the base class and infrastructure are explicit.
		factory = new ReactiveMongoRepositoryFactory(template);
		factory.setRepositoryBaseClass(SimpleReactiveMongoRepository.class);
		factory.setBeanClassLoader(classLoader);
		factory.setBeanFactory(beanFactory);
		factory.setEvaluationContextProvider(DefaultEvaluationContextProvider.INSTANCE);

		repository = factory.getRepository(ReactivePersonRepository.class);

		repository.deleteAll().block();

		dave = new ReactivePerson("Dave", "Matthews", 42);
		oliver = new ReactivePerson("Oliver August", "Matthews", 4);
		carter = new ReactivePerson("Carter", "Beauford", 49);
		boyd = new ReactivePerson("Boyd", "Tinsley", 45);
		stefan = new ReactivePerson("Stefan", "Lessard", 34);
		leroi = new ReactivePerson("Leroi", "Moore", 41);
		alicia = new ReactivePerson("Alicia", "Keys", 30);

		TestSubscriber<ReactivePerson> subscriber = TestSubscriber.create();
		repository.save(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)).subscribe(subscriber);

		subscriber.await().assertComplete().assertNoError();
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void existsByIdShouldReturnTrueForExistingObject() {

		Boolean exists = repository.exists(dave.id).block();

		assertThat(exists, is(true));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void existsByIdShouldReturnFalseForAbsentObject() {

		TestSubscriber<Boolean> testSubscriber = TestSubscriber.subscribe(repository.exists("unknown"));

		testSubscriber.await().assertComplete().assertValues(false).assertNoError();
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void existsByMonoOfIdShouldReturnTrueForExistingObject() {

		Boolean exists = repository.exists(Mono.just(dave.id)).block();

		assertThat(exists, is(true));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void existsByEmptyMonoOfIdShouldReturnEmptyMono() {

		// An empty id publisher must propagate emptiness instead of emitting a value.
		TestSubscriber<Boolean> testSubscriber = TestSubscriber.subscribe(repository.exists(Mono.empty()));

		testSubscriber.await().assertComplete().assertNoValues().assertNoError();
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void findOneShouldReturnObject() {

		ReactivePerson person = repository.findOne(dave.id).block();

		assertThat(person.getFirstname(), is(equalTo("Dave")));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void findOneShouldCompleteWithoutValueForAbsentObject() {

		TestSubscriber<ReactivePerson> testSubscriber = TestSubscriber.subscribe(repository.findOne("unknown"));

		testSubscriber.await().assertComplete().assertNoValues().assertNoError();
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void findOneByMonoOfIdShouldReturnTrueForExistingObject() {

		ReactivePerson person = repository.findOne(Mono.just(dave.id)).block();

		assertThat(person.id, is(equalTo(dave.id)));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void findOneByEmptyMonoOfIdShouldReturnEmptyMono() {

		TestSubscriber<ReactivePerson> testSubscriber = TestSubscriber.subscribe(repository.findOne(Mono.empty()));

		testSubscriber.await().assertComplete().assertNoValues().assertNoError();
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void findAllShouldReturnAllResults() {

		List<ReactivePerson> persons = repository.findAll().collectList().block();

		assertThat(persons, hasSize(7));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void findAllByIterableOfIdShouldReturnResults() {

		List<ReactivePerson> persons = repository.findAll(Arrays.asList(dave.id, boyd.id)).collectList().block();

		assertThat(persons, hasSize(2));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void findAllByPublisherOfIdShouldReturnResults() {

		List<ReactivePerson> persons = repository.findAll(Flux.just(dave.id, boyd.id)).collectList().block();

		assertThat(persons, hasSize(2));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void findAllByEmptyPublisherOfIdShouldReturnResults() {

		TestSubscriber<ReactivePerson> testSubscriber = TestSubscriber.subscribe(repository.findAll(Flux.empty()));

		testSubscriber.await().assertComplete().assertNoValues().assertNoError();
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void findAllWithSortShouldReturnResults() {

		List<ReactivePerson> persons = repository.findAll(new Sort(new Order(Direction.ASC, "age"))).collectList().block();

		assertThat(persons, hasSize(7));
		// oliver (age 4) is the youngest, hence first when sorting ascending by age.
		assertThat(persons.get(0).getId(), is(equalTo(oliver.getId())));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void findAllWithPageRequestShouldReturnPage() {

		Page<ReactivePerson> people = repository.findAll(new PageRequest(0, 10)).block();

		assertThat(people.getTotalPages(), is(1));

		List<String> ids = people.getContent().stream().map(ReactivePerson::getId).collect(Collectors.toList());

		assertThat(ids, hasSize(7));
		assertThat(ids, hasItems(dave.id, carter.id));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void findAllWithPageRequestOfPageSize1ShouldReturnPage() {

		Page<ReactivePerson> people = repository.findAll(new PageRequest(1, 1)).block();

		List<String> ids = people.getContent().stream().map(ReactivePerson::getId).collect(Collectors.toList());

		// 7 entities with page size 1 yields 7 total pages.
		assertThat(people.getTotalPages(), is(7));
		assertThat(ids, hasSize(1));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void countShouldReturnNumberOfRecords() {

		TestSubscriber<Long> testSubscriber = TestSubscriber.subscribe(repository.count());

		testSubscriber.await().assertComplete().assertValueCount(1).assertValues(7L).assertNoError();
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void insertEntityShouldInsertEntity() {

		repository.deleteAll().block();

		ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36);

		TestSubscriber<ReactivePerson> testSubscriber = TestSubscriber.subscribe(repository.insert(person));

		testSubscriber.await().assertComplete().assertValueCount(1).assertValues(person);

		assertThat(person.getId(), is(notNullValue()));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void insertShouldDeferredWrite() {

		ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36);

		// No subscription: the insert must not execute and no id may be assigned.
		repository.insert(person);

		assertThat(person.getId(), is(nullValue()));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void insertIterableOfEntitiesShouldInsertEntity() {

		repository.deleteAll().block();

		dave.setId(null);
		oliver.setId(null);
		boyd.setId(null);

		TestSubscriber<ReactivePerson> testSubscriber = TestSubscriber
				.subscribe(repository.insert(Arrays.asList(dave, oliver, boyd)));

		testSubscriber.await().assertComplete().assertValueCount(3).assertValues(dave, oliver, boyd);

		assertThat(dave.getId(), is(notNullValue()));
		assertThat(oliver.getId(), is(notNullValue()));
		assertThat(boyd.getId(), is(notNullValue()));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void insertPublisherOfEntitiesShouldInsertEntity() {

		repository.deleteAll().block();

		dave.setId(null);
		oliver.setId(null);
		boyd.setId(null);

		TestSubscriber<ReactivePerson> testSubscriber = TestSubscriber
				.subscribe(repository.insert(Flux.just(dave, oliver, boyd)));

		testSubscriber.await().assertComplete().assertValueCount(3);

		assertThat(dave.getId(), is(notNullValue()));
		assertThat(oliver.getId(), is(notNullValue()));
		assertThat(boyd.getId(), is(notNullValue()));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void saveEntityShouldUpdateExistingEntity() {

		dave.setFirstname("Hello, Dave");
		dave.setLastname("Bowman");

		TestSubscriber<ReactivePerson> testSubscriber = TestSubscriber.subscribe(repository.save(dave));

		testSubscriber.await().assertComplete().assertValueCount(1).assertValues(dave);

		// After the update, dave no longer matches "Matthews"; only oliver remains.
		List<ReactivePerson> matthews = repository.findByLastname("Matthews").collectList().block();
		assertThat(matthews, hasSize(1));
		assertThat(matthews, contains(oliver));
		assertThat(matthews, not(contains(dave)));

		ReactivePerson reactivePerson = repository.findOne(dave.id).block();
		assertThat(reactivePerson.getFirstname(), is(equalTo(dave.getFirstname())));
		assertThat(reactivePerson.getLastname(), is(equalTo(dave.getLastname())));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void saveEntityShouldInsertNewEntity() {

		ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36);

		TestSubscriber<ReactivePerson> testSubscriber = TestSubscriber.subscribe(repository.save(person));

		testSubscriber.await().assertComplete().assertValueCount(1).assertValues(person);

		ReactivePerson reactivePerson = repository.findOne(person.id).block();
		assertThat(reactivePerson.getFirstname(), is(equalTo(person.getFirstname())));
		assertThat(reactivePerson.getLastname(), is(equalTo(person.getLastname())));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void saveIterableOfNewEntitiesShouldInsertEntity() {

		repository.deleteAll().block();

		dave.setId(null);
		oliver.setId(null);
		boyd.setId(null);

		TestSubscriber<ReactivePerson> testSubscriber = TestSubscriber
				.subscribe(repository.save(Arrays.asList(dave, oliver, boyd)));

		testSubscriber.await().assertComplete().assertValueCount(3).assertValues(dave, oliver, boyd);

		assertThat(dave.getId(), is(notNullValue()));
		assertThat(oliver.getId(), is(notNullValue()));
		assertThat(boyd.getId(), is(notNullValue()));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void saveIterableOfMixedEntitiesShouldInsertEntity() {

		// person is new (insert), dave already exists (update) - both must be handled in one call.
		ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36);

		dave.setFirstname("Hello, Dave");
		dave.setLastname("Bowman");

		TestSubscriber<ReactivePerson> testSubscriber = TestSubscriber
				.subscribe(repository.save(Arrays.asList(person, dave)));

		testSubscriber.await().assertComplete().assertValueCount(2);

		ReactivePerson persistentDave = repository.findOne(dave.id).block();
		assertThat(persistentDave, is(equalTo(dave)));

		assertThat(person.id, is(notNullValue()));
		ReactivePerson persistentHomer = repository.findOne(person.id).block();
		assertThat(persistentHomer, is(equalTo(person)));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void savePublisherOfEntitiesShouldInsertEntity() {

		repository.deleteAll().block();

		dave.setId(null);
		oliver.setId(null);
		boyd.setId(null);

		TestSubscriber<ReactivePerson> testSubscriber = TestSubscriber
				.subscribe(repository.save(Flux.just(dave, oliver, boyd)));

		testSubscriber.await().assertComplete().assertValueCount(3);

		assertThat(dave.getId(), is(notNullValue()));
		assertThat(oliver.getId(), is(notNullValue()));
		assertThat(boyd.getId(), is(notNullValue()));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void deleteAllShouldRemoveEntities() {

		repository.deleteAll().block();

		TestSubscriber<ReactivePerson> testSubscriber = TestSubscriber.subscribe(repository.findAll());

		testSubscriber.await().assertComplete().assertValueCount(0);
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void deleteByIdShouldRemoveEntity() {

		TestSubscriber<Void> testSubscriber = TestSubscriber.subscribe(repository.delete(dave.id));

		testSubscriber.await().assertComplete().assertNoValues();

		TestSubscriber<ReactivePerson> verificationSubscriber = TestSubscriber.subscribe(repository.findOne(dave.id));

		verificationSubscriber.await().assertComplete().assertNoValues();
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void deleteShouldRemoveEntity() {

		TestSubscriber<Void> testSubscriber = TestSubscriber.subscribe(repository.delete(dave));

		testSubscriber.await().assertComplete().assertNoValues();

		TestSubscriber<ReactivePerson> verificationSubscriber = TestSubscriber.subscribe(repository.findOne(dave.id));

		verificationSubscriber.await().assertComplete().assertNoValues();
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void deleteIterableOfEntitiesShouldRemoveEntities() {

		TestSubscriber<Void> testSubscriber = TestSubscriber.subscribe(repository.delete(Arrays.asList(dave, boyd)));

		testSubscriber.await().assertComplete().assertNoValues();

		TestSubscriber<ReactivePerson> verificationSubscriber = TestSubscriber.subscribe(repository.findOne(boyd.id));

		verificationSubscriber.await().assertComplete().assertNoValues();

		List<ReactivePerson> matthews = repository.findByLastname("Matthews").collectList().block();
		assertThat(matthews, hasSize(1));
		assertThat(matthews, contains(oliver));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void deletePublisherOfEntitiesShouldRemoveEntities() {

		TestSubscriber<Void> testSubscriber = TestSubscriber.subscribe(repository.delete(Flux.just(dave, boyd)));

		testSubscriber.await().assertComplete().assertNoValues();

		TestSubscriber<ReactivePerson> verificationSubscriber = TestSubscriber.subscribe(repository.findOne(boyd.id));

		verificationSubscriber.await().assertComplete().assertNoValues();

		List<ReactivePerson> matthews = repository.findByLastname("Matthews").collectList().block();
		assertThat(matthews, hasSize(1));
		assertThat(matthews, contains(oliver));
	}

	// Repository under test. Name fixed from the misspelled "ReactivePersonRepostitory".
	interface ReactivePersonRepository extends ReactiveMongoRepository<ReactivePerson, String> {

		Flux<ReactivePerson> findByLastname(String lastname);
	}

	// Test entity; equals/hashCode/getters/setters generated by Lombok's @Data.
	@Data
	@NoArgsConstructor
	static class ReactivePerson {

		@Id String id;

		String firstname;
		String lastname;
		int age;

		public ReactivePerson(String firstname, String lastname, int age) {

			this.firstname = firstname;
			this.lastname = lastname;
			this.age = age;
		}
	}
}

View File

@@ -0,0 +1,76 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.config;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import java.util.Arrays;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.repository.ReactivePersonRepository;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import com.mongodb.reactivestreams.client.MongoClients;
/**
 * Integration tests for {@link ReactiveMongoRepositoriesRegistrar}.
 *
 * @author Mark Paluch
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration
public class ReactiveMongoRepositoriesRegistrarIntegrationTests {

	@Configuration
	@EnableReactiveMongoRepositories(basePackages = "org.springframework.data.mongodb.repository")
	static class Config {

		// Template backed by the Reactive Streams MongoDB driver; "database" is the target database name.
		@Bean
		public ReactiveMongoTemplate reactiveMongoTemplate() throws Exception {
			return new ReactiveMongoTemplate(new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database"));
		}
	}

	@Autowired ReactivePersonRepository personRepository;
	@Autowired ApplicationContext context;

	/**
	 * Context bootstrap plus the autowired repository above prove the registrar created the beans.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void testConfiguration() {}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void registersTypePredictingPostProcessor() {

		Iterable<String> beanNames = Arrays.asList(context.getBeanDefinitionNames());

		// The repository infrastructure registers a type-predicting post processor bean per factory.
		assertThat(beanNames, hasItem(containsString("RepositoryFactoryBeanSupport_Predictor")));
	}
}

View File

@@ -0,0 +1,121 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.config;
import static org.junit.Assert.*;
import java.util.Collection;
import org.junit.Test;
import org.springframework.core.env.Environment;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.core.io.ResourceLoader;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.type.StandardAnnotationMetadata;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.ReactiveMongoRepository;
import org.springframework.data.repository.Repository;
import org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource;
import org.springframework.data.repository.config.RepositoryConfiguration;
import org.springframework.data.repository.config.RepositoryConfigurationSource;
import org.springframework.data.repository.reactive.ReactiveCrudRepository;
import org.springframework.data.repository.reactive.RxJavaCrudRepository;
/**
 * Unit tests for {@link ReactiveMongoRepositoryConfigurationExtension}. Verifies strict-matching
 * rules: a repository is picked up when its domain type carries {@link Document} or when it
 * extends a store-specific base interface.
 *
 * @author Mark Paluch
 */
public class ReactiveMongoRepositoryConfigurationExtensionUnitTests {

	StandardAnnotationMetadata metadata = new StandardAnnotationMetadata(Config.class, true);
	ResourceLoader loader = new PathMatchingResourcePatternResolver();
	Environment environment = new StandardEnvironment();
	RepositoryConfigurationSource configurationSource = new AnnotationRepositoryConfigurationSource(metadata,
			EnableReactiveMongoRepositories.class, loader, environment);

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void isStrictMatchIfDomainTypeIsAnnotatedWithDocument() {

		ReactiveMongoRepositoryConfigurationExtension configurationExtension = new ReactiveMongoRepositoryConfigurationExtension();

		assertHasRepo(SampleRepository.class,
				configurationExtension.getRepositoryConfigurations(configurationSource, loader, true));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void isStrictMatchIfRepositoryExtendsStoreSpecificBase() {

		ReactiveMongoRepositoryConfigurationExtension configurationExtension = new ReactiveMongoRepositoryConfigurationExtension();

		assertHasRepo(StoreRepository.class,
				configurationExtension.getRepositoryConfigurations(configurationSource, loader, true));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void isNotStrictMatchIfDomainTypeIsNotAnnotatedWithDocument() {

		ReactiveMongoRepositoryConfigurationExtension configurationExtension = new ReactiveMongoRepositoryConfigurationExtension();

		assertDoesNotHaveRepo(UnannotatedRepository.class,
				configurationExtension.getRepositoryConfigurations(configurationSource, loader, true));
	}

	// Asserts that a configuration for the given repository interface is present.
	private static void assertHasRepo(Class<?> repositoryInterface,
			Collection<RepositoryConfiguration<RepositoryConfigurationSource>> configs) {

		boolean found = configs.stream()
				.anyMatch(candidate -> candidate.getRepositoryInterface().equals(repositoryInterface.getName()));

		if (!found) {
			fail("Expected to find config for repository interface ".concat(repositoryInterface.getName()).concat(" but got ")
					.concat(configs.toString()));
		}
	}

	// Asserts that no configuration for the given repository interface is present.
	private static void assertDoesNotHaveRepo(Class<?> repositoryInterface,
			Collection<RepositoryConfiguration<RepositoryConfigurationSource>> configs) {

		boolean found = configs.stream()
				.anyMatch(candidate -> candidate.getRepositoryInterface().equals(repositoryInterface.getName()));

		if (found) {
			fail("Expected not to find config for repository interface ".concat(repositoryInterface.getName()));
		}
	}

	@EnableReactiveMongoRepositories(considerNestedRepositories = true)
	static class Config {
	}

	@Document
	static class Sample {}

	static class Store {}

	interface SampleRepository extends ReactiveCrudRepository<Sample, Long> {}

	interface UnannotatedRepository extends RxJavaCrudRepository<Store, Long> {}

	interface StoreRepository extends ReactiveMongoRepository<Store, Long> {}
}

View File

@@ -0,0 +1,28 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.custom;
import org.springframework.data.mongodb.repository.User;
import org.springframework.data.repository.reactive.RxJavaCrudRepository;
/**
 * Reactive repository interface composed of the RxJava CRUD base interface and a custom fragment
 * ({@link CustomReactiveMongoRepositoryCustom}) whose implementation is supplied separately.
 *
 * @author Mark Paluch
 */
public interface CustomReactiveMongoRepository
		extends RxJavaCrudRepository<User, String>, CustomReactiveMongoRepositoryCustom {
}

View File

@@ -0,0 +1,30 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.custom;
import java.util.List;
import org.springframework.data.mongodb.repository.User;
/**
 * Custom fragment interface mixed into {@link CustomReactiveMongoRepository}; implemented by
 * {@code CustomReactiveMongoRepositoryImpl}.
 *
 * @author Mark Paluch
 */
public interface CustomReactiveMongoRepositoryCustom {

	/**
	 * Returns users for the given username via the custom (non-derived) implementation.
	 *
	 * @param username the username to look up.
	 * @return the matching users.
	 */
	List<User> findByUsernameCustom(String username);
}

View File

@@ -0,0 +1,37 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.custom;
import java.util.Collections;
import java.util.List;
import org.springframework.data.mongodb.repository.User;
/**
 * Custom fragment implementation backing {@link CustomReactiveMongoRepositoryCustom}. Creates a
 * stub {@link User} carrying the requested username rather than querying the database, so tests
 * can verify that the custom implementation - not a derived query - was invoked.
 *
 * @author Mark Paluch
 */
public class CustomReactiveMongoRepositoryImpl implements CustomReactiveMongoRepositoryCustom {

	@Override
	public List<User> findByUsernameCustom(String username) {

		User result = new User();
		result.setUsername(username);

		return Collections.singletonList(result);
	}
}

View File

@@ -0,0 +1,62 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.custom;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.ImportResource;
import org.springframework.data.mongodb.repository.User;
import org.springframework.data.mongodb.repository.config.EnableReactiveMongoRepositories;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
/**
 * Integration tests for custom reactive Repository implementations.
 *
 * @author Mark Paluch
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration
public class CustomReactiveRepositoryImplementationTests {

	@Configuration
	@EnableReactiveMongoRepositories
	@ImportResource("classpath:reactive-infrastructure.xml")
	static class Config {}

	@Autowired CustomReactiveMongoRepository customMongoRepository;

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void shouldExecuteMethodOnCustomRepositoryImplementation() {

		String username = "bubu";
		// The custom implementation fabricates exactly one User with the requested username,
		// proving the fragment implementation (not a derived query) handled the call.
		List<User> users = customMongoRepository.findByUsernameCustom(username);

		assertThat(users.size(), is(1));
		assertThat(users.get(0), is(notNullValue()));
		assertThat(users.get(0).getUsername(), is(username));
	}
}

View File

@@ -0,0 +1,142 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.query;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import java.lang.reflect.Method;
import java.util.Arrays;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Range;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.GeoResult;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.repository.Person;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.GeoNearExecution;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.PagedExecution;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.SlicedExecution;
import org.springframework.data.util.ClassTypeInformation;
import org.springframework.util.ClassUtils;
import reactor.core.publisher.Flux;
/**
 * Unit tests for {@link ReactiveMongoQueryExecution}. Uses Mockito to verify how the execution
 * strategies translate {@link PageRequest}/geo-near parameters into {@link Query}/{@link NearQuery}
 * settings before delegating to {@link ReactiveMongoOperations}.
 *
 * @author Mark Paluch
 */
@RunWith(MockitoJUnitRunner.class)
public class ReactiveMongoQueryExecutionUnitTests {

	@Mock private ReactiveMongoOperations operations;
	@Mock private MongoParameterAccessor parameterAccessor;

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void slicedExecutionShouldApplyQuerySettings() throws Exception {

		Query query = new Query();
		new SlicedExecution(operations, new PageRequest(1, 10)).execute(query, Person.class, "person");

		// limit is page size + 1 - presumably the extra element signals whether a next slice exists.
		assertThat(query.getLimit(), is(equalTo(11)));
		assertThat(query.getSkip(), is(equalTo(10)));
		verify(operations).find(query, Person.class, "person");
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void pagedExecutionShouldApplyQuerySettings() throws Exception {

		Query query = new Query();
		new PagedExecution(operations, new PageRequest(1, 10)).execute(query, Person.class, "person");

		assertThat(query.getLimit(), is(equalTo(10)));
		assertThat(query.getSkip(), is(equalTo(10)));
		// Paged execution issues both the content query and a count query for total elements.
		verify(operations).find(query, Person.class, "person");
		verify(operations).count(query, Person.class, "person");
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void geoNearExecutionShouldApplyQuerySettings() throws Exception {

		Method geoNear = ClassUtils.getMethod(GeoRepo.class, "geoNear");
		Query query = new Query();
		when(parameterAccessor.getGeoNearLocation()).thenReturn(new Point(1, 2));
		when(parameterAccessor.getDistanceRange()).thenReturn(new Range<>(new Distance(10), new Distance(15)));
		when(parameterAccessor.getPageable()).thenReturn(new PageRequest(1, 10));

		new GeoNearExecution(operations, parameterAccessor, ClassTypeInformation.fromReturnTypeOf(geoNear)).execute(query,
				Person.class, "person");

		// Capture the NearQuery handed to the template and check location, paging and distance range.
		ArgumentCaptor<NearQuery> queryArgumentCaptor = ArgumentCaptor.forClass(NearQuery.class);
		verify(operations).geoNear(queryArgumentCaptor.capture(), eq(Person.class), eq("person"));

		NearQuery nearQuery = queryArgumentCaptor.getValue();
		assertThat(nearQuery.toDocument().get("near"), is(equalTo(Arrays.asList(1d, 2d))));
		assertThat(nearQuery.getSkip(), is(10));
		assertThat(nearQuery.getMinDistance(), is(equalTo(new Distance(10))));
		assertThat(nearQuery.getMaxDistance(), is(equalTo(new Distance(15))));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void geoNearExecutionShouldApplyMinimalSettings() throws Exception {

		Method geoNear = ClassUtils.getMethod(GeoRepo.class, "geoNear");
		Query query = new Query();
		// No pageable and an unbounded distance range: only the location should be applied.
		when(parameterAccessor.getGeoNearLocation()).thenReturn(new Point(1, 2));
		when(parameterAccessor.getDistanceRange()).thenReturn(new Range<>(null, null));

		new GeoNearExecution(operations, parameterAccessor, ClassTypeInformation.fromReturnTypeOf(geoNear)).execute(query,
				Person.class, "person");

		ArgumentCaptor<NearQuery> queryArgumentCaptor = ArgumentCaptor.forClass(NearQuery.class);
		verify(operations).geoNear(queryArgumentCaptor.capture(), eq(Person.class), eq("person"));

		NearQuery nearQuery = queryArgumentCaptor.getValue();
		assertThat(nearQuery.toDocument().get("near"), is(equalTo(Arrays.asList(1d, 2d))));
		assertThat(nearQuery.getSkip(), is(0));
		assertThat(nearQuery.getMinDistance(), is(nullValue()));
		assertThat(nearQuery.getMaxDistance(), is(nullValue()));
	}

	// Minimal repository fixture providing a geo-near return type for ClassTypeInformation.
	interface GeoRepo {
		Flux<GeoResult<Person>> geoNear();
	}
}

View File

@@ -0,0 +1,248 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.query;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.lang.reflect.Method;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.GeoResult;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.User;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.repository.Address;
import org.springframework.data.mongodb.repository.Contact;
import org.springframework.data.mongodb.repository.Meta;
import org.springframework.data.mongodb.repository.Person;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
import org.springframework.data.repository.Repository;
import org.springframework.data.repository.core.support.DefaultRepositoryMetadata;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/**
* Unit test for {@link ReactiveMongoQueryMethod}.
*
* @author Mark Paluch
*/
public class ReactiveMongoQueryMethodUnitTests {

	// Fresh mapping context per test so metadata lookups never leak between tests.
	MongoMappingContext context;

	@Before
	public void setUp() {
		context = new MongoMappingContext();
	}

	/**
	 * The collection must come from the repository's domain type ({@code Contact}) when the
	 * method's return type is not assignable to it.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void detectsCollectionFromRepoTypeIfReturnTypeNotAssignable() throws Exception {

		ReactiveMongoQueryMethod queryMethod = queryMethod(SampleRepository.class, "method");
		MongoEntityMetadata<?> metadata = queryMethod.getEntityInformation();

		assertThat(metadata.getJavaType(), is(typeCompatibleWith(Address.class)));
		assertThat(metadata.getCollectionName(), is("contact"));
	}

	/**
	 * The collection must be derived from the return type when it is assignable to the
	 * repository's domain type.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void detectsCollectionFromReturnTypeIfReturnTypeAssignable() throws Exception {

		MongoQueryMethod queryMethod = queryMethod(SampleRepository2.class, "method");
		MongoEntityMetadata<?> entityInformation = queryMethod.getEntityInformation();

		assertThat(entityInformation.getJavaType(), is(typeCompatibleWith(Person.class)));
		assertThat(entityInformation.getCollectionName(), is("person"));
	}

	/**
	 * Near-queries without a {@code GeoResult} return type must not be considered geo-near or
	 * page queries, and the entity metadata must stick to the repository domain type.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void discoversUserAsDomainTypeForGeoPagingQueryMethod() throws Exception {

		MongoQueryMethod queryMethod = queryMethod(PersonRepository.class, "findByLocationNear", Point.class,
				Distance.class, Pageable.class);
		assertThat(queryMethod.isGeoNearQuery(), is(false));
		assertThat(queryMethod.isPageQuery(), is(false));

		queryMethod = queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class);
		assertThat(queryMethod.isGeoNearQuery(), is(false));
		assertThat(queryMethod.isPageQuery(), is(false));
		assertThat(queryMethod.getEntityInformation().getJavaType(), is(typeCompatibleWith(User.class)));

		// only GeoResult-returning methods qualify as geo-near queries
		assertThat(queryMethod(PersonRepository.class, "findByEmailAddress", String.class, Point.class).isGeoNearQuery(),
				is(true));
		assertThat(queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class).isGeoNearQuery(),
				is(false));
		assertThat(queryMethod(PersonRepository.class, "findByLastname", String.class, Point.class).isGeoNearQuery(),
				is(true));
	}

	/**
	 * Creating a {@link ReactiveMongoQueryMethod} without a mapping context must be rejected.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test(expected = IllegalArgumentException.class)
	public void rejectsNullMappingContext() throws Exception {

		Method method = PersonRepository.class.getMethod("findByFirstname", String.class, Point.class);

		// must construct the reactive query method type under test, not the blocking variant
		new ReactiveMongoQueryMethod(method, new DefaultRepositoryMetadata(PersonRepository.class),
				new SpelAwareProxyProjectionFactory(), null);
	}

	/**
	 * A {@code Mono} result combined with a {@link Pageable} argument is not a valid paging
	 * contract and must be rejected.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test(expected = IllegalStateException.class)
	public void rejectsMonoPageableResult() throws Exception {
		queryMethod(PersonRepository.class, "findMonoByLastname", String.class, Pageable.class);
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void createsMongoQueryMethodObjectForMethodReturningAnInterface() throws Exception {
		queryMethod(SampleRepository2.class, "methodReturningAnInterface");
	}

	/**
	 * An empty {@link Meta} annotation yields meta attributes without any values set.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void createsMongoQueryMethodWithEmptyMetaCorrectly() throws Exception {

		MongoQueryMethod method = queryMethod(PersonRepository.class, "emptyMetaAnnotation");

		assertThat(method.hasQueryMetaAttributes(), is(true));
		assertThat(method.getQueryMetaAttributes().hasValues(), is(false));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void createsMongoQueryMethodWithMaxExecutionTimeCorrectly() throws Exception {

		MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithMaxExecutionTime");

		assertThat(method.hasQueryMetaAttributes(), is(true));
		assertThat(method.getQueryMetaAttributes().getMaxTimeMsec(), is(100L));
	}

	/**
	 * A {@code Mono<Page<…>>} return type counts as a page query.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void acceptsPageableMethodsUsingWrappedPage() throws Exception {

		MongoQueryMethod method = queryMethod(PersonRepository.class, "findMonoPageByLastname", String.class,
				Pageable.class);

		assertThat(method.isPageQuery(), is(true));
		assertThat(method.isSliceQuery(), is(false));
	}

	/**
	 * A {@code Mono<Slice<…>>} return type counts as a slice query.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void acceptsPageableMethodsUsingWrappedSlice() throws Exception {

		MongoQueryMethod method = queryMethod(PersonRepository.class, "findMonoSliceByLastname", String.class,
				Pageable.class);

		assertThat(method.isPageQuery(), is(false));
		assertThat(method.isSliceQuery(), is(true));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void fallsBackToRepositoryDomainTypeIfMethodDoesNotReturnADomainType() throws Exception {

		MongoQueryMethod method = queryMethod(PersonRepository.class, "deleteByUserName", String.class);

		assertThat(method.getEntityInformation().getJavaType(), is(typeCompatibleWith(User.class)));
	}

	// Creates the ReactiveMongoQueryMethod under test for the given repository method.
	private ReactiveMongoQueryMethod queryMethod(Class<?> repository, String name, Class<?>... parameters)
			throws Exception {

		Method method = repository.getMethod(name, parameters);
		ProjectionFactory factory = new SpelAwareProxyProjectionFactory();
		return new ReactiveMongoQueryMethod(method, new DefaultRepositoryMetadata(repository), factory, context);
	}

	interface PersonRepository extends Repository<User, Long> {

		Mono<Person> findMonoByLastname(String lastname, Pageable pageRequest);

		Mono<Page<Person>> findMonoPageByLastname(String lastname, Pageable pageRequest);

		Mono<Slice<Person>> findMonoSliceByLastname(String lastname, Pageable pageRequest);

		// Misses Pageable
		Flux<User> findByLocationNear(Point point, Distance distance);

		Flux<User> findByLocationNear(Point point, Distance distance, Pageable pageable);

		Mono<GeoResult<User>> findByEmailAddress(String lastname, Point location);

		Flux<User> findByFirstname(String firstname, Point location);

		Flux<GeoResult<User>> findByLastname(String lastname, Point location);

		@Meta
		Flux<User> emptyMetaAnnotation();

		@Meta(maxExecutionTimeMs = 100)
		Flux<User> metaWithMaxExecutionTime();

		void deleteByUserName(String userName);
	}

	interface SampleRepository extends Repository<Contact, Long> {

		List<Address> method();
	}

	interface SampleRepository2 extends Repository<Contact, Long> {

		List<Person> method();

		Customer methodReturningAnInterface();
	}

	interface Customer {}
}

View File

@@ -0,0 +1,297 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.query;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.Map;
import javax.xml.bind.DatatypeConverter;
import org.bson.BSON;
import org.bson.Document;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.repository.Address;
import org.springframework.data.mongodb.repository.Person;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
import org.springframework.data.repository.Repository;
import org.springframework.data.repository.core.support.DefaultRepositoryMetadata;
import org.springframework.data.repository.query.DefaultEvaluationContextProvider;
import org.springframework.data.repository.util.QueryExecutionConverters;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/**
* Unit tests for {@link ReactiveStringBasedMongoQuery}.
*
* @author Mark Paluch
*/
@RunWith(MockitoJUnitRunner.class)
public class ReactiveStringBasedMongoQueryUnitTests {
SpelExpressionParser PARSER = new SpelExpressionParser();
@Mock ReactiveMongoOperations operations;
@Mock DbRefResolver factory;
MongoConverter converter;
@Before
public void setUp() {
when(operations.getConverter()).thenReturn(converter);
this.converter = new MappingMongoConverter(factory, new MongoMappingContext());
}
/**
* @see DATAMONGO-1444
*/
@Test
public void bindsSimplePropertyCorrectly() throws Exception {
ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastname", String.class);
ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "Matthews");
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor);
org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}");
assertThat(query.getQueryObject(), is(reference.getQueryObject()));
}
/**
* @see DATAMONGO-1444
*/
@Test
public void bindsComplexPropertyCorrectly() throws Exception {
ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByAddress", Address.class);
Address address = new Address("Foo", "0123", "Bar");
ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, address);
Document dbObject = new Document();
converter.write(address, dbObject);
dbObject.remove(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY);
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor);
Document queryObject = new Document("address", dbObject);
org.springframework.data.mongodb.core.query.Query reference = new BasicQuery(queryObject);
assertThat(query.getQueryObject().toJson(), is(reference.getQueryObject().toJson()));
}
/**
* @see DATAMONGO-1444
*/
@Test
public void constructsDeleteQueryCorrectly() throws Exception {
ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("removeByLastname", String.class);
assertThat(mongoQuery.isDeleteQuery(), is(true));
}
/**
* @see DATAMONGO-1444
*/
@Test(expected = IllegalArgumentException.class)
public void preventsDeleteAndCountFlagAtTheSameTime() throws Exception {
createQueryForMethod("invalidMethod", String.class);
}
/**
* @see DATAMONGO-1444
*/
@Test
public void shouldSupportFindByParameterizedCriteriaAndFields() throws Exception {
ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, new Document("firstname", "first").append("lastname", "last"), Collections.singletonMap("lastname", 1));
ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByParameterizedCriteriaAndFields",
Document.class, Map.class);
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor);
assertThat(query.getQueryObject(),
is(new BasicQuery("{ \"firstname\": \"first\", \"lastname\": \"last\"}").getQueryObject()));
assertThat(query.getFieldsObject(), is(new BasicQuery(null, "{ \"lastname\": 1}").getFieldsObject()));
}
/**
* @see DATAMONGO-1444
*/
@Test
public void shouldParseQueryWithParametersInExpression() throws Exception {
ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, 1, 2, 3, 4);
ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithParametersInExpression", int.class,
int.class, int.class, int.class);
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor);
assertThat(query.getQueryObject(),
is(new BasicQuery("{$where: 'return this.date.getUTCMonth() == 3 && this.date.getUTCDay() == 4;'}")
.getQueryObject()));
}
/**
* @see DATAMONGO-1444
*/
@Test
public void shouldParseJsonKeyReplacementCorrectly() throws Exception {
ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("methodWithPlaceholderInKeyOfJsonStructure",
String.class, String.class);
ConvertingParameterAccessor parameterAccessor = StubParameterAccessor.getAccessor(converter, "key", "value");
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(parameterAccessor);
assertThat(query.getQueryObject(), is(new Document().append("key", "value")));
}
/**
* @see DATAMONGO-1444
*/
@Test
public void shouldSupportExpressionsInCustomQueries() throws Exception {
ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "Matthews");
ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpression", String.class);
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor);
org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}");
assertThat(query.getQueryObject(), is(reference.getQueryObject()));
}
/**
* @see DATAMONGO-1444
*/
@Test
public void shouldSupportExpressionsInCustomQueriesWithNestedObject() throws Exception {
ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, true, "param1", "param2");
ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpressionAndNestedObject",
boolean.class, String.class);
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor);
org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{ \"id\" : { \"$exists\" : true}}");
assertThat(query.getQueryObject(), is(reference.getQueryObject()));
}
/**
* @see DATAMONGO-1444
*/
@Test
public void shouldSupportExpressionsInCustomQueriesWithMultipleNestedObjects() throws Exception {
ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, true, "param1", "param2");
ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpressionAndMultipleNestedObjects",
boolean.class, String.class, String.class);
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor);
org.springframework.data.mongodb.core.query.Query reference = new BasicQuery(
"{ \"id\" : { \"$exists\" : true} , \"foo\" : 42 , \"bar\" : { \"$exists\" : false}}");
assertThat(query.getQueryObject(), is(reference.getQueryObject()));
}
/**
* @see DATAMONGO-1444
*/
@Test
public void shouldSupportNonQuotedBinaryDataReplacement() throws Exception {
byte[] binaryData = "Matthews".getBytes("UTF-8");
ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, binaryData);
ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAsBinary", byte[].class);
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor);
org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : { '$binary' : '"
+ DatatypeConverter.printBase64Binary(binaryData) + "', '$type' : " + BSON.B_GENERAL + "}}");
assertThat(query.getQueryObject().toJson(), is(reference.getQueryObject().toJson()));
}
private ReactiveStringBasedMongoQuery createQueryForMethod(String name, Class<?>... parameters) throws Exception {
DefaultConversionService conversionService = new DefaultConversionService();
QueryExecutionConverters.registerConvertersIn(conversionService);
Method method = SampleRepository.class.getMethod(name, parameters);
ProjectionFactory factory = new SpelAwareProxyProjectionFactory();
ReactiveMongoQueryMethod queryMethod = new ReactiveMongoQueryMethod(method,
new DefaultRepositoryMetadata(SampleRepository.class), factory, converter.getMappingContext());
return new ReactiveStringBasedMongoQuery(queryMethod, operations, PARSER, DefaultEvaluationContextProvider.INSTANCE,
conversionService);
}
private interface SampleRepository extends Repository<Person, Long> {
@Query("{ 'lastname' : ?0 }")
Mono<Person> findByLastname(String lastname);
@Query("{ 'lastname' : ?0 }")
Mono<Person> findByLastnameAsBinary(byte[] lastname);
@Query("{ 'address' : ?0 }")
Mono<Person> findByAddress(Address address);
@Query(value = "{ 'lastname' : ?0 }", delete = true)
Mono<Void> removeByLastname(String lastname);
@Query(value = "{ 'lastname' : ?0 }", delete = true, count = true)
Mono<Void> invalidMethod(String lastname);
@Query(value = "?0", fields = "?1")
Mono<Document> findByParameterizedCriteriaAndFields(Document criteria, Map<String, Integer> fields);
@Query("{$where: 'return this.date.getUTCMonth() == ?2 && this.date.getUTCDay() == ?3;'}")
Flux<Document> findByQueryWithParametersInExpression(int param1, int param2, int param3, int param4);
@Query("{ ?0 : ?1}")
Mono<Object> methodWithPlaceholderInKeyOfJsonStructure(String keyReplacement, String valueReplacement);
@Query("{'lastname': ?#{[0]} }")
Flux<Person> findByQueryWithExpression(String param0);
@Query("{'id':?#{ [0] ? { $exists :true} : [1] }}")
Flux<Person> findByQueryWithExpressionAndNestedObject(boolean param0, String param1);
@Query("{'id':?#{ [0] ? { $exists :true} : [1] }, 'foo':42, 'bar': ?#{ [0] ? { $exists :false} : [1] }}")
Flux<Person> findByQueryWithExpressionAndMultipleNestedObjects(boolean param0, String param1, String param2);
}
}

View File

@@ -0,0 +1,150 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.support;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import org.junit.Test;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.mongodb.repository.support.ReactivePageImpl;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/**
* Unit tests for {@link ReactivePageImpl}.
*
* @author Mark Paluch
*/
public class ReactivePageImplUnitTests {

	/**
	 * A {@code null} content {@link Flux} must be rejected even when a total is given.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test(expected = IllegalArgumentException.class)
	public void preventsNullContentForAdvancedSetup() throws Exception {
		new ReactivePageImpl<Object>(null, null, Mono.just(0L));
	}

	/**
	 * The first page of many must report a follow-up page but no predecessor.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void returnsNextPageable() {

		Page<Object> firstPage = new ReactivePageImpl<>(Flux.just(new Object()), new PageRequest(0, 1), Mono.just(10L));

		assertThat(firstPage.isFirst(), is(true));
		assertThat(firstPage.hasPrevious(), is(false));
		assertThat(firstPage.previousPageable(), is(nullValue()));
		assertThat(firstPage.isLast(), is(false));
		assertThat(firstPage.hasNext(), is(true));
		assertThat(firstPage.nextPageable(), is(new PageRequest(1, 1)));
	}

	/**
	 * Emitted elements beyond the page size are cut off and signal a next page.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void returnsContentBoundedByPageSize() {

		Flux<Object> twoElements = Flux.just(new Object(), new Object());
		Page<Object> singleElementPage = new ReactivePageImpl<>(twoElements, new PageRequest(0, 1), Mono.just(10L));

		assertThat(singleElementPage.getContent(), hasSize(1));
		assertThat(singleElementPage.hasNext(), is(true));
	}

	/**
	 * The last page of two must report a predecessor but no follow-up page.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void returnsPreviousPageable() {

		Page<Object> lastPage = new ReactivePageImpl<>(Flux.just(new Object()), new PageRequest(1, 1), Mono.just(2L));

		assertThat(lastPage.isFirst(), is(false));
		assertThat(lastPage.hasPrevious(), is(true));
		assertThat(lastPage.previousPageable(), is(new PageRequest(0, 1)));
		assertThat(lastPage.isLast(), is(true));
		assertThat(lastPage.hasNext(), is(false));
		assertThat(lastPage.nextPageable(), is(nullValue()));
	}

	/**
	 * {@code map(…)} converts the page content element by element.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void transformsPageCorrectly() {

		Page<String> source = new ReactivePageImpl<>(Flux.just("foo", "bar"), new PageRequest(0, 2), Mono.just(10L));
		Page<Integer> lengths = source.map(value -> value.length());

		assertThat(lengths.getContent(), hasSize(2));
		assertThat(lengths.getContent(), contains(3, 3));
	}

	/**
	 * A stale, too large total is corrected from the actual content of the last page.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void adaptsTotalForLastPageOnIntermediateDeletion() {

		Page<String> page = new ReactivePageImpl<>(Flux.just("foo", "bar"), new PageRequest(0, 5), Mono.just(3L));

		assertThat(page.getTotalElements(), is(2L));
	}

	/**
	 * A stale, too small total is corrected from the actual content of the last page.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void adaptsTotalForLastPageOnIntermediateInsertion() {

		Page<String> page = new ReactivePageImpl<>(Flux.just("foo", "bar"), new PageRequest(0, 5), Mono.just(1L));

		assertThat(page.getTotalElements(), is(2L));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void adaptsTotalForLastPageOnIntermediateDeletionOnLastPate() {

		Page<String> page = new ReactivePageImpl<>(Flux.just("foo", "bar"), new PageRequest(1, 10), Mono.just(13L));

		assertThat(page.getTotalElements(), is(12L));
	}

	/**
	 * @see DATAMONGO-1444
	 */
	@Test
	public void adaptsTotalForLastPageOnIntermediateInsertionOnLastPate() {

		Page<String> page = new ReactivePageImpl<>(Flux.just("foo", "bar"), new PageRequest(1, 10), Mono.just(11L));

		assertThat(page.getTotalElements(), is(12L));
	}

	/**
	 * An empty page never adjusts the total.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void doesNotAdapttotalIfPageIsEmpty() {

		Page<String> emptyPage = new ReactivePageImpl<String>(Flux.empty(), new PageRequest(1, 10), Mono.just(0L));

		assertThat(emptyPage.getTotalElements(), is(0L));
	}
}

View File

@@ -0,0 +1,88 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository.support;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import org.junit.Test;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import reactor.core.publisher.Flux;
/**
* Unit tests for {@link ReactiveSliceImpl}.
*
* @author Mark Paluch
*/
public class ReactiveSliceImplUnitTests {

	/**
	 * A {@code null} content {@link Flux} must be rejected.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test(expected = IllegalArgumentException.class)
	public void preventsNullContentForAdvancedSetup() throws Exception {
		new ReactiveSliceImpl<Object>(null, null);
	}

	/**
	 * A first slice that sees more elements than its size must announce a next slice
	 * but no predecessor.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void returnsNextPageable() {

		Flux<Object> twoElements = Flux.just(new Object(), new Object());
		Slice<Object> firstSlice = new ReactiveSliceImpl<>(twoElements, new PageRequest(0, 1));

		assertThat(firstSlice.isFirst(), is(true));
		assertThat(firstSlice.hasPrevious(), is(false));
		assertThat(firstSlice.previousPageable(), is(nullValue()));
		assertThat(firstSlice.isLast(), is(false));
		assertThat(firstSlice.hasNext(), is(true));
		assertThat(firstSlice.nextPageable(), is(new PageRequest(1, 1)));
	}

	/**
	 * A final slice must announce a predecessor but no next slice.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void returnsPreviousPageable() {

		Slice<Object> lastSlice = new ReactiveSliceImpl<>(Flux.just(new Object()), new PageRequest(1, 1));

		assertThat(lastSlice.isFirst(), is(false));
		assertThat(lastSlice.hasPrevious(), is(true));
		assertThat(lastSlice.previousPageable(), is(new PageRequest(0, 1)));
		assertThat(lastSlice.isLast(), is(true));
		assertThat(lastSlice.hasNext(), is(false));
		assertThat(lastSlice.nextPageable(), is(nullValue()));
	}

	/**
	 * {@code map(…)} converts the slice content element by element.
	 *
	 * @see DATAMONGO-1444
	 */
	@Test
	public void transformsPageCorrectly() {

		Slice<String> source = new ReactiveSliceImpl<>(Flux.just("foo", "bar"), new PageRequest(0, 2));
		Slice<Integer> lengths = source.map(value -> value.length());

		assertThat(lengths.getContent(), hasSize(2));
		assertThat(lengths.getContent(), contains(3, 3));
	}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd">

	<!-- Reactive Streams MongoClient pointing at a locally running MongoDB instance. -->
	<bean id="mongoClient" class="org.springframework.data.mongodb.core.ReactiveMongoClientFactoryBean">
		<property name="host" value="127.0.0.1"/>
		<property name="port" value="27017"/>
	</bean>

	<!-- Database factory bound to the "reactive" database of the client above. -->
	<bean id="reactiveMongoDbFactory" class="org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory">
		<constructor-arg name="mongoClient" ref="mongoClient"/>
		<constructor-arg name="databaseName" value="reactive"/>
	</bean>

	<!-- ReactiveMongoTemplate backed by the reactive database factory. -->
	<bean id="reactiveMongoTemplate" class="org.springframework.data.mongodb.core.ReactiveMongoTemplate">
		<constructor-arg ref="reactiveMongoDbFactory"/>
	</bean>
</beans>

View File

@@ -26,7 +26,9 @@ include::{spring-data-commons-docs}/repositories.adoc[]
:leveloffset: +1
include::reference/introduction.adoc[]
include::reference/mongodb.adoc[]
include::reference/reactive-mongodb.adoc[]
include::reference/mongo-repositories.adoc[]
include::reference/reactive-mongo-repositories.adoc[]
include::{spring-data-commons-docs}/auditing.adoc[]
include::reference/mongo-auditing.adoc[]
include::reference/mapping.adoc[]

View File

@@ -0,0 +1,226 @@
[[mongo.reactive.repositories]]
= Reactive MongoDB repositories
[[mongo.reactive.repositories.intro]]
== Introduction
This chapter will point out the specialties for reactive repository support for MongoDB. This builds on the core repository support explained in <<repositories>>. So make sure you've got a sound understanding of the basic concepts explained there.
[[mongo.reactive.repositories.libraries]]
== Reactive Composition Libraries
The reactive space offers various reactive composition libraries. The most common libraries are https://github.com/ReactiveX/RxJava[RxJava] and https://projectreactor.io/[Project Reactor].
Spring Data MongoDB is built on top of the MongoDB Reactive Streams driver to provide maximal interoperability relying on the http://www.reactive-streams.org/[Reactive Streams] initiative. Static APIs such as `ReactiveMongoOperations` are provided by using Project Reactor's `Flux` and `Mono` types. Project Reactor offers various adapters to convert reactive wrapper types (`Flux` to `Observable` and vice versa) but conversion can easily clutter your code.
Spring Data's Repository abstraction is a dynamic API, mostly defined by you and your requirements, as you're declaring query methods. Reactive MongoDB repositories can be either implemented using RxJava or Project Reactor wrapper types by simply extending from one of the library-specific repository interfaces:
* `ReactiveCrudRepository`
* `ReactivePagingAndSortingRepository`
* `RxJavaCrudRepository`
* `RxJavaPagingAndSortingRepository`
Spring Data converts reactive wrapper types behind the scenes so that you can stick to your favorite composition library.
[[mongo.reactive.repositories.usage]]
== Usage
To access domain entities stored in a MongoDB you can leverage our sophisticated repository support that eases implementing those quite significantly. To do so, simply create an interface for your repository:
.Sample Person entity
====
[source,java]
----
public class Person {
@Id
private String id;
private String firstname;
private String lastname;
private Address address;
// … getters and setters omitted
}
----
====
We have a quite simple domain object here. Note that it has a property named `id` of type `String`. The default serialization mechanism used in `MongoTemplate` (which is backing the repository support) regards properties named id as document id. Currently we support `String`, `ObjectId` and `BigInteger` as id-types.
.Basic repository interface to persist Person entities
====
[source]
----
public interface ReactivePersonRepository extends ReactivePagingAndSortingRepository<Person, String> {
Flux<Person> findByFirstname(String firstname);
Flux<Person> findByFirstname(Publisher<String> firstname);
Flux<Person> findByFirstnameOrderByLastname(String firstname, Pageable pageable);
Mono<Person> findByFirstnameAndLastname(String firstname, String lastname);
}
----
====
For JavaConfig use the `@EnableReactiveMongoRepositories` annotation. The annotation carries the very same attributes like the namespace element. If no base package is configured the infrastructure will scan the package of the annotated configuration class.
NOTE: MongoDB uses two different drivers for blocking and reactive (non-blocking) data access. It's required to create a connection using the Reactive Streams driver to provide the required infrastructure for Spring Data's Reactive MongoDB support hence you're required to provide a separate Configuration for MongoDB's Reactive Streams driver. Please also note that your application will operate on two different connections if using Reactive and Blocking Spring Data MongoDB Templates and Repositories.
.JavaConfig for repositories
====
[source,java]
----
@Configuration
@EnableReactiveMongoRepositories
class ApplicationConfig extends AbstractReactiveMongoConfiguration {
@Override
protected String getDatabaseName() {
return "e-store";
}
@Override
public MongoClient mongoClient() {
return MongoClients.create();
}
@Override
protected String getMappingBasePackage() {
return "com.oreilly.springdata.mongodb";
}
}
----
====
As our domain repository extends `ReactivePagingAndSortingRepository` it provides you with CRUD operations as well as methods for paginated and sorted access to the entities. Working with the repository instance is just a matter of dependency injecting it into a client. So accessing the second page of `Person` s at a page size of 10 would simply look something like this:
.Paging access to Person entities
====
[source,java]
----
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration
public class PersonRepositoryTests {
@Autowired ReactivePersonRepository repository;
@Test
public void readsFirstPageCorrectly() {
Mono<Page<Person>> persons = repository.findAll(new PageRequest(0, 10));
}
@Test
public void readsFirstPageAsStream() {
Flux<Person> persons = repository.findAll(new PageRequest(0, 10));
}
}
----
====
The sample creates an application context with Spring's unit test support which will perform annotation based dependency injection into test cases. Inside the test method we simply use the repository to query the datastore. We hand the repository a `PageRequest` instance that requests the first page of persons at a page size of 10.
[[mongo.reactive.repositories.features]]
== Features
Spring Data's Reactive MongoDB support comes with a reduced feature set compared to the blocking <<mongo.repositories,MongoDB Repositories>>.
Following features are supported:
* Query Methods using <<mongodb.repositories.queries,String queries and Query Derivation>>
* <<mongodb.reactive.repositories.queries.geo-spatial>>
* <<mongodb.repositories.queries.delete>>
* <<mongodb.repositories.queries.json-based>>
* <<mongodb.repositories.queries.full-text>>
* <<projections>>
Reactive Repositories do not support Type-safe Query methods using QueryDSL.
[[mongodb.reactive.repositories.queries.geo-spatial]]
=== Geo-spatial repository queries
As you've just seen there are a few keywords triggering geo-spatial operations within a MongoDB query. The `Near` keyword allows some further modification. Let's have look at some examples:
.Advanced `Near` queries
====
[source,java]
----
public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
// { 'location' : { '$near' : [point.x, point.y], '$maxDistance' : distance}}
Flux<Person> findByLocationNear(Point location, Distance distance);
}
----
====
Adding a `Distance` parameter to the query method allows restricting results to those within the given distance. If the `Distance` was set up containing a `Metric` we will transparently use `$nearSphere` instead of `$near`.
NOTE: Reactive Geo-spatial repository queries support the domain type and `GeoResult<T>` results within a reactive wrapper type. `GeoPage` and `GeoResults` are not supported as they contradict the deferred result approach with pre-calculating the average distance. However, you can still pass in a `Pageable` argument to page results yourself.
.Using `Distance` with `Metrics`
====
[source,java]
----
Point point = new Point(43.7, 48.8);
Distance distance = new Distance(200, Metrics.KILOMETERS);
… = repository.findByLocationNear(point, distance);
// {'location' : {'$nearSphere' : [43.7, 48.8], '$maxDistance' : 0.03135711885774796}}
----
====
As you can see using a `Distance` equipped with a `Metric` causes `$nearSphere` clause to be added instead of a plain `$near`. Beyond that the actual distance gets calculated according to the `Metrics` used.
NOTE: Using `@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE)` on the target property forces usage of `$nearSphere` operator.
==== Geo-near queries
[source,java]
----
public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
// {'geoNear' : 'location', 'near' : [x, y] }
Flux<GeoResult<Person>> findByLocationNear(Point location);
// No metric: {'geoNear' : 'person', 'near' : [x, y], maxDistance : distance }
// Metric: {'geoNear' : 'person', 'near' : [x, y], 'maxDistance' : distance,
// 'distanceMultiplier' : metric.multiplier, 'spherical' : true }
Flux<GeoResult<Person>> findByLocationNear(Point location, Distance distance);
// Metric: {'geoNear' : 'person', 'near' : [x, y], 'minDistance' : min,
// 'maxDistance' : max, 'distanceMultiplier' : metric.multiplier,
// 'spherical' : true }
Flux<GeoResult<Person>> findByLocationNear(Point location, Distance min, Distance max);
// {'geoNear' : 'location', 'near' : [x, y] }
Flux<GeoResult<Person>> findByLocationNear(Point location);
}
----
[[mongo.reactive.repositories.infinite-streams]]
== Infinite Streams
By default, MongoDB will automatically close a cursor when the client has exhausted all results in the cursor. Closing a cursor turns a stream into a finite stream. However, for capped collections you may use a https://docs.mongodb.com/manual/core/tailable-cursors/[Tailable Cursor] that remains open after the client exhausts the results in the initial cursor. Using Tailable Cursors with a reactive approach allows construction of infinite streams. A Tailable Cursor remains open until it's closed. It emits data as data arrives in a capped collection. Using Tailable Cursors with non-capped collections is not possible, as their result would never complete.
Spring Data MongoDB Reactive Repository support supports infinite streams by annotating a query method with `@InfiniteStream`. This works for methods returning `Flux` or `Observable` wrapper types.
[source,java]
----
public interface PersonRepository extends ReactiveMongoRepository<Person, String> {
@InfiniteStream
Flux<Person> findByFirstname(String firstname);
}
Flux<Person> stream = repository.findByFirstname("Joe");
Cancellation cancellation = stream.doOnNext(person -> System.out.println(person)).subscribe();
// …
// Later: Dispose the stream
cancellation.dispose();
----

View File

@@ -0,0 +1,542 @@
[[mongo.reactive]]
= Reactive MongoDB support
The reactive MongoDB support contains a basic set of features which are summarized below.
* Spring configuration support using Java based @Configuration classes, for a Mongo client instance and replica sets
* `ReactiveMongoTemplate` helper class that increases productivity using Mongo operations in a reactive manner. Includes integrated object mapping between documents and POJOs.
* Exception translation into Spring's portable Data Access Exception hierarchy
* Feature Rich Object Mapping integrated with Spring's Conversion Service
* Annotation based mapping metadata but extensible to support other metadata formats
* Persistence and mapping lifecycle events
* Java based Query, Criteria, and Update DSLs
* Automatic implementation of reactive Repository interfaces including support for custom finder methods.
For most tasks you will find yourself using `ReactiveMongoTemplate` or the Repository support that both leverage the rich mapping functionality. `ReactiveMongoTemplate` is the place to look for accessing functionality such as incrementing counters or ad-hoc CRUD operations. `ReactiveMongoTemplate` also provides callback methods so that it is easy for you to get a hold of the low level API artifacts such as `MongoDatabase` to communicate directly with MongoDB. The goal with naming conventions on various API artifacts is to copy those in the base MongoDB Java driver so you can easily map your existing knowledge onto the Spring APIs.
[[mongodb-reactive-getting-started]]
== Getting Started
Spring MongoDB support requires MongoDB 2.6 or higher and Java SE 8 or higher.
First you need to set up a running Mongodb server. Refer to the http://docs.mongodb.org/manual/core/introduction/[Mongodb Quick Start guide] for an explanation on how to startup a MongoDB instance. Once installed starting MongoDB is typically a matter of executing the following command: `MONGO_HOME/bin/mongod`
To create a Spring project in STS go to File -> New -> Spring Template Project -> Simple Spring Utility Project -> press Yes when prompted. Then enter a project and a package name such as org.spring.mongodb.example.
Then add the following to pom.xml dependencies section.
[source,xml]
----
<dependencies>
<!-- other dependency elements omitted -->
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>{version}</version>
</dependency>
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-driver-reactivestreams</artifactId>
<version>{mongo.reactivestreams}</version>
</dependency>
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-core</artifactId>
<version>{reactor}</version>
</dependency>
</dependencies>
----
NOTE: MongoDB uses two different drivers for blocking and reactive (non-blocking) data access. While blocking operations are provided by default, you have to opt in for reactive usage.
Create a simple Person class to persist:
[source,java]
----
package org.spring.mongodb.example;
public class Person {
private String id;
private String name;
private int age;
public Person(String name, int age) {
this.name = name;
this.age = age;
}
public String getId() {
return id;
}
public String getName() {
return name;
}
public int getAge() {
return age;
}
@Override
public String toString() {
return "Person [id=" + id + ", name=" + name + ", age=" + age + "]";
}
}
----
And a main application to run
[source,java]
----
package org.spring.mongodb.example;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.query.Query;
import com.mongodb.reactivestreams.client.MongoClients;
public class ReactiveMongoApp {
private static final Logger log = LoggerFactory.getLogger(ReactiveMongoApp.class);
public static void main(String[] args) throws Exception {
CountDownLatch latch = new CountDownLatch(1);
ReactiveMongoTemplate mongoOps = new ReactiveMongoTemplate(MongoClients.create(), "database");
mongoOps.insert(new Person("Joe", 34))
.flatMap(p -> mongoOps.findOne(new Query(where("name").is("Joe")), Person.class))
.doOnNext(person -> log.info(person.toString()))
.flatMap(person -> mongoOps.dropCollection("person"))
.doOnComplete(latch::countDown)
.subscribe();
latch.await();
}
}
----
This will produce the following output
[source]
----
2016-09-20 14:56:57,373 DEBUG .index.MongoPersistentEntityIndexCreator: 124 - Analyzing class class example.ReactiveMongoApp$Person for index information.
2016-09-20 14:56:57,452 DEBUG .data.mongodb.core.ReactiveMongoTemplate: 975 - Inserting Document containing fields: [_class, name, age] in collection: person
2016-09-20 14:56:57,541 DEBUG .data.mongodb.core.ReactiveMongoTemplate:1503 - findOne using query: { "name" : "Joe"} fields: null for class: class example.ReactiveMongoApp$Person in collection: person
2016-09-20 14:56:57,545 DEBUG .data.mongodb.core.ReactiveMongoTemplate:1979 - findOne using query: { "name" : "Joe"} in db.collection: database.person
2016-09-20 14:56:57,567 INFO example.ReactiveMongoApp: 43 - Person [id=57e1321977ac501c68d73104, name=Joe, age=34]
2016-09-20 14:56:57,573 DEBUG .data.mongodb.core.ReactiveMongoTemplate: 528 - Dropped collection [person]
----
Even in this simple example, there are a few things to take notice of
* You can instantiate the central helper class of Spring Mongo, <<mongo.reactive.template,`MongoTemplate`>>, using the standard `com.mongodb.reactivestreams.client.MongoClient` object and the name of the database to use.
* The mapper works against standard POJO objects without the need for any additional metadata (though you can optionally provide that information. See <<mongo.mapping,here>>.).
* Conventions are used for handling the id field, converting it to be an ObjectId when stored in the database.
* Mapping conventions can use field access. Notice the Person class has only getters.
* If the constructor argument names match the field names of the stored document, they will be used to instantiate the object
There is an https://github.com/spring-projects/spring-data-examples[github repository with several examples] that you can download and play around with to get a feel for how the library works.
[[mongo.reactive.driver]]
== Connecting to MongoDB with Spring and the Reactive Streams Driver
One of the first tasks when using MongoDB and Spring is to create a `com.mongodb.reactivestreams.client.MongoClient` object using the IoC container.
[[mongo.reactive.mongo-java-config]]
=== Registering a MongoClient instance using Java based metadata
An example of using Java based bean metadata to register an instance of a `com.mongodb.reactivestreams.client.MongoClient` is shown below
.Registering a com.mongodb.reactivestreams.client.MongoClient object using Java based bean metadata
====
[source,java]
----
@Configuration
public class AppConfig {
/*
* Use the Reactive Streams Mongo Client API to create a com.mongodb.reactivestreams.client.MongoClient instance.
*/
public @Bean MongoClient mongoClient() {
return MongoClients.create("mongodb://localhost");
}
}
----
====
This approach allows you to use the standard `com.mongodb.reactivestreams.client.MongoClient` API that you may already be used to using.
An alternative is to register an instance of `com.mongodb.reactivestreams.client.MongoClient` instance with the container using Spring's `ReactiveMongoClientFactoryBean`. As compared to instantiating a `com.mongodb.reactivestreams.client.MongoClient` instance directly, the FactoryBean approach has the added advantage of also providing the container with an ExceptionTranslator implementation that translates MongoDB exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation. This hierarchy and use of `@Repository` is described in http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/dao.html[Spring's DAO support features].
An example of a Java based bean metadata that supports exception translation on `@Repository` annotated classes is shown below:
.Registering a com.mongodb.reactivestreams.client.MongoClient object using Spring's ReactiveMongoClientFactoryBean and enabling Spring's exception translation support
====
[source,java]
----
@Configuration
public class AppConfig {
/*
* Factory bean that creates the com.mongodb.reactivestreams.client.MongoClient instance
*/
public @Bean ReactiveMongoClientFactoryBean mongoClient() {
ReactiveMongoClientFactoryBean mongoClient = new ReactiveMongoClientFactoryBean();
mongoClient.setHost("localhost");
return mongoClient;
}
}
----
====
To access the `com.mongodb.reactivestreams.client.MongoClient` object created by the `ReactiveMongoClientFactoryBean` in other `@Configuration` or your own classes, use a `private @Autowired MongoClient mongoClient;` field.
[[mongo.mongo-db-factory]]
=== The ReactiveMongoDatabaseFactory interface
While `com.mongodb.reactivestreams.client.MongoClient` is the entry point to the reactive MongoDB driver API, connecting to a specific MongoDB database instance requires additional information such as the database name. With that information you can obtain a `com.mongodb.reactivestreams.client.MongoDatabase` object and access all the functionality of a specific MongoDB database instance. Spring provides the `org.springframework.data.mongodb.core.ReactiveMongoDatabaseFactory` interface shown below to bootstrap connectivity to the database.
[source,java]
----
public interface ReactiveMongoDatabaseFactory {
/**
* Creates a default {@link MongoDatabase} instance.
*
* @return
* @throws DataAccessException
*/
MongoDatabase getMongoDatabase() throws DataAccessException;
/**
* Creates a {@link MongoDatabase} instance to access the database with the given name.
*
* @param dbName must not be {@literal null} or empty.
* @return
* @throws DataAccessException
*/
MongoDatabase getMongoDatabase(String dbName) throws DataAccessException;
/**
* Exposes a shared {@link MongoExceptionTranslator}.
*
* @return will never be {@literal null}.
*/
PersistenceExceptionTranslator getExceptionTranslator();
}
----
The class `org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory` implements the ReactiveMongoDatabaseFactory interface and is created with a standard `com.mongodb.reactivestreams.client.MongoClient` instance and the database name.
Instead of using the IoC container to create an instance of ReactiveMongoTemplate, you can just use them in standard Java code as shown below.
[source,java]
----
public class MongoApp {
private static final Log log = LogFactory.getLog(MongoApp.class);
public static void main(String[] args) throws Exception {
    ReactiveMongoOperations mongoOps = new ReactiveMongoTemplate(*new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database")*);
mongoOps.insert(new Person("Joe", 34))
.flatMap(p -> mongoOps.findOne(new Query(where("name").is("Joe")), Person.class))
.doOnNext(person -> log.info(person.toString()))
.flatMap(person -> mongoOps.dropCollection("person"))
.subscribe();
}
}
----
The code in bold highlights the use of SimpleReactiveMongoDatabaseFactory and is the only difference between this listing and the one shown in the <<mongodb-reactive-getting-started,getting started section>>.
[[mongo.mongo-db-factory-java]]
=== Registering a ReactiveMongoDatabaseFactory instance using Java based metadata
To register a ReactiveMongoDatabaseFactory instance with the container, you write code much like what was highlighted in the previous code listing. A simple example is shown below
[source,java]
----
@Configuration
public class MongoConfiguration {
public @Bean ReactiveMongoDatabaseFactory mongoDatabaseFactory() {
return new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database");
}
}
----
To define the username and password, create a MongoDB connection string and pass it into the factory method as shown below. This listing also shows using `ReactiveMongoDatabaseFactory` to register an instance of `ReactiveMongoTemplate` with the container.
[source,java]
----
@Configuration
public class MongoConfiguration {
public @Bean ReactiveMongoDatabaseFactory mongoDatabaseFactory() {
    return new SimpleReactiveMongoDatabaseFactory(MongoClients.create("mongodb://joe:secret@localhost"), "database");
}
public @Bean ReactiveMongoTemplate reactiveMongoTemplate() {
return new ReactiveMongoTemplate(mongoDatabaseFactory());
}
}
----
[[mongo.reactive.template]]
== Introduction to ReactiveMongoTemplate
The class `ReactiveMongoTemplate`, located in the package `org.springframework.data.mongodb`, is the central class of the Spring's Reactive MongoDB support providing a rich feature set to interact with the database. The template offers convenience operations to create, update, delete and query for MongoDB documents and provides a mapping between your domain objects and MongoDB documents.
NOTE: Once configured, `ReactiveMongoTemplate` is thread-safe and can be reused across multiple instances.
The mapping between MongoDB documents and domain classes is done by delegating to an implementation of the interface `MongoConverter`. Spring provides a default implementation with `MongoMappingConverter`, but you can also write your own converter. Please refer to the section on MongoConverters for more detailed information.
The `ReactiveMongoTemplate` class implements the interface `ReactiveMongoOperations`. In as much as possible, the methods on `ReactiveMongoOperations` are named after methods available on the MongoDB driver `Collection` object so as to make the API familiar to existing MongoDB developers who are used to the driver API. For example, you will find methods such as "find", "findAndModify", "findOne", "insert", "remove", "save", "update" and "updateMulti". The design goal was to make it as easy as possible to transition between the use of the base MongoDB driver and `ReactiveMongoOperations`. A major difference between the two APIs is that ReactiveMongoOperations can be passed domain objects instead of `Document` and there are fluent APIs for `Query`, `Criteria`, and `Update` operations instead of populating a `Document` to specify the parameters for those operations.
NOTE: The preferred way to reference the operations on `ReactiveMongoTemplate` instance is via its interface `ReactiveMongoOperations`.
The default converter implementation used by `ReactiveMongoTemplate` is `MappingMongoConverter`. While the `MappingMongoConverter` can make use of additional metadata to specify the mapping of objects to documents it is also capable of converting objects that contain no additional metadata by using some conventions for the mapping of IDs and collection names. These conventions as well as the use of mapping annotations is explained in the <<mongo.mapping,Mapping chapter>>.
Another central feature of `ReactiveMongoTemplate` is exception translation of exceptions thrown in the MongoDB Java driver into Spring's portable Data Access Exception hierarchy. Refer to the section on <<mongo.exception,exception translation>> for more information.
While there are many convenience methods on `ReactiveMongoTemplate` to help you easily perform common tasks if you should need to access the MongoDB driver API directly to access functionality not explicitly exposed by the MongoTemplate you can use one of several Execute callback methods to access underlying driver APIs. The execute callbacks will give you a reference to either a `com.mongodb.reactivestreams.client.MongoCollection` or a `com.mongodb.reactivestreams.client.MongoDatabase` object. Please see the section <<mongo.reactive.executioncallback,Execution Callbacks>> for more information.
Now let's look at some examples of how to work with the `ReactiveMongoTemplate` in the context of the Spring container.
[[mongo.reactive.template.instantiating]]
=== Instantiating ReactiveMongoTemplate
You can use Java to create and register an instance of `ReactiveMongoTemplate` as shown below.
.Registering a `com.mongodb.reactivestreams.client.MongoClient` object and enabling Spring's exception translation support
====
[source,java]
----
@Configuration
public class AppConfig {
public @Bean MongoClient mongoClient() {
return MongoClients.create("mongodb://localhost");
}
public @Bean ReactiveMongoTemplate reactiveMongoTemplate() {
return new ReactiveMongoTemplate(mongoClient(), "mydatabase");
}
}
----
====
There are several overloaded constructors of ReactiveMongoTemplate. These are
* `ReactiveMongoTemplate(MongoClient mongo, String databaseName)` - takes the `com.mongodb.reactivestreams.client.MongoClient` object and the default database name to operate against.
* `ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory)` - takes a ReactiveMongoDatabaseFactory object that encapsulated the `com.mongodb.reactivestreams.client.MongoClient` object and database name.
* `ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, MongoConverter mongoConverter)` - adds a `MongoConverter` to use for mapping.
Other optional properties that you might like to set when creating a `ReactiveMongoTemplate` are the default `WriteResultCheckingPolicy`, `WriteConcern`, and `ReadPreference`.
NOTE: The preferred way to reference the operations on `ReactiveMongoTemplate` instance is via its interface `ReactiveMongoOperations`.
[[mongo.reactive.template.writeresultchecking]]
=== WriteResultChecking Policy
When in development it is very handy to either log or throw an exception if the `com.mongodb.WriteResult` returned from any MongoDB operation contains an error. It is quite common to forget to do this during development and then end up with an application that looks like it runs successfully but in fact the database was not modified according to your expectations. Set ReactiveMongoTemplate's `WriteResultChecking` property to an enum with one of the following values, `LOG`, `EXCEPTION`, or `NONE` to either log the error, throw an exception or do nothing. The default is to use a `WriteResultChecking` value of `NONE`.
[[mongo.reactive.template.writeconcern]]
=== WriteConcern
You can set the `com.mongodb.WriteConcern` property that the `ReactiveMongoTemplate` will use for write operations if it has not yet been specified via the driver at a higher level such as `MongoDatabase`. If ReactiveMongoTemplate's `WriteConcern` property is not set it will default to the one set in the MongoDB driver's DB or Collection setting.
[[mongo.reactive.template.writeconcernresolver]]
=== WriteConcernResolver
For more advanced cases where you want to set different `WriteConcern` values on a per-operation basis (for remove, update, insert and save operations), a strategy interface called `WriteConcernResolver` can be configured on `ReactiveMongoTemplate`. Since `ReactiveMongoTemplate` is used to persist POJOs, the `WriteConcernResolver` lets you create a policy that can map a specific POJO class to a `WriteConcern` value. The `WriteConcernResolver` interface is shown below.
[source,java]
----
public interface WriteConcernResolver {
WriteConcern resolve(MongoAction action);
}
----
The passed in argument, `MongoAction`, is what you use to determine the `WriteConcern` value to be used or to use the value of the Template itself as a default. `MongoAction` contains the collection name being written to, the `java.lang.Class` of the POJO, the converted `DBObject`, as well as the operation as an enumeration (`MongoActionOperation`: REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE) and a few other pieces of contextual information. For example,
[source]
----
private class MyAppWriteConcernResolver implements WriteConcernResolver {
public WriteConcern resolve(MongoAction action) {
if (action.getEntityClass().getSimpleName().contains("Audit")) {
return WriteConcern.NONE;
} else if (action.getEntityClass().getSimpleName().contains("Metadata")) {
return WriteConcern.JOURNAL_SAFE;
}
return action.getDefaultWriteConcern();
}
}
----
[[mongo.reactive.template.save-update-remove]]
== Saving, Updating, and Removing Documents
`ReactiveMongoTemplate` provides a simple way for you to save, update, and delete your domain objects and map those objects to documents stored in MongoDB.
Given a simple class such as Person
[source,java]
----
public class Person {
private String id;
private String name;
private int age;
public Person(String name, int age) {
this.name = name;
this.age = age;
}
public String getId() {
return id;
}
public String getName() {
return name;
}
public int getAge() {
return age;
}
@Override
public String toString() {
return "Person [id=" + id + ", name=" + name + ", age=" + age + "]";
}
}
----
You can save, update and delete the object as shown below.
[source,java]
----
package org.spring.mongodb.example;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.query.Query;
import com.mongodb.reactivestreams.client.MongoClients;
public class ReactiveMongoApp {
private static final Logger log = LoggerFactory.getLogger(ReactiveMongoApp.class);
public static void main(String[] args) throws Exception {
CountDownLatch latch = new CountDownLatch(1);
ReactiveMongoTemplate mongoOps = new ReactiveMongoTemplate(MongoClients.create(), "database");
mongoOps.insert(new Person("Joe", 34)).doOnNext(person -> log.info("Insert: " + person))
.flatMap(person -> mongoOps.findById(person.getId(), Person.class))
.doOnNext(person -> log.info("Found: " + person))
.zipWith(person -> mongoOps.updateFirst(query(where("name").is("Joe")), update("age", 35), Person.class))
.flatMap(tuple -> mongoOps.remove(tuple.getT1())).flatMap(deleteResult -> mongoOps.findAll(Person.class))
.count().doOnSuccess(count -> {
log.info("Number of people: " + count);
latch.countDown();
})
.subscribe();
latch.await();
}
}
----
There was implicit conversion using the `MongoConverter` between a `String` and `ObjectId` as stored in the database and recognizing a convention of the property "Id" name.
NOTE: This example is meant to show the use of save, update and remove operations on `ReactiveMongoTemplate` and not to show complex mapping or functional chaining functionality
The query syntax used in the example is explained in more detail in the section <<mongo.query,Querying Documents>>. Additional documentation can be found in <<mongo-template, the blocking MongoTemplate>> section.
[[mongo.reactive.tailcursors]]
== Infinite Streams
By default, MongoDB will automatically close a cursor when the client has exhausted all results in the cursor. Closing a cursor turns a stream into a finite stream. However, for capped collections you may use a https://docs.mongodb.com/manual/core/tailable-cursors/[Tailable Cursor] that remains open after the client exhausts the results in the initial cursor. Using Tailable Cursors with a reactive approach allows construction of infinite streams. A Tailable Cursor remains open until it's closed. It emits data as data arrives in a capped collection. Using Tailable Cursors with non-capped collections is not possible, as their result would never complete.
[source,java]
----
Flux<Person> stream = template.tail(query(where("name").is("Joe")), Person.class);
Cancellation cancellation = stream.doOnNext(person -> System.out.println(person)).subscribe();
// …
// Later: Dispose the stream
cancellation.dispose();
----
[[mongo.reactive.executioncallback]]
== Execution callbacks
One common design feature of all Spring template classes is that all functionality is routed into one of the template's execute callback methods. This helps ensure that exceptions and any resource management that may be required are handled consistently. While this was of much greater need in the case of JDBC and JMS than with MongoDB, it still offers a single spot for exception translation and logging to occur. As such, using the execute callback is the preferred way to access the MongoDB driver's `MongoDatabase` and `MongoCollection` objects to perform uncommon operations that were not exposed as methods on `ReactiveMongoTemplate`.
Here is a list of execute callback methods.
* `<T> Flux<T>` *execute* `(Class<?> entityClass, ReactiveCollectionCallback<T> action)` Executes the given ReactiveCollectionCallback for the entity collection of the specified class.
* `<T> Flux<T>` *execute* `(String collectionName, ReactiveCollectionCallback<T> action)` Executes the given ReactiveCollectionCallback on the collection of the given name.
* `<T> Flux<T>` *execute* `(ReactiveDatabaseCallback<T> action)` Executes a ReactiveDatabaseCallback translating any exceptions as necessary.
Here is an example that uses the `ReactiveCollectionCallback` to return information about an index
[source,java]
----
Flux<Boolean> hasIndex = template.execute("geolocation", collection -> {
List<IndexInfo> indexes = template.indexOps(collection.getNamespace().getCollectionName()).getIndexInfo();
for (IndexInfo dbo : indexes) {
if ("location_2d".equals(dbo.getName())) {
return Mono.just(true);
}
}
return Mono.just(false);
});
----