From 2145e212ca6d9bf9f361e66d9319befbefd3d795 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Thu, 3 Mar 2016 16:19:18 +0100 Subject: [PATCH] DATAMONGO-1444 - Add support for RxJava wrapper types and slice queries. Reactive MongoDB repository can now be composed from Project Reactor and RxJava types for method arguments and return types. Query methods and methods from the base/implementation classes can be invoked with a conversion of input/output types. --- pom.xml | 1 + spring-data-mongodb-cross-store/pom.xml | 8 + spring-data-mongodb/pom.xml | 49 + .../mongodb/ReactiveMongoDatabaseFactory.java | 56 + .../config/AbstractMongoConfiguration.java | 163 +- .../AbstractReactiveMongoConfiguration.java | 90 + .../config/MappingMongoConverterParser.java | 14 +- .../config/MongoConfigurationSupport.java | 198 ++ .../mongodb/core/DefaultIndexOperations.java | 132 +- .../core/DefaultIndexOperationsProvider.java | 45 + .../core/DefaultReactiveIndexOperations.java | 102 + .../mongodb/core/FindPublisherPreparer.java | 34 + .../data/mongodb/core/IndexConverters.java | 158 ++ .../mongodb/core/IndexOperationsProvider.java | 33 + .../data/mongodb/core/MongoTemplate.java | 12 +- .../core/ReactiveCollectionCallback.java | 29 + .../core/ReactiveDatabaseCallback.java | 27 + .../mongodb/core/ReactiveIndexOperations.java | 58 + .../core/ReactiveMongoClientFactoryBean.java | 127 + ...eactiveMongoClientSettingsFactoryBean.java | 206 ++ .../core/ReactiveMongoDatabaseHolder.java | 88 + .../mongodb/core/ReactiveMongoDbUtils.java | 150 + .../mongodb/core/ReactiveMongoOperations.java | 958 +++++++ .../mongodb/core/ReactiveMongoTemplate.java | 2445 +++++++++++++++++ .../SimpleReactiveMongoDatabaseFactory.java | 134 + .../core/convert/AbstractMongoConverter.java | 10 +- .../mongodb/core/convert/MongoConverters.java | 44 +- .../MongoPersistentEntityIndexCreator.java | 119 +- .../mongodb/repository/InfiniteStream.java | 50 + .../repository/ReactiveMongoRepository.java | 78 + 
.../EnableReactiveMongoRepositories.java | 141 + ...MongoRepositoryConfigurationExtension.java | 54 +- .../ReactiveMongoRepositoriesRegistrar.java | 49 + ...MongoRepositoryConfigurationExtension.java | 171 ++ .../repository/config/RepositoryType.java | 70 + .../query/AbstractReactiveMongoQuery.java | 212 ++ .../repository/query/MongoQueryExecution.java | 20 +- .../repository/query/MongoQueryMethod.java | 11 + .../query/ReactiveMongoParameterAccessor.java | 96 + .../query/ReactiveMongoQueryExecution.java | 301 ++ .../query/ReactiveMongoQueryMethod.java | 150 + .../query/ReactivePartTreeMongoQuery.java | 150 + .../query/ReactiveStringBasedMongoQuery.java | 143 + .../query/StringBasedMongoQuery.java | 18 +- .../IndexEnsuringQueryCreationListener.java | 14 +- .../support/MongoRepositoryFactory.java | 21 +- .../support/MongoRepositoryFactoryBean.java | 2 +- .../repository/support/ReactiveChunk.java | 253 ++ .../ReactiveMongoRepositoryFactory.java | 230 ++ .../ReactiveMongoRepositoryFactoryBean.java | 120 + .../repository/support/ReactivePageImpl.java | 169 ++ .../repository/support/ReactiveSliceImpl.java | 66 + .../SimpleReactiveMongoRepository.java | 352 +++ .../AbstractMongoConfigurationUnitTests.java | 18 +- ...iveMongoConfigurationIntegrationTests.java | 69 + ...ctReactiveMongoConfigurationUnitTests.java | 226 ++ ...uditingViaJavaConfigRepositoriesTests.java | 2 +- ...efaultIndexOperationsIntegrationTests.java | 3 +- .../mongodb/core/MongoTemplateUnitTests.java | 6 +- .../data/mongodb/core/NoExplicitIdTests.java | 2 +- .../core/QueryCursorPreparerUnitTests.java | 4 +- .../ReactiveMongoTemplateExecuteTests.java | 242 ++ .../core/ReactiveMongoTemplateIndexTests.java | 210 ++ .../core/ReactiveMongoTemplateTests.java | 1023 +++++++ .../core/ReactiveMongoTemplateUnitTests.java | 86 + ...nwrapAndReadDocumentCallbackUnitTests.java | 6 +- .../MappingMongoConverterUnitTests.java | 54 +- ...entEntityIndexCreatorIntegrationTests.java | 7 +- 
...PersistentEntityIndexCreatorUnitTests.java | 39 +- .../performance/ReactivePerformanceTests.java | 995 +++++++ ...onvertingReactiveMongoRepositoryTests.java | 293 ++ .../ReactiveMongoRepositoryTests.java | 442 +++ .../repository/ReactivePersonRepository.java | 35 + .../SimpleReactiveMongoRepositoryTests.java | 580 ++++ ...RepositoriesRegistrarIntegrationTests.java | 2 +- ...sitoryConfigurationExtensionUnitTests.java | 2 +- ...RepositoriesRegistrarIntegrationTests.java | 76 + ...sitoryConfigurationExtensionUnitTests.java | 121 + ...estedMongoRepositoriesJavaConfigTests.java | 2 +- .../custom/CustomReactiveMongoRepository.java | 28 + .../CustomReactiveMongoRepositoryCustom.java | 30 + .../CustomReactiveMongoRepositoryImpl.java | 37 + ...ReactiveRepositoryImplementationTests.java | 62 + .../CustomRepositoryImplementationTests.java | 2 +- .../ReactiveMongoQueryExecutionUnitTests.java | 142 + .../ReactiveMongoQueryMethodUnitTests.java | 248 ++ ...eactiveStringBasedMongoQueryUnitTests.java | 297 ++ .../support/ReactivePageImplUnitTests.java | 150 + .../support/ReactiveSliceImplUnitTests.java | 88 + .../java/reactor/test/TestSubscriber.java | 1180 ++++++++ .../resources/reactive-infrastructure.xml | 20 + src/main/asciidoc/index.adoc | 2 + .../reactive-mongo-repositories.adoc | 226 ++ .../asciidoc/reference/reactive-mongodb.adoc | 542 ++++ 94 files changed, 15565 insertions(+), 465 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java create mode 100644 
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexOperationsProvider.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveIndexOperations.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoDatabaseHolder.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoDbUtils.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/InfiniteStream.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReactiveMongoRepository.java create mode 100644 
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableReactiveMongoRepositories.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrar.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtension.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/RepositoryType.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQuery.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoParameterAccessor.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecution.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethod.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactivePartTreeMongoQuery.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQuery.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveChunk.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactory.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactivePageImpl.java create mode 100644 
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSliceImpl.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepository.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationIntegrationTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationUnitTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateExecuteTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateIndexTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ConvertingReactiveMongoRepositoryTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactiveMongoRepositoryTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactivePersonRepository.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarIntegrationTests.java create mode 100644 
spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtensionUnitTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepository.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryCustom.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryImpl.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveRepositoryImplementationTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecutionUnitTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodUnitTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQueryUnitTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactivePageImplUnitTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveSliceImplUnitTests.java create mode 100644 spring-data-mongodb/src/test/java/reactor/test/TestSubscriber.java create mode 100644 spring-data-mongodb/src/test/resources/reactive-infrastructure.xml create mode 100644 src/main/asciidoc/reference/reactive-mongo-repositories.adoc create mode 100644 src/main/asciidoc/reference/reactive-mongodb.adoc diff --git a/pom.xml b/pom.xml index f8b1965de..ff93af8ea 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ spring-data-mongodb 2.0.0.BUILD-SNAPSHOT 3.2.2 + 1.2.0 diff --git a/spring-data-mongodb-cross-store/pom.xml 
b/spring-data-mongodb-cross-store/pom.xml index 96e56bd02..4a4916871 100644 --- a/spring-data-mongodb-cross-store/pom.xml +++ b/spring-data-mongodb-cross-store/pom.xml @@ -51,6 +51,14 @@ 2.0.0.BUILD-SNAPSHOT + + + io.projectreactor + reactor-core + ${reactor} + true + + org.aspectj aspectjrt diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 9754e8545..c8151d666 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -19,6 +19,7 @@ 1.3 1.5 3.3.0 + 5.0.0.BUILD-SNAPSHOT @@ -79,6 +80,52 @@ true + + + org.mongodb + mongodb-driver-reactivestreams + ${mongo.reactivestreams} + true + + + + org.mongodb + mongodb-driver-async + ${mongo} + true + + + org.mongodb + mongodb-driver-core + + + org.mongodb + bson + + + + + + io.projectreactor + reactor-core + ${reactor} + true + + + + io.reactivex + rxjava + ${rxjava} + true + + + + io.reactivex + rxjava-reactive-streams + ${rxjava-reactive-streams} + true + + javax.enterprise @@ -213,9 +260,11 @@ **/PerformanceTests.java + **/ReactivePerformanceTests.java src/test/resources/logging.properties + true diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java new file mode 100644 index 000000000..52e80b953 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java @@ -0,0 +1,56 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb; + +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; + +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Interface for factories creating reactive {@link MongoDatabase} instances. + * + * @author Mark Paluch + * @since 2.0 + */ +public interface ReactiveMongoDatabaseFactory { + + /** + * Creates a default {@link MongoDatabase} instance. + * + * @return + * @throws DataAccessException + */ + MongoDatabase getMongoDatabase() throws DataAccessException; + + /** + * Creates a {@link MongoDatabase} instance to access the database with the given name. + * + * @param dbName must not be {@literal null} or empty. + * @return + * @throws DataAccessException + */ + MongoDatabase getMongoDatabase(String dbName) throws DataAccessException; + + /** + * Exposes a shared {@link MongoExceptionTranslator}. + * + * @return will never be {@literal null}. 
+ */ + PersistenceExceptionTranslator getExceptionTranslator(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java index e1e92eee5..a72521ade 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java @@ -15,36 +15,16 @@ */ package org.springframework.data.mongodb.config; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; - -import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider; import org.springframework.context.annotation.Configuration; -import org.springframework.core.convert.converter.Converter; -import org.springframework.core.type.filter.AnnotationTypeFilter; -import org.springframework.data.annotation.Persistent; import org.springframework.data.authentication.UserCredentials; -import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory; -import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy; -import org.springframework.data.mapping.model.FieldNamingStrategy; -import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.SimpleMongoDbFactory; -import org.springframework.data.mongodb.core.convert.CustomConversions; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import 
org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.mapping.Document; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.support.CachingIsNewStrategyFactory; -import org.springframework.data.support.IsNewStrategyFactory; -import org.springframework.util.ClassUtils; -import org.springframework.util.StringUtils; import com.mongodb.Mongo; import com.mongodb.MongoClient; @@ -57,16 +37,11 @@ import com.mongodb.MongoClient; * @author Thomas Darimont * @author Ryan Tenney * @author Christoph Strobl + * @author Mark Paluch + * @see MongoConfigurationSupport */ @Configuration -public abstract class AbstractMongoConfiguration { - - /** - * Return the name of the database to connect to. - * - * @return must not be {@literal null}. - */ - protected abstract String getDatabaseName(); +public abstract class AbstractMongoConfiguration extends MongoConfigurationSupport { /** * Return the name of the authentication database to use. Defaults to {@literal null} and will turn into the value @@ -120,7 +95,7 @@ public abstract class AbstractMongoConfiguration { * class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending * {@link AbstractMongoConfiguration} the base package will be considered {@code com.acme} unless the method is * overridden to implement alternate behavior. - * + * * @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for * entities. * @deprecated use {@link #getMappingBasePackages()} instead. @@ -132,20 +107,6 @@ public abstract class AbstractMongoConfiguration { return mappingBasePackage == null ? null : mappingBasePackage.getName(); } - /** - * Returns the base packages to scan for MongoDB mapped entities at startup. Will return the package name of the - * configuration class' (the concrete class, not this one here) by default. 
So if you have a - * {@code com.acme.AppConfig} extending {@link AbstractMongoConfiguration} the base package will be considered - * {@code com.acme} unless the method is overridden to implement alternate behavior. - * - * @return the base packages to scan for mapped {@link Document} classes or an empty collection to not enable scanning - * for entities. - * @since 1.10 - */ - protected Collection getMappingBasePackages() { - return Collections.singleton(getMappingBasePackage()); - } - /** * Return {@link UserCredentials} to be used when connecting to the MongoDB instance or {@literal null} if none shall * be used. @@ -159,47 +120,6 @@ public abstract class AbstractMongoConfiguration { return null; } - /** - * Creates a {@link MongoMappingContext} equipped with entity classes scanned from the mapping base package. - * - * @see #getMappingBasePackage() - * @return - * @throws ClassNotFoundException - */ - @Bean - public MongoMappingContext mongoMappingContext() throws ClassNotFoundException { - - MongoMappingContext mappingContext = new MongoMappingContext(); - mappingContext.setInitialEntitySet(getInitialEntitySet()); - mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder()); - mappingContext.setFieldNamingStrategy(fieldNamingStrategy()); - - return mappingContext; - } - - /** - * Returns a {@link MappingContextIsNewStrategyFactory} wrapped into a {@link CachingIsNewStrategyFactory}. - * - * @return - * @throws ClassNotFoundException - */ - @Bean - public IsNewStrategyFactory isNewStrategyFactory() throws ClassNotFoundException { - return new CachingIsNewStrategyFactory(new MappingContextIsNewStrategyFactory(mongoMappingContext())); - } - - /** - * Register custom {@link Converter}s in a {@link CustomConversions} object if required. These - * {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and - * {@link #mongoMappingContext()}. Returns an empty {@link CustomConversions} instance by default. 
- * - * @return must not be {@literal null}. - */ - @Bean - public CustomConversions customConversions() { - return new CustomConversions(Collections.emptyList()); - } - /** * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and * {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied. @@ -219,79 +139,4 @@ public abstract class AbstractMongoConfiguration { return converter; } - - /** - * Scans the mapping base package for classes annotated with {@link Document}. By default, it scans for entities in - * all packages returned by {@link #getMappingBasePackages()}. - * - * @see #getMappingBasePackages() - * @return - * @throws ClassNotFoundException - */ - protected Set> getInitialEntitySet() throws ClassNotFoundException { - - Set> initialEntitySet = new HashSet>(); - - for (String basePackage : getMappingBasePackages()) { - initialEntitySet.addAll(scanForEntities(basePackage)); - } - - return initialEntitySet; - } - - /** - * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document} and - * {@link Persistent}. - * - * @param basePackage must not be {@literal null}. 
- * @return - * @throws ClassNotFoundException - * @since 1.10 - */ - protected Set> scanForEntities(String basePackage) throws ClassNotFoundException { - - if (!StringUtils.hasText(basePackage)) { - return Collections.emptySet(); - } - - Set> initialEntitySet = new HashSet>(); - - if (StringUtils.hasText(basePackage)) { - - ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider( - false); - componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class)); - componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class)); - - for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) { - - initialEntitySet - .add(ClassUtils.forName(candidate.getBeanClassName(), AbstractMongoConfiguration.class.getClassLoader())); - } - } - - return initialEntitySet; - } - - /** - * Configures whether to abbreviate field names for domain objects by configuring a - * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced - * customization needs, consider overriding {@link #mappingMongoConverter()}. - * - * @return - */ - protected boolean abbreviateFieldNames() { - return false; - } - - /** - * Configures a {@link FieldNamingStrategy} on the {@link MongoMappingContext} instance created. - * - * @return - * @since 1.5 - */ - protected FieldNamingStrategy fieldNamingStrategy() { - return abbreviateFieldNames() ? 
new CamelCaseAbbreviatingFieldNamingStrategy() - : PropertyNameFieldNamingStrategy.INSTANCE; - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java new file mode 100644 index 000000000..3468a5ce9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java @@ -0,0 +1,90 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.config; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Base class for reactive Spring Data MongoDB configuration using JavaConfig. 
+ * + * @author Mark Paluch + * @since 2.0 + * @see MongoConfigurationSupport + */ +@Configuration +public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurationSupport { + + /** + * Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a + * {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. + * + * @return + */ + public abstract MongoClient mongoClient(); + + /** + * Creates a {@link ReactiveMongoTemplate}. + * + * @return + */ + @Bean + public ReactiveMongoTemplate reactiveMongoTemplate() throws Exception { + return new ReactiveMongoTemplate(mongoDbFactory(), mappingMongoConverter()); + } + + /** + * Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link Mongo} instance + * configured in {@link #mongoClient()}. + * + * @see #mongoClient() + * @see #reactiveMongoTemplate() + * @return + * @throws Exception + */ + @Bean + public SimpleReactiveMongoDatabaseFactory mongoDbFactory() { + return new SimpleReactiveMongoDatabaseFactory(mongoClient(), getDatabaseName()); + } + + /** + * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and + * {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied. 
+ * + * @see #customConversions() + * @see #mongoMappingContext() + * @see #mongoDbFactory() + * @return + * @throws Exception + */ + @Bean + public MappingMongoConverter mappingMongoConverter() throws Exception { + + MappingMongoConverter converter = new MappingMongoConverter(ReactiveMongoTemplate.NO_OP_REF_RESOLVER, + mongoMappingContext()); + converter.setCustomConversions(customConversions()); + + return converter; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java index 3aae75689..0c2000291 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java @@ -67,7 +67,7 @@ import org.w3c.dom.Element; /** * Bean definition parser for the {@code mapping-converter} element. 
- * + * * @author Jon Brisbin * @author Oliver Gierke * @author Maciej Walkowiak @@ -120,6 +120,12 @@ public class MappingMongoConverterParser implements BeanDefinitionParser { converterBuilder.addPropertyValue("customConversions", conversionsDefinition); } + if(!registry.containsBeanDefinition("indexOperationsProvider")){ + BeanDefinitionBuilder indexOperationsProviderBuilder = BeanDefinitionBuilder.genericBeanDefinition("org.springframework.data.mongodb.core.DefaultIndexOperationsProvider"); + indexOperationsProviderBuilder.addConstructorArgReference(dbFactoryRef); + parserContext.registerBeanComponent(new BeanComponentDefinition(indexOperationsProviderBuilder.getBeanDefinition(), "indexOperationsProvider")); + } + try { registry.getBeanDefinition(INDEX_HELPER_BEAN_NAME); } catch (NoSuchBeanDefinitionException ignored) { @@ -129,7 +135,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser { BeanDefinitionBuilder indexHelperBuilder = BeanDefinitionBuilder .genericBeanDefinition(MongoPersistentEntityIndexCreator.class); indexHelperBuilder.addConstructorArgReference(ctxRef); - indexHelperBuilder.addConstructorArgReference(dbFactoryRef); + indexHelperBuilder.addConstructorArgReference("indexOperationsProvider"); indexHelperBuilder.addDependsOn(ctxRef); parserContext.registerBeanComponent(new BeanComponentDefinition(indexHelperBuilder.getBeanDefinition(), @@ -348,7 +354,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser { /** * {@link TypeFilter} that returns {@literal false} in case any of the given delegates matches. - * + * * @author Oliver Gierke */ private static class NegatingFilter implements TypeFilter { @@ -357,7 +363,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser { /** * Creates a new {@link NegatingFilter} with the given delegates. - * + * * @param filters */ public NegatingFilter(TypeFilter... 
filters) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java new file mode 100644 index 000000000..7fb731ce5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java @@ -0,0 +1,198 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.config; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider; +import org.springframework.core.convert.converter.Converter; +import org.springframework.core.type.filter.AnnotationTypeFilter; +import org.springframework.data.annotation.Persistent; +import org.springframework.data.authentication.UserCredentials; +import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory; +import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy; +import org.springframework.data.mapping.model.FieldNamingStrategy; +import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; +import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.convert.CustomConversions; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.support.CachingIsNewStrategyFactory; +import org.springframework.data.support.IsNewStrategyFactory; +import org.springframework.util.ClassUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.Mongo; +import com.mongodb.MongoClient; + +/** + * Base class for Spring Data MongoDB to be extended for JavaConfiguration usage. 
+ * + * @author Mark Paluch + * @since 2.0 + */ +public abstract class MongoConfigurationSupport { + + /** + * Return the name of the database to connect to. + * + * @return must not be {@literal null}. + */ + protected abstract String getDatabaseName(); + + /** + * Returns the base packages to scan for MongoDB mapped entities at startup. Will return the package name of the + * configuration class' (the concrete class, not this one here) by default. So if you have a + * {@code com.acme.AppConfig} extending {@link MongoConfigurationSupport} the base package will be considered + * {@code com.acme} unless the method is overridden to implement alternate behavior. + * + * @return the base packages to scan for mapped {@link Document} classes or an empty collection to not enable scanning + * for entities. + * @since 1.10 + */ + protected Collection getMappingBasePackages() { + + Package mappingBasePackage = getClass().getPackage(); + return Collections.singleton(mappingBasePackage == null ? null : mappingBasePackage.getName()); + } + + /** + * Creates a {@link MongoMappingContext} equipped with entity classes scanned from the mapping base package. + * + * @see #getMappingBasePackage() + * @return + * @throws ClassNotFoundException + */ + @Bean + public MongoMappingContext mongoMappingContext() throws ClassNotFoundException { + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setInitialEntitySet(getInitialEntitySet()); + mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder()); + mappingContext.setFieldNamingStrategy(fieldNamingStrategy()); + + return mappingContext; + } + + /** + * Returns a {@link MappingContextIsNewStrategyFactory} wrapped into a {@link CachingIsNewStrategyFactory}. 
+ * + * @return + * @throws ClassNotFoundException + */ + @Bean + public IsNewStrategyFactory isNewStrategyFactory() throws ClassNotFoundException { + return new CachingIsNewStrategyFactory(new MappingContextIsNewStrategyFactory(mongoMappingContext())); + } + + /** + * Register custom {@link Converter}s in a {@link CustomConversions} object if required. These + * {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and + * {@link #mongoMappingContext()}. Returns an empty {@link CustomConversions} instance by default. + * + * @return must not be {@literal null}. + */ + @Bean + public CustomConversions customConversions() { + return new CustomConversions(Collections.emptyList()); + } + + /** + * Scans the mapping base package for classes annotated with {@link Document}. By default, it scans for entities in + * all packages returned by {@link #getMappingBasePackages()}. + * + * @see #getMappingBasePackages() + * @return + * @throws ClassNotFoundException + */ + protected Set> getInitialEntitySet() throws ClassNotFoundException { + + Set> initialEntitySet = new HashSet>(); + + for (String basePackage : getMappingBasePackages()) { + initialEntitySet.addAll(scanForEntities(basePackage)); + } + + return initialEntitySet; + } + + /** + * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document} and + * {@link Persistent}. + * + * @param basePackage must not be {@literal null}. 
+ * @return + * @throws ClassNotFoundException + * @since 1.10 + */ + protected Set> scanForEntities(String basePackage) throws ClassNotFoundException { + + if (!StringUtils.hasText(basePackage)) { + return Collections.emptySet(); + } + + Set> initialEntitySet = new HashSet>(); + + if (StringUtils.hasText(basePackage)) { + + ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider( + false); + componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class)); + componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class)); + + for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) { + + initialEntitySet + .add(ClassUtils.forName(candidate.getBeanClassName(), MongoConfigurationSupport.class.getClassLoader())); + } + } + + return initialEntitySet; + } + + /** + * Configures whether to abbreviate field names for domain objects by configuring a + * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced + * customization needs, consider overriding {@link #mappingMongoConverter()}. + * + * @return + */ + protected boolean abbreviateFieldNames() { + return false; + } + + /** + * Configures a {@link FieldNamingStrategy} on the {@link MongoMappingContext} instance created. + * + * @return + * @since 1.5 + */ + protected FieldNamingStrategy fieldNamingStrategy() { + return abbreviateFieldNames() ? 
new CamelCaseAbbreviatingFieldNamingStrategy() + : PropertyNameFieldNamingStrategy.INSTANCE; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java index e9b3c501f..5ccdf3822 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java @@ -15,18 +15,15 @@ */ package org.springframework.data.mongodb.core; -import static org.springframework.data.domain.Sort.Direction.*; +import static org.springframework.data.mongodb.core.MongoTemplate.potentiallyConvertRuntimeException; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; import java.util.List; -import java.util.concurrent.TimeUnit; import org.bson.Document; import org.springframework.dao.DataAccessException; +import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.core.index.IndexDefinition; -import org.springframework.data.mongodb.core.index.IndexField; import org.springframework.data.mongodb.core.index.IndexInfo; import org.springframework.util.Assert; @@ -42,28 +39,25 @@ import com.mongodb.client.model.IndexOptions; * @author Oliver Gierke * @author Komi Innocent * @author Christoph Strobl + * @author Mark Paluch */ public class DefaultIndexOperations implements IndexOperations { - private static final Double ONE = Double.valueOf(1); - private static final Double MINUS_ONE = Double.valueOf(-1); - private static final Collection TWO_D_IDENTIFIERS = Arrays.asList("2d", "2dsphere"); - - private final MongoOperations mongoOperations; + private final MongoDbFactory mongoDbFactory; private final String collectionName; /** * Creates a new {@link DefaultIndexOperations}. * - * @param mongoOperations must not be {@literal null}. 
+ * @param mongoDbFactory must not be {@literal null}. * @param collectionName must not be {@literal null}. */ - public DefaultIndexOperations(MongoOperations mongoOperations, String collectionName) { + public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName) { - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); + Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!"); Assert.notNull(collectionName, "Collection name can not be null!"); - this.mongoOperations = mongoOperations; + this.mongoDbFactory = mongoDbFactory; this.collectionName = collectionName; } @@ -72,57 +66,18 @@ public class DefaultIndexOperations implements IndexOperations { * @see org.springframework.data.mongodb.core.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition) */ public void ensureIndex(final IndexDefinition indexDefinition) { - mongoOperations.execute(collectionName, new CollectionCallback() { - public Object doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + execute(collection -> { - Document indexOptions = indexDefinition.getIndexOptions(); + Document indexOptions = indexDefinition.getIndexOptions(); - if (indexOptions != null) { + if (indexOptions != null) { - IndexOptions ops = new IndexOptions(); - if (indexOptions.containsKey("name")) { - ops = ops.name(indexOptions.get("name").toString()); - } - if (indexOptions.containsKey("unique")) { - ops = ops.unique((Boolean) indexOptions.get("unique")); - } - if (indexOptions.containsKey("sparse")) { - ops = ops.sparse((Boolean) indexOptions.get("sparse")); - } - if (indexOptions.containsKey("background")) { - ops = ops.background((Boolean) indexOptions.get("background")); - } - if (indexOptions.containsKey("expireAfterSeconds")) { - ops = ops.expireAfter((Long) indexOptions.get("expireAfterSeconds"), TimeUnit.SECONDS); - } - if (indexOptions.containsKey("min")) { - ops = ops.min(((Number) 
indexOptions.get("min")).doubleValue()); - } - if (indexOptions.containsKey("max")) { - ops = ops.max(((Number) indexOptions.get("max")).doubleValue()); - } - if (indexOptions.containsKey("bits")) { - ops = ops.bits((Integer) indexOptions.get("bits")); - } - if (indexOptions.containsKey("bucketSize")) { - ops = ops.bucketSize(((Number) indexOptions.get("bucketSize")).doubleValue()); - } - if (indexOptions.containsKey("default_language")) { - ops = ops.defaultLanguage(indexOptions.get("default_language").toString()); - } - if (indexOptions.containsKey("language_override")) { - ops = ops.languageOverride(indexOptions.get("language_override").toString()); - } - if (indexOptions.containsKey("weights")) { - ops = ops.weights((Document) indexOptions.get("weights")); - } - - collection.createIndex(indexDefinition.getIndexKeys(), ops); - } else { - collection.createIndex(indexDefinition.getIndexKeys()); - } - return null; + IndexOptions ops = IndexConverters.DEFINITION_TO_MONGO_INDEX_OPTIONS.convert(indexDefinition); + collection.createIndex(indexDefinition.getIndexKeys(), ops); + } else { + collection.createIndex(indexDefinition.getIndexKeys()); } + return null; }); } @@ -131,7 +86,7 @@ public class DefaultIndexOperations implements IndexOperations { * @see org.springframework.data.mongodb.core.IndexOperations#dropIndex(java.lang.String) */ public void dropIndex(final String name) { - mongoOperations.execute(collectionName, new CollectionCallback() { + execute(new CollectionCallback() { public Void doInCollection(MongoCollection collection) throws MongoException, DataAccessException { collection.dropIndex(name); return null; @@ -154,7 +109,7 @@ public class DefaultIndexOperations implements IndexOperations { */ public List getIndexInfo() { - return mongoOperations.execute(collectionName, new CollectionCallback>() { + return execute(new CollectionCallback>() { public List doInCollection(MongoCollection collection) throws MongoException, DataAccessException { @@ -169,47 
+124,24 @@ public class DefaultIndexOperations implements IndexOperations { while (cursor.hasNext()) { Document ix = cursor.next(); - Document keyDocument = (Document) ix.get("key"); - int numberOfElements = keyDocument.keySet().size(); - - List indexFields = new ArrayList(numberOfElements); - - for (String key : keyDocument.keySet()) { - - Object value = keyDocument.get(key); - - if (TWO_D_IDENTIFIERS.contains(value)) { - indexFields.add(IndexField.geo(key)); - } else if ("text".equals(value)) { - - Document weights = (Document) ix.get("weights"); - for (String fieldName : weights.keySet()) { - indexFields.add(IndexField.text(fieldName, Float.valueOf(weights.get(fieldName).toString()))); - } - - } else { - - Double keyValue = new Double(value.toString()); - - if (ONE.equals(keyValue)) { - indexFields.add(IndexField.create(key, ASC)); - } else if (MINUS_ONE.equals(keyValue)) { - indexFields.add(IndexField.create(key, DESC)); - } - } - } - - String name = ix.get("name").toString(); - - boolean unique = ix.containsKey("unique") ? (Boolean) ix.get("unique") : false; - boolean dropDuplicates = ix.containsKey("dropDups") ? (Boolean) ix.get("dropDups") : false; - boolean sparse = ix.containsKey("sparse") ? (Boolean) ix.get("sparse") : false; - String language = ix.containsKey("default_language") ? 
(String) ix.get("default_language") : ""; - indexInfoList.add(new IndexInfo(indexFields, name, unique, dropDuplicates, sparse, language)); + IndexInfo indexInfo = IndexConverters.DOCUMENT_INDEX_INFO.convert(ix); + indexInfoList.add(indexInfo); } return indexInfoList; } }); } + + public T execute(CollectionCallback callback) { + + Assert.notNull(callback); + + try { + MongoCollection collection = mongoDbFactory.getDb().getCollection(collectionName); + return callback.doInCollection(collection); + } catch (RuntimeException e) { + throw potentiallyConvertRuntimeException(e, mongoDbFactory.getExceptionTranslator()); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java new file mode 100644 index 000000000..f01f95670 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java @@ -0,0 +1,45 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.data.mongodb.MongoDbFactory; + +/** + * {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDbFactory}. 
TODO: Review + * me + * + * @author Mark Paluch + * @since 2.0 + */ +class DefaultIndexOperationsProvider implements IndexOperationsProvider { + + private final MongoDbFactory mongoDbFactory; + + /** + * @param mongoDbFactory must not be {@literal null}. + */ + DefaultIndexOperationsProvider(MongoDbFactory mongoDbFactory) { + this.mongoDbFactory = mongoDbFactory; + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.IndexOperationsProvider#reactiveIndexOps(java.lang.String) + */ + @Override + public IndexOperations indexOps(String collectionName) { + return new DefaultIndexOperations(mongoDbFactory, collectionName); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java new file mode 100644 index 000000000..adb168ecb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java @@ -0,0 +1,102 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import org.bson.Document; +import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.util.Assert; + +import com.mongodb.reactivestreams.client.ListIndexesPublisher; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * Default implementation of {@link IndexOperations}. + * + * @author Mark Paluch + * @since 1.11 + */ +public class DefaultReactiveIndexOperations implements ReactiveIndexOperations { + + private final ReactiveMongoOperations mongoOperations; + private final String collectionName; + + /** + * Creates a new {@link DefaultReactiveIndexOperations}. + * + * @param mongoOperations must not be {@literal null}. + * @param collectionName must not be {@literal null}. + */ + public DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, String collectionName) { + + Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null!"); + Assert.notNull(collectionName, "Collection must not be null!"); + + this.mongoOperations = mongoOperations; + this.collectionName = collectionName; + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveIndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition) + */ + public Mono ensureIndex(final IndexDefinition indexDefinition) { + + return mongoOperations.execute(collectionName, (ReactiveCollectionCallback) collection -> { + + Document indexOptions = indexDefinition.getIndexOptions(); + + if (indexOptions != null) { + return collection.createIndex(indexDefinition.getIndexKeys(), + IndexConverters.DEFINITION_TO_MONGO_INDEX_OPTIONS.convert(indexDefinition)); + } + + return collection.createIndex(indexDefinition.getIndexKeys()); + }).next(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveIndexOperations#dropIndex(java.lang.String) + 
*/ + public Mono dropIndex(final String name) { + + return mongoOperations.execute(collectionName, collection -> { + + return Mono.from(collection.dropIndex(name)); + }).flatMap(success -> Mono.empty()).next(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveIndexOperations#dropAllIndexes() + */ + public Mono dropAllIndexes() { + return dropIndex("*"); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveIndexOperations#getIndexInfo() + */ + public Flux getIndexInfo() { + + return mongoOperations.execute(collectionName, collection -> { + + ListIndexesPublisher indexesPublisher = collection.listIndexes(Document.class); + + return Flux.from(indexesPublisher).map(t -> IndexConverters.DOCUMENT_INDEX_INFO.convert(t)); + }); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java new file mode 100644 index 000000000..a41203d38 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java @@ -0,0 +1,34 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import com.mongodb.DBCursor; +import com.mongodb.reactivestreams.client.FindPublisher; + +/** + * Simple callback interface to allow customization of a {@link FindPublisher}. + * + * @author Mark Paluch + */ +interface FindPublisherPreparer { + + /** + * Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor. + * + * @param cursor + */ + FindPublisher prepare(FindPublisher findPublisher); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java new file mode 100644 index 000000000..44ea31941 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java @@ -0,0 +1,158 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.core; + +import static org.springframework.data.domain.Sort.Direction.*; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import org.bson.Document; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexField; +import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.util.ObjectUtils; + +import com.mongodb.client.model.IndexOptions; + +/** + * {@link Converter Converters} for index-related MongoDB documents/types. + * + * @author Mark Paluch + * @since 2.0 + */ +abstract class IndexConverters { + + public final static Converter DEFINITION_TO_MONGO_INDEX_OPTIONS; + public final static Converter DOCUMENT_INDEX_INFO; + + private static final Double ONE = Double.valueOf(1); + private static final Double MINUS_ONE = Double.valueOf(-1); + private static final Collection TWO_D_IDENTIFIERS = Arrays.asList("2d", "2dsphere"); + + static { + + DEFINITION_TO_MONGO_INDEX_OPTIONS = getIndexDefinitionIndexOptionsConverter(); + DOCUMENT_INDEX_INFO = getDocumentIndexInfoConverter(); + } + + private IndexConverters() { + + } + + private static Converter getIndexDefinitionIndexOptionsConverter() { + + return indexDefinition -> { + + Document indexOptions = indexDefinition.getIndexOptions(); + IndexOptions ops = new IndexOptions(); + + if (indexOptions.containsKey("name")) { + ops = ops.name(indexOptions.get("name").toString()); + } + if (indexOptions.containsKey("unique")) { + ops = ops.unique((Boolean) indexOptions.get("unique")); + } + if (indexOptions.containsKey("sparse")) { + ops = ops.sparse((Boolean) indexOptions.get("sparse")); + } + if (indexOptions.containsKey("background")) { + ops = ops.background((Boolean) indexOptions.get("background")); + } + if 
(indexOptions.containsKey("expireAfterSeconds")) { + ops = ops.expireAfter((Long) indexOptions.get("expireAfterSeconds"), TimeUnit.SECONDS); + } + if (indexOptions.containsKey("min")) { + ops = ops.min(((Number) indexOptions.get("min")).doubleValue()); + } + if (indexOptions.containsKey("max")) { + ops = ops.max(((Number) indexOptions.get("max")).doubleValue()); + } + if (indexOptions.containsKey("bits")) { + ops = ops.bits((Integer) indexOptions.get("bits")); + } + if (indexOptions.containsKey("bucketSize")) { + ops = ops.bucketSize(((Number) indexOptions.get("bucketSize")).doubleValue()); + } + if (indexOptions.containsKey("default_language")) { + ops = ops.defaultLanguage(indexOptions.get("default_language").toString()); + } + if (indexOptions.containsKey("language_override")) { + ops = ops.languageOverride(indexOptions.get("language_override").toString()); + } + if (indexOptions.containsKey("weights")) { + ops = ops.weights((org.bson.Document) indexOptions.get("weights")); + } + + for (String key : indexOptions.keySet()) { + if (ObjectUtils.nullSafeEquals("2dsphere", indexOptions.get(key))) { + ops = ops.sphereVersion(2); + } + } + + return ops; + }; + } + + private static Converter getDocumentIndexInfoConverter() { + + return ix -> { + Document keyDbObject = (Document) ix.get("key"); + int numberOfElements = keyDbObject.keySet().size(); + + List indexFields = new ArrayList(numberOfElements); + + for (String key : keyDbObject.keySet()) { + + Object value = keyDbObject.get(key); + + if (TWO_D_IDENTIFIERS.contains(value)) { + indexFields.add(IndexField.geo(key)); + } else if ("text".equals(value)) { + + Document weights = (Document) ix.get("weights"); + for (String fieldName : weights.keySet()) { + indexFields.add(IndexField.text(fieldName, Float.valueOf(weights.get(fieldName).toString()))); + } + + } else { + + Double keyValue = new Double(value.toString()); + + if (ONE.equals(keyValue)) { + indexFields.add(IndexField.create(key, ASC)); + } else if 
(MINUS_ONE.equals(keyValue)) { + indexFields.add(IndexField.create(key, DESC)); + } + } + } + + String name = ix.get("name").toString(); + + boolean unique = ix.containsKey("unique") ? (Boolean) ix.get("unique") : false; + boolean dropDuplicates = ix.containsKey("dropDups") ? (Boolean) ix.get("dropDups") : false; + boolean sparse = ix.containsKey("sparse") ? (Boolean) ix.get("sparse") : false; + + String language = ix.containsKey("default_language") ? (String) ix.get("default_language") : ""; + return new IndexInfo(indexFields, name, unique, dropDuplicates, sparse, language); + }; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexOperationsProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexOperationsProvider.java new file mode 100644 index 000000000..de2102ede --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexOperationsProvider.java @@ -0,0 +1,33 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.core; + +import org.springframework.dao.support.PersistenceExceptionTranslator; + +/** + * TODO: Revisit for a better pattern. 
+ * @author Mark Paluch + */ +public interface IndexOperationsProvider { + + /** + * Returns the operations that can be performed on indexes + * + * @return index operations on the named collection + */ + IndexOperations indexOps(String collectionName); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index 407a80c81..a8d89a0e8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -145,7 +145,7 @@ import com.mongodb.util.JSONParseException; * @author Mark Paluch */ @SuppressWarnings("deprecation") -public class MongoTemplate implements MongoOperations, ApplicationContextAware { +public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider { private static final Logger LOGGER = LoggerFactory.getLogger(MongoTemplate.class); private static final String ID_FIELD = "_id"; @@ -230,7 +230,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { mappingContext = this.mongoConverter.getMappingContext(); // We create indexes based on mapping events if (null != mappingContext && mappingContext instanceof MongoMappingContext) { - indexCreator = new MongoPersistentEntityIndexCreator((MongoMappingContext) mappingContext, mongoDbFactory); + indexCreator = new MongoPersistentEntityIndexCreator((MongoMappingContext) mappingContext, this); eventPublisher = new MongoMappingEventPublisher(indexCreator); if (mappingContext instanceof ApplicationEventPublisherAware) { ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); @@ -539,11 +539,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { } public IndexOperations indexOps(String collectionName) { 
- return new DefaultIndexOperations(this, collectionName); + return new DefaultIndexOperations(getMongoDbFactory(), collectionName); } public IndexOperations indexOps(Class entityClass) { - return new DefaultIndexOperations(this, determineCollectionName(entityClass)); + return new DefaultIndexOperations(getMongoDbFactory(), determineCollectionName(entityClass)); } public BulkOperations bulkOps(BulkMode bulkMode, String collectionName) { @@ -2039,6 +2039,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { } } + public PersistenceExceptionTranslator getExceptionTranslator() { + return exceptionTranslator; + } + private MongoPersistentEntity getPersistentEntity(Class type) { return type == null ? null : mappingContext.getPersistentEntity(type); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java new file mode 100644 index 000000000..9e5d876fc --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java @@ -0,0 +1,29 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import org.springframework.dao.DataAccessException; + +import com.mongodb.MongoException; +import com.mongodb.reactivestreams.client.MongoCollection; +import org.bson.Document; +import org.reactivestreams.Publisher; + +public interface ReactiveCollectionCallback { + + Publisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java new file mode 100644 index 000000000..32198bdbb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java @@ -0,0 +1,27 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import org.springframework.dao.DataAccessException; + +import com.mongodb.MongoException; +import com.mongodb.reactivestreams.client.MongoDatabase; +import org.reactivestreams.Publisher; + +public interface ReactiveDatabaseCallback { + + Publisher doInDB(MongoDatabase db) throws MongoException, DataAccessException; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveIndexOperations.java new file mode 100644 index 000000000..45646526d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveIndexOperations.java @@ -0,0 +1,58 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexInfo; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * Index operations on a collection. + * + * @author Mark Paluch + * @since 1.11 + */ +public interface ReactiveIndexOperations { + + /** + * Ensure that an index for the provided {@link IndexDefinition} exists for the collection indicated by the entity + * class. If not it will be created. 
+ * + * @param indexDefinition must not be {@literal null}. + */ + Mono ensureIndex(IndexDefinition indexDefinition); + + /** + * Drops an index from this collection. + * + * @param name name of index to drop + */ + Mono dropIndex(String name); + + /** + * Drops all indices from this collection. + */ + Mono dropAllIndexes(); + + /** + * Returns the index information on the collection. + * + * @return index information on the collection + */ + Flux getIndexInfo(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java new file mode 100644 index 000000000..f9cb26f77 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java @@ -0,0 +1,127 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.core; + +import org.springframework.beans.factory.config.AbstractFactoryBean; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.util.StringUtils; + +import com.mongodb.async.client.MongoClientSettings; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoClients; + +/** + * Convenient factory for configuring a reactive streams {@link MongoClient}. + * + * @author Mark Paluch + */ +public class ReactiveMongoClientFactoryBean extends AbstractFactoryBean + implements PersistenceExceptionTranslator { + + private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator(); + + private String connectionString; + private String host; + private Integer port; + private MongoClientSettings mongoClientSettings; + private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR; + + /** + * Configures the host to connect to. + * + * @param host + */ + public void setHost(String host) { + this.host = host; + } + + /** + * Configures the port to connect to. + * + * @param port + */ + public void setPort(int port) { + this.port = port; + } + + /** + * Configures the connection string. + * + * @param connectionString + */ + public void setConnectionString(String connectionString) { + this.connectionString = connectionString; + } + + /** + * Configures the mongo client settings. + * + * @param mongoClientSettings + */ + public void setMongoClientSettings(MongoClientSettings mongoClientSettings) { + this.mongoClientSettings = mongoClientSettings; + } + + /** + * Configures the {@link PersistenceExceptionTranslator} to use. + * + * @param exceptionTranslator + */ + public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator == null ? 
DEFAULT_EXCEPTION_TRANSLATOR : exceptionTranslator;
+ }
+
+ @Override
+ public Class getObjectType() {
+ return MongoClient.class;
+ }
+
+ @Override
+ protected MongoClient createInstance() throws Exception {
+
+ if (mongoClientSettings != null) {
+ return MongoClients.create(mongoClientSettings);
+ }
+
+ if (StringUtils.hasText(connectionString)) {
+ return MongoClients.create(connectionString);
+ }
+
+ if (StringUtils.hasText(host)) {
+
+ if (port != null) {
+ return MongoClients.create(String.format("mongodb://%s:%d", host, port));
+ }
+
+ return MongoClients.create(String.format("mongodb://%s", host));
+ }
+
+ throw new IllegalStateException(
+ "Cannot create MongoClients. One of the following is required: mongoClientSettings, connectionString or host/port");
+ }
+
+ @Override
+ protected void destroyInstance(MongoClient instance) throws Exception {
+ instance.close();
+ }
+
+ @Override
+ public DataAccessException translateExceptionIfPossible(RuntimeException ex) {
+ return exceptionTranslator.translateExceptionIfPossible(ex);
+ }
+}
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java
new file mode 100644
index 000000000..3c2d7f06a
--- /dev/null
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java
@@ -0,0 +1,206 @@
+/*
+ * Copyright 2016 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.List; + +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.beans.factory.config.AbstractFactoryBean; +import org.springframework.util.Assert; + +import com.mongodb.MongoCredential; +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; +import com.mongodb.async.client.MongoClientSettings; +import com.mongodb.connection.ClusterSettings; +import com.mongodb.connection.ConnectionPoolSettings; +import com.mongodb.connection.ServerSettings; +import com.mongodb.connection.SocketSettings; +import com.mongodb.connection.SslSettings; +import com.mongodb.connection.StreamFactoryFactory; + +/** + * A factory bean for construction of a {@link MongoClientSettings} instance to be used with the async MongoDB driver. 
+ * + * @author Mark Paluch + * @since 1.7 + */ +public class ReactiveMongoClientSettingsFactoryBean extends AbstractFactoryBean { + + private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build(); + + private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference(); + private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern(); + private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern(); + private List credentialList = new ArrayList<>(); + private StreamFactoryFactory streamFactoryFactory = DEFAULT_MONGO_SETTINGS.getStreamFactoryFactory(); + private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry(); + private ClusterSettings clusterSettings = DEFAULT_MONGO_SETTINGS.getClusterSettings(); + private SocketSettings socketSettings = DEFAULT_MONGO_SETTINGS.getSocketSettings(); + private SocketSettings heartbeatSocketSettings = DEFAULT_MONGO_SETTINGS.getHeartbeatSocketSettings(); + private ConnectionPoolSettings connectionPoolSettings = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings(); + private ServerSettings serverSettings = DEFAULT_MONGO_SETTINGS.getServerSettings(); + private SslSettings sslSettings = DEFAULT_MONGO_SETTINGS.getSslSettings(); + + /** + * Set the {@link ReadPreference}. + * + * @param readPreference + */ + public void setReadPreference(ReadPreference readPreference) { + this.readPreference = readPreference; + } + + /** + * Set the {@link WriteConcern}. + * + * @param writeConcern + */ + public void setWriteConcern(WriteConcern writeConcern) { + this.writeConcern = writeConcern; + } + + /** + * Set the {@link ReadConcern}. + * + * @param readConcern + */ + public void setReadConcern(ReadConcern readConcern) { + this.readConcern = readConcern; + } + + /** + * Set the List of {@link MongoCredential}s. + * + * @param credentialList must not be {@literal null}. 
+ */
+ public void setCredentialList(List credentialList) {
+
+ Assert.notNull(credentialList, "CredentialList must not be null!");
+
+ this.credentialList.addAll(credentialList);
+ }
+
+ /**
+ * Adds the {@link MongoCredential} to the list of credentials.
+ *
+ * @param mongoCredential must not be {@literal null}.
+ */
+ public void addMongoCredential(MongoCredential mongoCredential) {
+
+ Assert.notNull(mongoCredential, "MongoCredential must not be null!");
+
+ this.credentialList.add(mongoCredential);
+ }
+
+ /**
+ * Set the {@link StreamFactoryFactory}.
+ *
+ * @param streamFactoryFactory
+ */
+ public void setStreamFactoryFactory(StreamFactoryFactory streamFactoryFactory) {
+ this.streamFactoryFactory = streamFactoryFactory;
+ }
+
+ /**
+ * Set the {@link CodecRegistry}.
+ *
+ * @param codecRegistry
+ */
+ public void setCodecRegistry(CodecRegistry codecRegistry) {
+ this.codecRegistry = codecRegistry;
+ }
+
+ /**
+ * Set the {@link ClusterSettings}.
+ *
+ * @param clusterSettings
+ */
+ public void setClusterSettings(ClusterSettings clusterSettings) {
+ this.clusterSettings = clusterSettings;
+ }
+
+ /**
+ * Set the {@link SocketSettings}.
+ *
+ * @param socketSettings
+ */
+ public void setSocketSettings(SocketSettings socketSettings) {
+ this.socketSettings = socketSettings;
+ }
+
+ /**
+ * Set the heartbeat {@link SocketSettings}.
+ *
+ * @param heartbeatSocketSettings
+ */
+ public void setHeartbeatSocketSettings(SocketSettings heartbeatSocketSettings) {
+ this.heartbeatSocketSettings = heartbeatSocketSettings;
+ }
+
+ /**
+ * Set the {@link ConnectionPoolSettings}.
+ *
+ * @param connectionPoolSettings
+ */
+ public void setConnectionPoolSettings(ConnectionPoolSettings connectionPoolSettings) {
+ this.connectionPoolSettings = connectionPoolSettings;
+ }
+
+ /**
+ * Set the {@link ServerSettings}.
+ * + * @param serverSettings + */ + public void setServerSettings(ServerSettings serverSettings) { + this.serverSettings = serverSettings; + } + + /** + * Set the {@link SslSettings}. + * + * @param sslSettings + */ + public void setSslSettings(SslSettings sslSettings) { + this.sslSettings = sslSettings; + } + + @Override + public Class getObjectType() { + return MongoClientSettings.class; + } + + @Override + protected MongoClientSettings createInstance() throws Exception { + + return MongoClientSettings.builder() // + .readPreference(readPreference) // + .writeConcern(writeConcern) // + .readConcern(readConcern) // + .credentialList(credentialList) // + .streamFactoryFactory(streamFactoryFactory) // + .codecRegistry(codecRegistry) // + .clusterSettings(clusterSettings) // + .socketSettings(socketSettings) // + .heartbeatSocketSettings(heartbeatSocketSettings) // + .connectionPoolSettings(connectionPoolSettings) // + .serverSettings(serverSettings) // + .sslSettings(sslSettings) // + .build(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoDatabaseHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoDatabaseHolder.java new file mode 100644 index 000000000..5679fa2ef --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoDatabaseHolder.java @@ -0,0 +1,88 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.core; + + +import com.mongodb.reactivestreams.client.MongoDatabase; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import org.springframework.transaction.support.ResourceHolderSupport; +import org.springframework.util.Assert; + + +/** + * @author Mark Paluch + */ +class ReactiveMongoDatabaseHolder extends ResourceHolderSupport { + private static final Object DEFAULT_KEY = new Object(); + + private final Map dbMap = new ConcurrentHashMap(); + + public ReactiveMongoDatabaseHolder(MongoDatabase db) { + addMongoDatabase(db); + } + + public ReactiveMongoDatabaseHolder(Object key, MongoDatabase db) { + addMongoDatabase(key, db); + } + + public MongoDatabase getMongoDatabase() { + return getMongoDatabase(DEFAULT_KEY); + } + + public MongoDatabase getMongoDatabase(Object key) { + return this.dbMap.get(key); + } + + public MongoDatabase getAnyMongoDatabase() { + if (!this.dbMap.isEmpty()) { + return this.dbMap.values().iterator().next(); + } + return null; + } + + public void addMongoDatabase(MongoDatabase session) { + addMongoDatabase(DEFAULT_KEY, session); + } + + public void addMongoDatabase(Object key, MongoDatabase session) { + Assert.notNull(key, "Key must not be null"); + Assert.notNull(session, "DB must not be null"); + this.dbMap.put(key, session); + } + + public MongoDatabase removeMongoDatabase(Object key) { + return this.dbMap.remove(key); + } + + public boolean containsMongoDatabase(MongoDatabase session) { + return this.dbMap.containsValue(session); + } + + public boolean isEmpty() { + return this.dbMap.isEmpty(); + } + + public boolean doesNotHoldNonDefaultMongoDatabase() { + synchronized (this.dbMap) { + return this.dbMap.isEmpty() || (this.dbMap.size() == 1 && this.dbMap.containsKey(DEFAULT_KEY)); + } + } + +} diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoDbUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoDbUtils.java new file mode 100644 index 000000000..f526fb7b4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoDbUtils.java @@ -0,0 +1,150 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoDatabase; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.data.authentication.UserCredentials; +import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.transaction.support.TransactionSynchronizationManager; + + +/** + * Helper class featuring helper methods for internal MongoDb classes. Mainly intended for internal use within the + * framework. + * + * @author Mark Paluch + */ +public abstract class ReactiveMongoDbUtils { + + private static final Logger LOGGER = LoggerFactory.getLogger(ReactiveMongoDbUtils.class); + + /** + * Private constructor to prevent instantiation. 
+ */ + private ReactiveMongoDbUtils() {} + + /** + * Obtains a {@link MongoDatabase} connection for the given {@link MongoClient} instance and database name + * + * @param mongo the {@link MongoClient} instance, must not be {@literal null}. + * @param databaseName the database name, must not be {@literal null} or empty. + * @return the {@link MongoDatabase} connection + */ + public static MongoDatabase getMongoDatabase(MongoClient mongo, String databaseName) { + return doGetMongoDatabase(mongo, databaseName, UserCredentials.NO_CREDENTIALS, true, databaseName); + } + + private static MongoDatabase doGetMongoDatabase(MongoClient mongo, String databaseName, UserCredentials credentials, + boolean allowCreate, String authenticationDatabaseName) { + + ReactiveMongoDatabaseHolder dbHolder = (ReactiveMongoDatabaseHolder) TransactionSynchronizationManager + .getResource(mongo); + + // Do we have a populated holder and TX sync active? + if (dbHolder != null && !dbHolder.isEmpty() && TransactionSynchronizationManager.isSynchronizationActive()) { + + MongoDatabase db = dbHolder.getMongoDatabase(databaseName); + + // DB found but not yet synchronized + if (db != null && !dbHolder.isSynchronizedWithTransaction()) { + + LOGGER.debug("Registering Spring transaction synchronization for existing MongoDB {}.", databaseName); + + TransactionSynchronizationManager.registerSynchronization(new MongoSynchronization(dbHolder, mongo)); + dbHolder.setSynchronizedWithTransaction(true); + } + + if (db != null) { + return db; + } + } + + // Lookup fresh database instance + LOGGER.debug("Getting Mongo Database name=[{}]", databaseName); + + MongoDatabase db = mongo.getDatabase(databaseName); + + // TX sync active, bind new database to thread + if (TransactionSynchronizationManager.isSynchronizationActive()) { + + LOGGER.debug("Registering Spring transaction synchronization for MongoDB instance {}.", databaseName); + + ReactiveMongoDatabaseHolder holderToUse = dbHolder; + + if (holderToUse == 
null) { + holderToUse = new ReactiveMongoDatabaseHolder(databaseName, db); + } else { + holderToUse.addMongoDatabase(databaseName, db); + } + + // synchronize holder only if not yet synchronized + if (!holderToUse.isSynchronizedWithTransaction()) { + TransactionSynchronizationManager.registerSynchronization(new MongoSynchronization(holderToUse, mongo)); + holderToUse.setSynchronizedWithTransaction(true); + } + + if (holderToUse != dbHolder) { + TransactionSynchronizationManager.bindResource(mongo, holderToUse); + } + } + + // Check whether we are allowed to return the DB. + if (!allowCreate && !isDBTransactional(db, mongo)) { + throw new IllegalStateException( + "No Mongo DB bound to thread, " + "and configuration does not allow creation of non-transactional one here"); + } + + return db; + } + + /** + * Return whether the given DB instance is transactional, that is, bound to the current thread by Spring's transaction + * facilities. + * + * @param db the DB to check + * @param mongoClient the Mongo instance that the DB was created with (may be null) + * @return whether the DB is transactional + */ + public static boolean isDBTransactional(MongoDatabase db, MongoClient mongoClient) { + + if (mongoClient == null) { + return false; + } + ReactiveMongoDatabaseHolder dbHolder = (ReactiveMongoDatabaseHolder) TransactionSynchronizationManager + .getResource(mongoClient); + return dbHolder != null && dbHolder.containsMongoDatabase(db); + } + + /** + * Check if credentials present. 
In case we're using a mongo-java-driver version 3 or above we do not have the need + * for authentication as the auth data has to be provided within the MongoClient + * + * @param credentials + * @return + */ + private static boolean requiresAuthDbAuthentication(UserCredentials credentials) { + + if (credentials == null || !credentials.hasUsername()) { + return false; + } + + return !MongoClientVersion.isMongo3Driver(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java new file mode 100644 index 000000000..e6ed168a9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java @@ -0,0 +1,958 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.Collection; + +import org.bson.Document; +import org.reactivestreams.Publisher; +import org.reactivestreams.Subscription; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.GeoResults; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; + +import com.mongodb.ReadPreference; +import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.UpdateResult; +import com.mongodb.reactivestreams.client.MongoCollection; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * Interface that specifies a basic set of MongoDB operations executed in a reactive way. + *

+ * Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability
+ * (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using
+ * {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}.
+ *
+ * @author Mark Paluch
+ * @see Flux
+ * @see Mono
+ * @see http://projectreactor.io/docs/
+ * @since 2.0
+ */
+public interface ReactiveMongoOperations {
+
+ /**
+ * Returns the reactive operations that can be performed on indexes
+ *
+ * @return index operations on the named collection
+ */
+ ReactiveIndexOperations reactiveIndexOps(String collectionName);
+
+ /**
+ * Returns the reactive operations that can be performed on indexes
+ *
+ * @return index operations on the named collection associated with the given entity class
+ */
+ ReactiveIndexOperations reactiveIndexOps(Class entityClass);
+
+ /**
+ * Returns the operations that can be performed on indexes
+ *
+ * @return index operations on the named collection
+ */
+ IndexOperations indexOps(String collectionName);
+
+ /**
+ * Returns the operations that can be performed on indexes
+ *
+ * @return index operations on the named collection associated with the given entity class
+ */
+ IndexOperations indexOps(Class entityClass);
+
+ /**
+ * Execute a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the
+ * MongoDB driver to convert the JSON string to a DBObject. Any errors that result from executing this command will be
+ * converted into Spring's DAO exception hierarchy.
+ *
+ * @param jsonCommand a MongoDB command expressed as a JSON string.
+ * @return a result object returned by the action
+ */
+ Mono executeCommand(String jsonCommand);
+
+ /**
+ * Execute a MongoDB command. Any errors that result from executing this command will be converted into Spring's DAO
+ * exception hierarchy.
+ * + * @param command a MongoDB command + * @return a result object returned by the action + */ + Mono executeCommand(Document command); + + /** + * Execute a MongoDB command. Any errors that result from executing this command will be converted into Spring's data + * access exception hierarchy. + * + * @param command a MongoDB command, must not be {@literal null}. + * @param readPreference read preferences to use, can be {@literal null}. + * @return a result object returned by the action + */ + Mono executeCommand(Document command, ReadPreference readPreference); + + /** + * Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary. + *

+ * Allows for returning a result object, that is a domain object or a collection of domain objects. + * + * @param return type + * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. + * @return a result object returned by the action + */ + Flux execute(ReactiveDatabaseCallback action); + + /** + * Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class. + *

+ * Allows for returning a result object, that is a domain object or a collection of domain objects. + * + * @param entityClass class that determines the collection to use + * @param return type + * @param action callback object that specifies the MongoDB action + * @return a result object returned by the action or null + */ + Flux execute(Class entityClass, ReactiveCollectionCallback action); + + /** + * Executes the given {@link ReactiveCollectionCallback} on the collection of the given name. + *

+ * Allows for returning a result object, that is a domain object or a collection of domain objects. + * + * @param return type + * @param collectionName the name of the collection that specifies which DBCollection instance will be passed into + * @param action callback object that specifies the MongoDB action the callback action. + * @return a result object returned by the action or null + */ + Flux execute(String collectionName, ReactiveCollectionCallback action); + + /** + * Create an uncapped collection with a name based on the provided entity class. + * + * @param entityClass class that determines the collection to create + * @return the created collection + */ + Mono> createCollection(Class entityClass); + + /** + * Create a collection with a name based on the provided entity class using the options. + * + * @param entityClass class that determines the collection to create + * @param collectionOptions options to use when creating the collection. + * @return the created collection + */ + Mono> createCollection(Class entityClass, CollectionOptions collectionOptions); + + /** + * Create an uncapped collection with the provided name. + * + * @param collectionName name of the collection + * @return the created collection + */ + Mono> createCollection(String collectionName); + + /** + * Create a collection with the provided name and options. + * + * @param collectionName name of the collection + * @param collectionOptions options to use when creating the collection. + * @return the created collection + */ + Mono> createCollection(String collectionName, CollectionOptions collectionOptions); + + /** + * A set of collection names. + * + * @return list of collection names + */ + Flux getCollectionNames(); + + /** + * Get a collection by name, creating it if it doesn't exist. + *

+ * Translate any exceptions as necessary. + * + * @param collectionName name of the collection + * @return an existing collection or a newly created one. + */ + MongoCollection getCollection(String collectionName); + + /** + * Check to see if a collection with a name indicated by the entity class exists. + *

+ * Translate any exceptions as necessary. + * + * @param entityClass class that determines the name of the collection + * @return true if a collection with the given name is found, false otherwise. + */ + Mono collectionExists(Class entityClass); + + /** + * Check to see if a collection with a given name exists. + *

+ * Translate any exceptions as necessary. + * + * @param collectionName name of the collection + * @return true if a collection with the given name is found, false otherwise. + */ + Mono collectionExists(String collectionName); + + /** + * Drop the collection with the name indicated by the entity class. + *

+ * Translate any exceptions as necessary. + * + * @param entityClass class that determines the collection to drop/delete. + */ + Mono dropCollection(Class entityClass); + + /** + * Drop the collection with the given name. + *

+ * Translate any exceptions as necessary. + * + * @param collectionName name of the collection to drop/delete. + */ + Mono dropCollection(String collectionName); + + /** + * Query for a list of objects of type T from the collection used by the entity class. + *

+ * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + *

+ * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + * + * @param entityClass the parametrized type of the returned list + * @return the converted collection + */ + Flux findAll(Class entityClass); + + /** + * Query for a list of objects of type T from the specified collection. + *

+ * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + *

+ * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + * + * @param entityClass the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from + * @return the converted collection + */ + Flux findAll(Class entityClass, String collectionName); + + /** + * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the + * specified type. + *

+ * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + *

+ * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @return the converted object + */ + Mono findOne(Query query, Class entityClass); + + /** + * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified + * type. + *

+ * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + *

+ * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from + * @return the converted object + */ + Mono findOne(Query query, Class entityClass, String collectionName); + + /** + * Determine result of given {@link Query} contains at least one element. + * + * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param collectionName name of the collection to check for objects. + * @return + */ + Mono exists(Query query, String collectionName); + + /** + * Determine result of given {@link Query} contains at least one element. + * + * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param entityClass the parametrized type. + * @return + */ + Mono exists(Query query, Class entityClass); + + /** + * Determine result of given {@link Query} contains at least one element. + * + * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param entityClass the parametrized type. + * @param collectionName name of the collection to check for objects. + * @return + */ + Mono exists(Query query, Class entityClass, String collectionName); + + /** + * Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type. + *

+ * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + *

+ * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @return the List of converted objects + */ + Flux find(Query query, Class entityClass); + + /** + * Map the results of an ad-hoc query on the specified collection to a List of the specified type. + *

+ * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + *

+ * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from + * @return the List of converted objects + */ + Flux find(Query query, Class entityClass, String collectionName); + + /** + * Returns a document with the given id mapped onto the given class. The collection the query is ran against will be + * derived from the given target class as well. + * + * @param + * @param id the id of the document to return. + * @param entityClass the type the document shall be converted into. + * @return the document with the given id mapped onto the given target class. + */ + Mono findById(Object id, Class entityClass); + + /** + * Returns the document with the given id from the given collection mapped onto the given target class. + * + * @param id the id of the document to return + * @param entityClass the type to convert the document to + * @param collectionName the collection to query for the document + * @param + * @return + */ + Mono findById(Object id, Class entityClass, String collectionName); + + /** + * Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Will consider entity mapping + * information to determine the collection the query is ran against. Note, that MongoDB limits the number of results + * by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of + * results. + * + * @param near must not be {@literal null}. + * @param entityClass must not be {@literal null}. 
+ * @return + */ + Flux> geoNear(NearQuery near, Class entityClass); + + /** + * Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the + * number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a + * particular number of results. + * + * @param near must not be {@literal null}. + * @param entityClass must not be {@literal null}. + * @param collectionName the collection to trigger the query against. If no collection name is given the entity class + * will be inspected. + * @return + */ + Flux> geoNear(NearQuery near, Class entityClass, String collectionName); + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional + * fields specification. + * @param update the {@link Update} to apply on matching documents. + * @param entityClass the parametrized type. + * @return + */ + Mono findAndModify(Query query, Update update, Class entityClass); + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional + * fields specification. + * @param update the {@link Update} to apply on matching documents. + * @param entityClass the parametrized type. + * @param collectionName the collection to query. + * @return + */ + Mono findAndModify(Query query, Update update, Class entityClass, String collectionName); + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking + * {@link FindAndModifyOptions} into account. 
+ * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional + * fields specification. + * @param update the {@link Update} to apply on matching documents. + * @param options the {@link FindAndModifyOptions} holding additional information. + * @param entityClass the parametrized type. + * @return + */ + Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass); + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking + * {@link FindAndModifyOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional + * fields specification. + * @param update the {@link Update} to apply on matching documents. + * @param options the {@link FindAndModifyOptions} holding additional information. + * @param entityClass the parametrized type. + * @param collectionName the collection to query. + * @return + */ + Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, + String collectionName); + + /** + * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the + * specified type. The first document that matches the query is returned and also removed from the collection in the + * database. + *

+ * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. + *

+ * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @return the converted object + */ + Mono findAndRemove(Query query, Class entityClass); + + /** + * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified + * type. The first document that matches the query is returned and also removed from the collection in the database. + *

+ * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + *

+ * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from + * @return the converted object + */ + Mono findAndRemove(Query query, Class entityClass, String collectionName); + + /** + * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. + * + * @param query + * @param entityClass must not be {@literal null}. + * @return + */ + Mono count(Query query, Class entityClass); + + /** + * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} + * must solely consist of document field references as we lack type information to map potential property references + * onto document fields. To make sure the query gets mapped, use {@link #count(Query, Class, String)}. + * + * @param query + * @param collectionName must not be {@literal null} or empty. + * @return + * @see #count(Query, Class, String) + */ + Mono count(Query query, String collectionName); + + /** + * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity + * class to map the given {@link Query}. + * + * @param query + * @param entityClass must not be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @return + */ + Mono count(Query query, Class entityClass, String collectionName); + + /** + * Insert the object into the collection for the entity type of the object to save. + *

+ * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. + *

+ * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * + * Spring's Type Conversion" for more details. + *

+ *

+ * Insert is used to initially store the object into the database. To update an existing object use the save method. + * + * @param objectToSave the object to store in the collection. + * @return + */ + Mono insert(T objectToSave); + + /** + * Insert the object into the specified collection. + *

+ * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + *

+ * Insert is used to initially store the object into the database. To update an existing object use the save method. + * + * @param objectToSave the object to store in the collection + * @param collectionName name of the collection to store the object in + * @return + */ + Mono insert(T objectToSave, String collectionName); + + /** + * Insert a Collection of objects into a collection in a single batch write to the database. + * + * @param batchToSave the list of objects to save. + * @param entityClass class that determines the collection to use + * @return + */ + Flux insert(Collection batchToSave, Class entityClass); + + /** + * Insert a list of objects into the specified collection in a single batch write to the database. + * + * @param batchToSave the list of objects to save. + * @param collectionName name of the collection to store the object in + * @return + */ + Flux insert(Collection batchToSave, String collectionName); + + /** + * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the + * class. + * + * @param objectsToSave the list of objects to save. + * @return + */ + Flux insertAll(Collection objectsToSave); + + /** + * Insert the object into the collection for the entity type of the object to save. + *

+ * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. + *

+ * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * + * Spring's Type Conversion" for more details. + *

+ *

+ * Insert is used to initially store the object into the database. To update an existing object use the save method. + * + * @param objectToSave the object to store in the collection. + * @return + */ + Mono insert(Mono objectToSave); + + /** + * Insert a Collection of objects into a collection in a single batch write to the database. + * + * @param batchToSave the publisher which provides objects to save. + * @param entityClass class that determines the collection to use + * @return + */ + Flux insert(Publisher batchToSave, Class entityClass); + + /** + * Insert a list of objects into the specified collection in a single batch write to the database. + * + * @param batchToSave the publisher which provides objects to save. + * @param collectionName name of the collection to store the object in + * @return + */ + Flux insert(Publisher batchToSave, String collectionName); + + /** + * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the + * class. + * + * @param objectsToSave the publisher which provides objects to save. + * @return + */ + Flux insertAll(Publisher objectsToSave); + + /** + * Save the object to the collection for the entity type of the object to save. This will perform an insert if the + * object is not already present, that is an 'upsert'. + *

+ * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + *

+ * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * + * Spring's Type Conversion" for more details. + * + * @param objectToSave the object to store in the collection + * @return + */ + Mono save(T objectToSave); + + /** + * Save the object to the specified collection. This will perform an insert if the object is not already present, that + * is an 'upsert'. + *

+ * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + *

+ * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's + * Type Conversion" for more details. + * + * @param objectToSave the object to store in the collection + * @param collectionName name of the collection to store the object in + * @return + */ + Mono save(T objectToSave, String collectionName); + + /** + * Save the object to the collection for the entity type of the object to save. This will perform an insert if the + * object is not already present, that is an 'upsert'. + *

+ * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + *

+ * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * + * Spring's Type Conversion" for more details. + * + * @param objectToSave the object to store in the collection + * @return + */ + Mono save(Mono objectToSave); + + /** + * Save the object to the specified collection. This will perform an insert if the object is not already present, that + * is an 'upsert'. + *

+ * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + *

+ * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's + * Type Conversion" for more details. + * + * @param objectToSave the object to store in the collection + * @param collectionName name of the collection to store the object in + * @return + */ + Mono save(Mono objectToSave, String collectionName); + + /** + * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by + * combining the query document and the update document. + * + * @param query the query document that specifies the criteria used to select a record to be upserted + * @param update the update document that contains the updated object or $ operators to manipulate the existing object + * @param entityClass class that determines the collection to use + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono upsert(Query query, Update update, Class entityClass); + + /** + * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by + * combining the query document and the update document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. + * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono upsert(Query query, Update update, String collectionName); + + /** + * Performs an upsert. 
If no document is found that matches the query, a new document is created and inserted by + * combining the query document and the update document. + * + * @param query the query document that specifies the criteria used to select a record to be upserted + * @param update the update document that contains the updated object or $ operators to manipulate the existing object + * @param entityClass class of the pojo to be operated on + * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono upsert(Query query, Update update, Class entityClass, String collectionName); + + /** + * Updates the first object that is found in the collection of the entity class that matches the query document with + * the provided update document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. + * @param entityClass class that determines the collection to use + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono updateFirst(Query query, Update update, Class entityClass); + + /** + * Updates the first object that is found in the specified collection that matches the query document criteria with + * the provided updated document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. + * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. 
+ */ + Mono updateFirst(Query query, Update update, String collectionName); + + /** + * Updates the first object that is found in the specified collection that matches the query document criteria with + * the provided updated document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. + * @param entityClass class of the pojo to be operated on + * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono updateFirst(Query query, Update update, Class entityClass, String collectionName); + + /** + * Updates all objects that are found in the collection for the entity class that matches the query document criteria + * with the provided updated document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. + * @param entityClass class that determines the collection to use + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono updateMulti(Query query, Update update, Class entityClass); + + /** + * Updates all objects that are found in the specified collection that matches the query document criteria with the + * provided updated document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. + * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. 
+ */ + Mono updateMulti(Query query, Update update, String collectionName); + + /** + * Updates all objects that are found in the collection for the entity class that matches the query document criteria + * with the provided updated document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. + * @param entityClass class of the pojo to be operated on + * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono updateMulti(final Query query, final Update update, Class entityClass, String collectionName); + + /** + * Remove the given object from the collection by id. + * + * @param object + * @return + */ + Mono remove(Object object); + + /** + * Removes the given object from the given collection. + * + * @param object + * @param collection must not be {@literal null} or empty. + */ + Mono remove(Object object, String collection); + + /** + * Remove the given object from the collection by id. + * + * @param objectToRemove + * @return + */ + Mono remove(Mono objectToRemove); + + /** + * Removes the given object from the given collection. + * + * @param objectToRemove + * @param collection must not be {@literal null} or empty. + * @return + */ + Mono remove(Mono objectToRemove, String collection); + + /** + * Remove all documents that match the provided query document criteria from the the collection used to store the + * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. + * + * @param query + * @param entityClass + * @return + */ + Mono remove(Query query, Class entityClass); + + /** + * Remove all documents that match the provided query document criteria from the the collection used to store the + * entityClass. 
The Class parameter is also used to help convert the Id of the object if it is present in the query. + * + * @param query + * @param entityClass + * @param collectionName + * @return + */ + Mono remove(Query query, Class entityClass, String collectionName); + + /** + * Remove all documents from the specified collection that match the provided query document criteria. There is no + * conversion/mapping done for any criteria using the id field. + * + * @param query the query document that specifies the criteria used to remove a record + * @param collectionName name of the collection where the objects will be removed + */ + Mono remove(Query query, String collectionName); + + /** + * Returns and removes all documents from the specified collection that match the provided query. + * + * @param query + * @param collectionName + * @return + */ + Flux findAllAndRemove(Query query, String collectionName); + + /** + * Returns and removes all documents matching the given query from the collection used to store the entityClass. + * + * @param query + * @param entityClass + * @return + */ + Flux findAllAndRemove(Query query, Class entityClass); + + /** + * Returns and removes all documents that match the provided query document criteria from the collection used to + * store the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in + * the query. + * + * @param query + * @param entityClass + * @param collectionName + * @return + */ + Flux findAllAndRemove(Query query, Class entityClass, String collectionName); + + /** + * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified + * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite + * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link Subscription#cancel() canceled}. + *

+ * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + *

+ * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @return the List of converted objects + */ + Flux tail(Query query, Class entityClass); + + /** + * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified + * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite + * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link Subscription#cancel() canceled}. + *

+ * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + *

+ * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from + * @return the List of converted objects + */ + Flux tail(Query query, Class entityClass, String collectionName); + + /** + * Returns the underlying {@link MongoConverter}. + * + * @return + */ + MongoConverter getConverter(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java new file mode 100644 index 000000000..fb1230ff4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -0,0 +1,2445 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.SerializationUtils.*; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.bson.types.ObjectId; +import org.reactivestreams.Publisher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.BeansException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.context.ApplicationEventPublisherAware; +import org.springframework.context.ApplicationListener; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.core.convert.ConversionService; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.annotation.Id; +import org.springframework.data.convert.EntityReader; +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.Metric; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.ConvertingPropertyAccessor; +import 
org.springframework.data.mapping.model.MappingException; +import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.MongoTemplate.DocumentCallback; +import org.springframework.data.mongodb.core.convert.DbRefProxyHandler; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.DbRefResolverCallback; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoWriter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher; +import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.AfterDeleteEvent; +import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeDeleteEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; +import 
org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.BasicDBObject; +import com.mongodb.CursorType; +import com.mongodb.DBCollection; +import com.mongodb.DBCursor; +import com.mongodb.DBRef; +import com.mongodb.Mongo; +import com.mongodb.MongoException; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.FindOneAndDeleteOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.ReturnDocument; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.UpdateResult; +import com.mongodb.reactivestreams.client.FindPublisher; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; +import com.mongodb.reactivestreams.client.Success; +import com.mongodb.util.JSONParseException; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.util.function.Tuple2; + +/** + * Primary implementation of {@link ReactiveMongoOperations}. It simplifies the use of Reactive MongoDB usage and helps + * to avoid common errors. It executes core MongoDB workflow, leaving application code to provide {@link Document} and + * extract results. 
This class executes BSON queries or updates, initiating iteration over {@link FindPublisher} and + * catching MongoDB exceptions and translating them to the generic, more informative exception hierarchy defined in the + * org.springframework.dao package. Can be used within a service implementation via direct instantiation with a + * {@link SimpleReactiveMongoDatabaseFactory} reference, or get prepared in an application context and given to services as + * bean reference. Note: The {@link SimpleReactiveMongoDatabaseFactory} should always be configured as a bean in the + * application context, in the first case given to the service directly, in the second case to the prepared template. + * + * @author Mark Paluch + * @since 2.0 + */ +public class ReactiveMongoTemplate implements ReactiveMongoOperations, ApplicationContextAware { + + public static final DbRefResolver NO_OP_REF_RESOLVER = new NoOpDbRefResolver(); + + private static final Logger LOGGER = LoggerFactory.getLogger(ReactiveMongoTemplate.class); + private static final String ID_FIELD = "_id"; + private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE; + private static final Collection> ITERABLE_CLASSES; + + static { + + Set> iterableClasses = new HashSet<>(); + iterableClasses.add(List.class); + iterableClasses.add(Collection.class); + iterableClasses.add(Iterator.class); + iterableClasses.add(Publisher.class); + + ITERABLE_CLASSES = Collections.unmodifiableCollection(iterableClasses); + } + + private final MongoConverter mongoConverter; + private final MappingContext, MongoPersistentProperty> mappingContext; + private final ReactiveMongoDatabaseFactory mongoDatabaseFactory; + private final PersistenceExceptionTranslator exceptionTranslator; + private final QueryMapper queryMapper; + private final UpdateMapper updateMapper; + + private int publisherBatchSize = 10; + private WriteConcern writeConcern; + private WriteConcernResolver writeConcernResolver = 
DefaultWriteConcernResolver.INSTANCE; + private WriteResultChecking writeResultChecking = WriteResultChecking.NONE; + private ReadPreference readPreference; + private ApplicationEventPublisher eventPublisher; + private MongoPersistentEntityIndexCreator indexCreator; + + /** + * Constructor used for a basic template configuration. + * + * @param mongoClient must not be {@literal null}. + * @param databaseName must not be {@literal null} or empty. + */ + public ReactiveMongoTemplate(MongoClient mongoClient, String databaseName) { + this(new SimpleReactiveMongoDatabaseFactory(mongoClient, databaseName), null); + } + + /** + * Constructor used for a basic template configuration. + * + * @param mongoDatabaseFactory must not be {@literal null}. + */ + public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory) { + this(mongoDatabaseFactory, null); + } + + /** + * Constructor used for a basic template configuration. + * + * @param mongoDatabaseFactory must not be {@literal null}. + * @param mongoConverter + */ + public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, MongoConverter mongoConverter) { + + Assert.notNull(mongoDatabaseFactory, "ReactiveMongoDatabaseFactory must not be null!"); + + this.mongoDatabaseFactory = mongoDatabaseFactory; + this.exceptionTranslator = mongoDatabaseFactory.getExceptionTranslator(); + this.mongoConverter = mongoConverter == null ? 
getDefaultMongoConverter() : mongoConverter; + this.queryMapper = new QueryMapper(this.mongoConverter); + this.updateMapper = new UpdateMapper(this.mongoConverter); + + // We always have a mapping context in the converter, whether it's a simple one or not + mappingContext = this.mongoConverter.getMappingContext(); + // We create indexes based on mapping events + + if (null != mappingContext && mappingContext instanceof MongoMappingContext) { + indexCreator = new MongoPersistentEntityIndexCreator((MongoMappingContext) mappingContext, + new BlockingIndexOptionsProvider(this)); + eventPublisher = new MongoMappingEventPublisher(indexCreator); + if (mappingContext instanceof ApplicationEventPublisherAware) { + ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + } + } + } + + /** + * Configures the {@link WriteResultChecking} to be used with the template. Setting {@literal null} will reset the + * default of {@link ReactiveMongoTemplate#DEFAULT_WRITE_RESULT_CHECKING}. + * + * @param resultChecking + */ + public void setWriteResultChecking(WriteResultChecking resultChecking) { + this.writeResultChecking = resultChecking == null ? DEFAULT_WRITE_RESULT_CHECKING : resultChecking; + } + + /** + * Configures the {@link WriteConcern} to be used with the template. If none is configured the {@link WriteConcern} + * configured on the {@link MongoDbFactory} will apply. If you configured a {@link Mongo} instance no + * {@link WriteConcern} will be used. + * + * @param writeConcern + */ + public void setWriteConcern(WriteConcern writeConcern) { + this.writeConcern = writeConcern; + } + + /** + * Configures the {@link WriteConcernResolver} to be used with the template. 
+ * + * @param writeConcernResolver + */ + public void setWriteConcernResolver(WriteConcernResolver writeConcernResolver) { + this.writeConcernResolver = writeConcernResolver; + } + + /** + * Used by {@link #prepareCollection(MongoCollection)} to set the {@link ReadPreference} before any operations + * are performed. + * + * @param readPreference + */ + public void setReadPreference(ReadPreference readPreference) { + this.readPreference = readPreference; + } + + /** + * Used to set a batch size when working with batches of {@link Publisher} emitting items to insert. + * + * @param publisherBatchSize batch size + */ + public void setPublisherBatchSize(int publisherBatchSize) { + this.publisherBatchSize = publisherBatchSize; + } + + /* + * (non-Javadoc) + * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) + */ + public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + + prepareIndexCreator(applicationContext); + + eventPublisher = applicationContext; + if (mappingContext instanceof ApplicationEventPublisherAware) { + ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + } + } + + /** + * Inspects the given {@link ApplicationContext} for {@link MongoPersistentEntityIndexCreator} and those in turn if + * they were registered for the current {@link MappingContext}. If no creator for the current {@link MappingContext} + * can be found we manually add the internally created one as {@link ApplicationListener} to make sure indexes get + * created appropriately for entity types persisted through this {@link ReactiveMongoTemplate} instance. + * + * @param context must not be {@literal null}.
+ */ + private void prepareIndexCreator(ApplicationContext context) { + + String[] indexCreators = context.getBeanNamesForType(MongoPersistentEntityIndexCreator.class); + + for (String creator : indexCreators) { + MongoPersistentEntityIndexCreator creatorBean = context.getBean(creator, MongoPersistentEntityIndexCreator.class); + if (creatorBean.isIndexCreatorFor(mappingContext)) { + return; + } + } + + if (context instanceof ConfigurableApplicationContext) { + ((ConfigurableApplicationContext) context).addApplicationListener(indexCreator); + } + } + + /** + * Returns the default {@link MongoConverter}. + * + * @return + */ + public MongoConverter getConverter() { + return this.mongoConverter; + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.String) + */ + public ReactiveIndexOperations reactiveIndexOps(String collectionName) { + return new DefaultReactiveIndexOperations(this, collectionName); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.Class) + */ + public ReactiveIndexOperations reactiveIndexOps(Class entityClass) { + return new DefaultReactiveIndexOperations(this, determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.String) + */ + public IndexOperations indexOps(String collectionName) { + return new BlockingIndexOperations(new DefaultReactiveIndexOperations(this, collectionName)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.Class) + */ + public IndexOperations indexOps(Class entityClass) { + return new BlockingIndexOperations(new DefaultReactiveIndexOperations(this, determineCollectionName(entityClass))); + } + + public String getCollectionName(Class entityClass) { + return this.determineCollectionName(entityClass); + } + + /* 
(non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(java.lang.String) + */ + public Mono executeCommand(String jsonCommand) { + + Assert.notNull(jsonCommand, "Command must not be empty!"); + + return executeCommand(Document.parse(jsonCommand)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(org.bson.Document) + */ + public Mono executeCommand(final Document command) { + + Assert.notNull(command, "Command must not be null!"); + + return createFlux(db -> readPreference != null ? db.runCommand(command, readPreference) : db.runCommand(command)) + .next(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(org.bson.Document, com.mongodb.ReadPreference) + */ + public Mono executeCommand(final Document command, final ReadPreference readPreference) { + + Assert.notNull(command, "Command must not be null!"); + + return createFlux(db -> readPreference != null ? 
db.runCommand(command, readPreference) : db.runCommand(command)) + .next(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(java.lang.Class, org.springframework.data.mongodb.core.ReactiveCollectionCallback) + */ + @Override + public Flux execute(Class entityClass, ReactiveCollectionCallback action) { + return createFlux(determineCollectionName(entityClass), action); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(org.springframework.data.mongodb.core.ReactiveDbCallback) + */ + @Override + public Flux execute(ReactiveDatabaseCallback action) { + return createFlux(action); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(java.lang.String, org.springframework.data.mongodb.core.ReactiveCollectionCallback) + */ + public Flux execute(String collectionName, ReactiveCollectionCallback callback) { + Assert.notNull(callback); + return createFlux(collectionName, callback); + } + + /** + * Create a reusable Flux for a {@link ReactiveDatabaseCallback}. It's up to the developer to choose to obtain a new + * {@link Flux} or to reuse the {@link Flux}. + * + * @param callback must not be {@literal null} + * @return a {@link Flux} wrapping the {@link ReactiveDatabaseCallback}. + */ + public Flux createFlux(ReactiveDatabaseCallback callback) { + + Assert.notNull(callback); + + return Flux.defer(() -> callback.doInDB(getMongoDatabase())).onErrorResumeWith(translateFluxException()); + } + + /** + * Create a reusable Mono for a {@link ReactiveDatabaseCallback}. It's up to the developer to choose to obtain a new + * {@link Flux} or to reuse the {@link Flux}. + * + * @param callback must not be {@literal null} + * @return a {@link Mono} wrapping the {@link ReactiveDatabaseCallback}. 
+ */ + public Mono createMono(final ReactiveDatabaseCallback callback) { + + Assert.notNull(callback); + + return Mono.defer(() -> Mono.from(callback.doInDB(getMongoDatabase()))).otherwise(translateMonoException()); + } + + /** + * Create a reusable {@link Flux} for the {@code collectionName} and {@link ReactiveCollectionCallback}. + * + * @param collectionName must not be empty or {@literal null}. + * @param callback must not be {@literal null}. + * @return a reusable {@link Flux} wrapping the {@link ReactiveCollectionCallback}. + */ + public Flux createFlux(String collectionName, ReactiveCollectionCallback callback) { + + Assert.hasText(collectionName); + Assert.notNull(callback); + + Mono> collectionPublisher = Mono + .fromCallable(() -> getAndPrepareCollection(getMongoDatabase(), collectionName)); + + return collectionPublisher.flatMap(callback::doInCollection).onErrorResumeWith(translateFluxException()); + } + + /** + * Create a reusable {@link Mono} for the {@code collectionName} and {@link ReactiveCollectionCallback}. + * + * @param collectionName must not be empty or {@literal null}. + * @param callback must not be {@literal null}. + * @param + * @return a reusable {@link Mono} wrapping the {@link ReactiveCollectionCallback}. 
+ */ + public Mono createMono(String collectionName, ReactiveCollectionCallback callback) { + + Assert.hasText(collectionName); + Assert.notNull(callback); + + Mono> collectionPublisher = Mono + .fromCallable(() -> getAndPrepareCollection(getMongoDatabase(), collectionName)); + + return collectionPublisher.then(collection -> Mono.from(callback.doInCollection(collection))) + .otherwise(translateMonoException()); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class) + */ + public Mono> createCollection(Class entityClass) { + return createCollection(determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class, org.springframework.data.mongodb.core.CollectionOptions) + */ + public Mono> createCollection(Class entityClass, + CollectionOptions collectionOptions) { + return createCollection(determineCollectionName(entityClass), collectionOptions); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.String) + */ + public Mono> createCollection(final String collectionName) { + return doCreateCollection(collectionName, new CreateCollectionOptions()); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.String, org.springframework.data.mongodb.core.CollectionOptions) + */ + public Mono> createCollection(final String collectionName, + final CollectionOptions collectionOptions) { + return doCreateCollection(collectionName, convertToCreateCollectionOptions(collectionOptions)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#getCollection(java.lang.String) + */ + public MongoCollection getCollection(final String collectionName) { + return execute((MongoDatabaseCallback>) db -> db.getCollection(collectionName)); + } 
+ + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#collectionExists(java.lang.Class) + */ + public Mono collectionExists(Class entityClass) { + return collectionExists(determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#collectionExists(java.lang.String) + */ + public Mono collectionExists(final String collectionName) { + return createMono(db -> Flux.from(db.listCollectionNames()) // + .filter(s -> s.equals(collectionName)) // + .map(s -> true) // + .single(false)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#dropCollection(java.lang.Class) + */ + public Mono dropCollection(Class entityClass) { + return dropCollection(determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#dropCollection(java.lang.String) + */ + public Mono dropCollection(final String collectionName) { + + return createMono(db -> db.getCollection(collectionName).drop()).doOnSuccess(success -> { + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("Dropped collection [" + collectionName + "]"); + } + }).then(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#getCollectionNames() + */ + public Flux getCollectionNames() { + return createFlux(MongoDatabase::listCollectionNames); + } + + public MongoDatabase getMongoDatabase() { + return mongoDatabaseFactory.getMongoDatabase(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class) + */ + public Mono findOne(Query query, Class entityClass) { + return findOne(query, entityClass, determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see 
org.springframework.data.mongodb.core.ReactiveMongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) + */ + public Mono findOne(Query query, Class entityClass, String collectionName) { + + if (query.getSortObject() == null) { + return doFindOne(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass); + } + + query.limit(1); + return find(query, entityClass, collectionName).next(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#exists(org.springframework.data.mongodb.core.query.Query, java.lang.Class) + */ + public Mono exists(Query query, Class entityClass) { + return exists(query, entityClass, determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#exists(org.springframework.data.mongodb.core.query.Query, java.lang.String) + */ + public Mono exists(Query query, String collectionName) { + return exists(query, null, collectionName); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#exists(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) + */ + public Mono exists(final Query query, final Class entityClass, String collectionName) { + + if (query == null) { + throw new InvalidDataAccessApiUsageException("Query passed in to exist can't be null"); + } + + return createFlux(collectionName, collection -> { + + Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), getPersistentEntity(entityClass)); + return collection.find(mappedQuery).limit(1); + }).hasElements(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#find(org.springframework.data.mongodb.core.query.Query, java.lang.Class) + */ + public Flux find(Query query, Class entityClass) { + return find(query, entityClass, determineCollectionName(entityClass)); + } + + /* 
(non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#find(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) + */ + public Flux find(final Query query, Class entityClass, String collectionName) { + + if (query == null) { + return findAll(entityClass, collectionName); + } + + return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, + new QueryFindPublisherPreparer(query, entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findById(java.lang.Object, java.lang.Class) + */ + public Mono findById(Object id, Class entityClass) { + return findById(id, entityClass, determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findById(java.lang.Object, java.lang.Class, java.lang.String) + */ + public Mono findById(Object id, Class entityClass, String collectionName) { + + MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(entityClass); + MongoPersistentProperty idProperty = persistentEntity == null ? null : persistentEntity.getIdProperty(); + + String idKey = idProperty == null ? 
ID_FIELD : idProperty.getName(); + + return doFindOne(collectionName, new Document(idKey, id), null, entityClass); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#geoNear(org.springframework.data.mongodb.core.query.NearQuery, java.lang.Class) + */ + @Override + public Flux> geoNear(NearQuery near, Class entityClass) { + return geoNear(near, entityClass, determineCollectionName(entityClass)); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#geoNear(org.springframework.data.mongodb.core.query.NearQuery, java.lang.Class, java.lang.String) + */ + @Override + @SuppressWarnings("unchecked") + public Flux> geoNear(NearQuery near, Class entityClass, String collectionName) { + + if (near == null) { + throw new InvalidDataAccessApiUsageException("NearQuery must not be null!"); + } + + if (entityClass == null) { + throw new InvalidDataAccessApiUsageException("Entity class must not be null!"); + } + + String collection = StringUtils.hasText(collectionName) ? 
collectionName : determineCollectionName(entityClass); + Document nearDbObject = near.toDocument(); + + Document command = new Document("geoNear", collection); + command.putAll(nearDbObject); + + return Flux.defer(() -> { + + if (nearDbObject.containsKey("query")) { + Document query = (Document) nearDbObject.get("query"); + command.put("query", queryMapper.getMappedObject(query, getPersistentEntity(entityClass))); + } + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("Executing geoNear using: {} for class: {} in collection: {}", serializeToJsonSafely(command), + entityClass, collectionName); + } + + GeoNearResultDbObjectCallback callback = new GeoNearResultDbObjectCallback( + new ReadDocumentCallback(mongoConverter, entityClass, collectionName), near.getMetric()); + + return executeCommand(command, this.readPreference).flatMap(document -> { + + List l = document.get("results", List.class); + if (l == null) { + return Flux.empty(); + } + return Flux.fromIterable(l); + }).skip(near.getSkip() != null ? 
near.getSkip() : 0).map(new Function>() { + @Override + public GeoResult apply(Document object) { + return callback.doWith(object); + } + }); + }); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class) + */ + public Mono findAndModify(Query query, Update update, Class entityClass) { + return findAndModify(query, update, new FindAndModifyOptions(), entityClass, determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class, java.lang.String) + */ + public Mono findAndModify(Query query, Update update, Class entityClass, String collectionName) { + return findAndModify(query, update, new FindAndModifyOptions(), entityClass, collectionName); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, org.springframework.data.mongodb.core.FindAndModifyOptions, java.lang.Class) + */ + public Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass) { + return findAndModify(query, update, options, entityClass, determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, org.springframework.data.mongodb.core.FindAndModifyOptions, java.lang.Class, java.lang.String) + */ + public Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, + String collectionName) { + return 
doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(), + getMappedSortObject(query, entityClass), entityClass, update, options); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) + */ + public Mono findAndRemove(Query query, Class entityClass) { + return findAndRemove(query, entityClass, determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) + */ + public Mono findAndRemove(Query query, Class entityClass, String collectionName) { + + return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(), + getMappedSortObject(query, entityClass), entityClass); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class) + */ + public Mono count(Query query, Class entityClass) { + Assert.notNull(entityClass); + return count(query, entityClass, determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.String) + */ + public Mono count(final Query query, String collectionName) { + return count(query, null, collectionName); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) + */ + public Mono count(final Query query, final Class entityClass, String collectionName) { + + Assert.hasText(collectionName); + + return createMono(collectionName, collection -> { + + final Document Document = query == null ? 
null + : queryMapper.getMappedObject(query.getQueryObject(), + entityClass == null ? null : mappingContext.getPersistentEntity(entityClass)); + + return collection.count(Document); + }); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(reactor.core.publisher.Mono) + */ + @Override + public Mono insert(Mono objectToSave) { + return objectToSave.then(this::insert); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(org.reactivestreams.Publisher, java.lang.Class) + */ + @Override + public Flux insert(Publisher batchToSave, Class entityClass) { + return insert(batchToSave, determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(org.reactivestreams.Publisher, java.lang.String) + */ + @Override + public Flux insert(Publisher batchToSave, String collectionName) { + return Flux.from(batchToSave).buffer(publisherBatchSize).flatMap(collection -> insert(collection, collectionName)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.lang.Object) + */ + public Mono insert(T objectToSave) { + + ensureNotIterable(objectToSave); + return insert(objectToSave, determineEntityCollectionName(objectToSave)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.lang.Object, java.lang.String) + */ + public Mono insert(T objectToSave, String collectionName) { + + ensureNotIterable(objectToSave); + return doInsert(collectionName, objectToSave, this.mongoConverter); + } + + protected Mono doInsert(String collectionName, T objectToSave, MongoWriter writer) { + + assertUpdateableIdIfNotSet(objectToSave); + + return Mono.defer(() -> { + + initializeVersionProperty(objectToSave); + maybeEmitEvent(new BeforeConvertEvent(objectToSave, collectionName)); + + Document dbDoc = toDbObject(objectToSave, 
writer); + + maybeEmitEvent(new BeforeSaveEvent(objectToSave, dbDoc, collectionName)); + + Mono afterInsert = insertDBObject(collectionName, dbDoc, objectToSave.getClass()).then(id -> { + populateIdIfNecessary(objectToSave, id); + maybeEmitEvent(new AfterSaveEvent(objectToSave, dbDoc, collectionName)); + return Mono.just(objectToSave); + }); + + return afterInsert; + }); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.util.Collection, java.lang.Class) + */ + public Flux insert(Collection batchToSave, Class entityClass) { + return doInsertBatch(determineCollectionName(entityClass), batchToSave, this.mongoConverter); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.util.Collection, java.lang.String) + */ + public Flux insert(Collection batchToSave, String collectionName) { + return doInsertBatch(collectionName, batchToSave, this.mongoConverter); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insertAll(java.util.Collection) + */ + public Flux insertAll(Collection objectsToSave) { + return doInsertAll(objectsToSave, this.mongoConverter); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insertAll(org.reactivestreams.Publisher) + */ + @Override + public Flux insertAll(Publisher objectsToSave) { + return Flux.from(objectsToSave).buffer(publisherBatchSize).flatMap(this::insertAll); + } + + protected Flux doInsertAll(Collection listToSave, MongoWriter writer) { + + final Map> elementsByCollection = new HashMap>(); + + listToSave.forEach(element -> { + MongoPersistentEntity entity = mappingContext.getPersistentEntity(element.getClass()); + + if (entity == null) { + throw new InvalidDataAccessApiUsageException("No PersistentEntity information found for " + element.getClass()); + } + + String collection = entity.getCollection(); + List collectionElements = 
elementsByCollection.get(collection); + + if (null == collectionElements) { + collectionElements = new ArrayList(); + elementsByCollection.put(collection, collectionElements); + } + + collectionElements.add(element); + }); + + return Flux.fromIterable(elementsByCollection.keySet()) + .flatMap(collectionName -> doInsertBatch(collectionName, elementsByCollection.get(collectionName), writer)); + } + + protected Flux doInsertBatch(final String collectionName, final Collection batchToSave, + final MongoWriter writer) { + + Assert.notNull(writer); + + Mono>> prepareDocuments = Flux.fromIterable(batchToSave) + .flatMap(new Function>>() { + @Override + public Flux> apply(T o) { + + initializeVersionProperty(o); + maybeEmitEvent(new BeforeConvertEvent(o, collectionName)); + + Document dbDoc = toDbObject(o, writer); + + maybeEmitEvent(new BeforeSaveEvent(o, dbDoc, collectionName)); + return Flux.zip(Mono.just(o), Mono.just(dbDoc)); + } + }).collectList(); + + Flux> insertDocuments = prepareDocuments.flatMap(tuples -> { + + List dbObjects = tuples.stream().map(Tuple2::getT2).collect(Collectors.toList()); + + return insertDocumentList(collectionName, dbObjects).thenMany(Flux.fromIterable(tuples)); + }); + + return insertDocuments.map(tuple -> { + + populateIdIfNecessary(tuple.getT1(), tuple.getT2().get(ID_FIELD)); + maybeEmitEvent(new AfterSaveEvent(tuple.getT1(), tuple.getT2(), collectionName)); + return tuple.getT1(); + }); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#save(reactor.core.publisher.Mono) + */ + @Override + public Mono save(Mono objectToSave) { + return objectToSave.then(this::save); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#save(reactor.core.publisher.Mono, java.lang.String) + */ + @Override + public Mono save(Mono objectToSave, String collectionName) { + return objectToSave.then(o -> save(o, collectionName)); + } + + /* (non-Javadoc) + * @see 
org.springframework.data.mongodb.core.ReactiveMongoOperations#save(java.lang.Object) + */ + public Mono save(T objectToSave) { + + Assert.notNull(objectToSave); + return save(objectToSave, determineEntityCollectionName(objectToSave)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#save(java.lang.Object, java.lang.String) + */ + public Mono save(T objectToSave, String collectionName) { + + Assert.notNull(objectToSave); + Assert.hasText(collectionName); + + MongoPersistentEntity mongoPersistentEntity = getPersistentEntity(objectToSave.getClass()); + + // No optimistic locking -> simple save + if (mongoPersistentEntity == null || !mongoPersistentEntity.hasVersionProperty()) { + return doSave(collectionName, objectToSave, this.mongoConverter); + } + + return doSaveVersioned(objectToSave, mongoPersistentEntity, collectionName); + } + + private Mono doSaveVersioned(T objectToSave, MongoPersistentEntity entity, String collectionName) { + + return createMono(collectionName, collection -> { + + ConvertingPropertyAccessor convertingAccessor = new ConvertingPropertyAccessor( + entity.getPropertyAccessor(objectToSave), mongoConverter.getConversionService()); + + MongoPersistentProperty idProperty = entity.getIdProperty(); + MongoPersistentProperty versionProperty = entity.getVersionProperty(); + + Object version = convertingAccessor.getProperty(versionProperty); + Number versionNumber = convertingAccessor.getProperty(versionProperty, Number.class); + + // Fresh instance -> initialize version property + if (version == null) { + return doInsert(collectionName, objectToSave, mongoConverter); + } + + ReactiveMongoTemplate.this.assertUpdateableIdIfNotSet(objectToSave); + + // Create query for entity with the id and old version + Object id = convertingAccessor.getProperty(idProperty); + Query query = new Query(Criteria.where(idProperty.getName()).is(id).and(versionProperty.getName()).is(version)); + + // Bump version number + 
convertingAccessor.setProperty(versionProperty, versionNumber.longValue() + 1); + + ReactiveMongoTemplate.this.maybeEmitEvent(new BeforeConvertEvent(objectToSave, collectionName)); + + Document document = ReactiveMongoTemplate.this.toDbObject(objectToSave, mongoConverter); + + ReactiveMongoTemplate.this.maybeEmitEvent(new BeforeSaveEvent(objectToSave, document, collectionName)); + Update update = Update.fromDocument(document, ID_FIELD); + + return doUpdate(collectionName, query, update, objectToSave.getClass(), false, false).map(updateResult -> { + + maybeEmitEvent(new AfterSaveEvent(objectToSave, document, collectionName)); + return objectToSave; + }); + }); + } + + protected Mono doSave(String collectionName, T objectToSave, MongoWriter writer) { + + assertUpdateableIdIfNotSet(objectToSave); + + return createMono(collectionName, collection -> { + + maybeEmitEvent(new BeforeConvertEvent(objectToSave, collectionName)); + Document dbDoc = toDbObject(objectToSave, writer); + maybeEmitEvent(new BeforeSaveEvent(objectToSave, dbDoc, collectionName)); + + return saveDocument(collectionName, dbDoc, objectToSave.getClass()).map(id -> { + + populateIdIfNecessary(objectToSave, id); + maybeEmitEvent(new AfterSaveEvent(objectToSave, dbDoc, collectionName)); + return objectToSave; + }); + }); + } + + protected Mono insertDBObject(final String collectionName, final Document dbDoc, final Class entityClass) { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("Inserting Document containing fields: " + dbDoc.keySet() + " in collection: " + collectionName); + } + + final Document document = new Document(dbDoc); + Flux execute = execute(collectionName, collection -> { + + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName, entityClass, + dbDoc, null); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + + MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); + + return 
collectionToUse.insertOne(document); + }); + + return Flux.from(execute).last().map(success -> document.get(ID_FIELD)); + } + + protected Flux insertDocumentList(final String collectionName, final List dbDocList) { + + if (dbDocList.isEmpty()) { + return Flux.empty(); + } + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("Inserting list of DBObjects containing " + dbDocList.size() + " items"); + } + + final List documents = new ArrayList<>(); + + return execute(collectionName, collection -> { + + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT_LIST, collectionName, null, + null, null); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); + + documents.addAll(toDocuments(dbDocList)); + + return collectionToUse.insertMany(documents); + }).flatMap(s -> { + + List documentsWithIds = documents.stream() + .filter(document -> document.get(ID_FIELD) instanceof ObjectId).collect(Collectors.toList()); + return Flux.fromIterable(documentsWithIds); + }).map(document -> document.get(ID_FIELD, ObjectId.class)); + } + + private MongoCollection prepareCollection(MongoCollection collection, + WriteConcern writeConcernToUse) { + MongoCollection collectionToUse = collection; + + if (writeConcernToUse != null) { + collectionToUse = collectionToUse.withWriteConcern(writeConcernToUse); + } + return collectionToUse; + } + + protected Mono saveDocument(final String collectionName, final Document dbDoc, final Class entityClass) { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("Saving Document containing fields: " + dbDoc.keySet()); + } + + return createMono(collectionName, collection -> { + + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass, + dbDoc, null); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + + Publisher publisher; + if (!dbDoc.containsKey(ID_FIELD)) { + 
if (writeConcernToUse == null) { + publisher = collection.insertOne(dbDoc); + } else { + publisher = collection.withWriteConcern(writeConcernToUse).insertOne(dbDoc); + } + } else if (writeConcernToUse == null) { + publisher = collection.replaceOne(Filters.eq(ID_FIELD, dbDoc.get(ID_FIELD)), dbDoc, + new UpdateOptions().upsert(true)); + } else { + publisher = collection.withWriteConcern(writeConcernToUse).replaceOne(Filters.eq(ID_FIELD, dbDoc.get(ID_FIELD)), + dbDoc, new UpdateOptions().upsert(true)); + } + + return Mono.from(publisher).map(o -> dbDoc.get(ID_FIELD)); + }); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class) + */ + public Mono upsert(Query query, Update update, Class entityClass) { + return doUpdate(determineCollectionName(entityClass), query, update, entityClass, true, false); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.String) + */ + public Mono upsert(Query query, Update update, String collectionName) { + return doUpdate(collectionName, query, update, null, true, false); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class, java.lang.String) + */ + public Mono upsert(Query query, Update update, Class entityClass, String collectionName) { + return doUpdate(collectionName, query, update, entityClass, true, false); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class) + */ + 
public Mono updateFirst(Query query, Update update, Class entityClass) { + return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, false); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.String) + */ + public Mono updateFirst(final Query query, final Update update, final String collectionName) { + return doUpdate(collectionName, query, update, null, false, false); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class, java.lang.String) + */ + public Mono updateFirst(Query query, Update update, Class entityClass, String collectionName) { + return doUpdate(collectionName, query, update, entityClass, false, false); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class) + */ + public Mono updateMulti(Query query, Update update, Class entityClass) { + return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, true); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.String) + */ + public Mono updateMulti(final Query query, final Update update, String collectionName) { + return doUpdate(collectionName, query, update, null, false, true); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, 
org.springframework.data.mongodb.core.query.Update, java.lang.Class, java.lang.String) + */ + public Mono updateMulti(final Query query, final Update update, Class entityClass, + String collectionName) { + return doUpdate(collectionName, query, update, entityClass, false, true); + } + + protected Mono doUpdate(final String collectionName, final Query query, final Update update, + final Class entityClass, final boolean upsert, final boolean multi) { + + MongoPersistentEntity entity = entityClass == null ? null : getPersistentEntity(entityClass); + + Flux result = execute(collectionName, collection -> { + + increaseVersionForUpdateIfNecessary(entity, update); + + Document queryObj = query == null ? new Document() : queryMapper.getMappedObject(query.getQueryObject(), entity); + Document updateObj = update == null ? new Document() + : updateMapper.getMappedObject(update.getUpdateObject(), entity); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName)); + } + + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass, + updateObj, queryObj); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); + + UpdateOptions updateOptions = new UpdateOptions().upsert(upsert); + + if (!UpdateMapper.isUpdateObject(updateObj)) { + return collectionToUse.replaceOne(queryObj, updateObj, updateOptions); + } + if (multi) { + return collectionToUse.updateMany(queryObj, updateObj, updateOptions); + } + return collectionToUse.updateOne(queryObj, updateObj, updateOptions); + }).doOnNext(updateResult -> { + + if (entity != null && entity.hasVersionProperty() && !multi) { + if (updateResult.wasAcknowledged() && updateResult.getMatchedCount() == 0) { + + Document queryObj = query == 
null ? new Document() + : queryMapper.getMappedObject(query.getQueryObject(), entity); + Document updateObj = update == null ? new Document() + : updateMapper.getMappedObject(update.getUpdateObject(), entity); + if (dbObjectContainsVersionProperty(queryObj, entity)) + throw new OptimisticLockingFailureException("Optimistic lock exception on saving entity: " + + updateObj.toString() + " to collection " + collectionName); + } + } + }); + + return result.next(); + } + + private void increaseVersionForUpdateIfNecessary(MongoPersistentEntity persistentEntity, Update update) { + + if (persistentEntity != null && persistentEntity.hasVersionProperty()) { + String versionFieldName = persistentEntity.getVersionProperty().getFieldName(); + if (!update.modifies(versionFieldName)) { + update.inc(versionFieldName, 1L); + } + } + } + + private boolean dbObjectContainsVersionProperty(Document document, MongoPersistentEntity persistentEntity) { + + if (persistentEntity == null || !persistentEntity.hasVersionProperty()) { + return false; + } + + return document.containsKey(persistentEntity.getVersionProperty().getFieldName()); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(reactor.core.publisher.Mono) + */ + @Override + public Mono remove(Mono objectToRemove) { + return objectToRemove.then(this::remove); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(reactor.core.publisher.Mono, java.lang.String) + */ + @Override + public Mono remove(Mono objectToRemove, String collection) { + return objectToRemove.then(o -> remove(objectToRemove, collection)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(java.lang.Object) + */ + public Mono remove(Object object) { + + if (object == null) { + return null; + } + + return remove(getIdQueryFor(object), object.getClass()); + } + + /* (non-Javadoc) + * @see 
org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(java.lang.Object, java.lang.String) + */ + public Mono remove(Object object, String collection) { + + Assert.hasText(collection); + + if (object == null) { + return null; + } + + return doRemove(collection, getIdQueryFor(object), object.getClass()); + } + + /** + * Returns {@link Entry} containing the field name of the id property as {@link Entry#getKey()} and the {@link Id}s + * property value as its {@link Entry#getValue()}. + * + * @param object + * @return + */ + private Entry extractIdPropertyAndValue(Object object) { + + Assert.notNull(object, "Id cannot be extracted from 'null'."); + + Class objectType = object.getClass(); + + if (object instanceof Document) { + return Collections.singletonMap(ID_FIELD, ((Document) object).get(ID_FIELD)).entrySet().iterator().next(); + } + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(objectType); + MongoPersistentProperty idProp = entity == null ? null : entity.getIdProperty(); + + if (idProp == null) { + throw new MappingException("No id property found for object of type " + objectType); + } + + Object idValue = entity.getPropertyAccessor(object).getProperty(idProp); + return Collections.singletonMap(idProp.getFieldName(), idValue).entrySet().iterator().next(); + } + + /** + * Returns a {@link Query} for the given entity by its id. + * + * @param object must not be {@literal null}. + * @return + */ + private Query getIdQueryFor(Object object) { + + Entry id = extractIdPropertyAndValue(object); + return new Query(where(id.getKey()).is(id.getValue())); + } + + /** + * Returns a {@link Query} for the given entities by their ids. + * + * @param objects must not be {@literal null} or {@literal empty}. 
+ * @return + */ + private Query getIdInQueryFor(Collection objects) { + + Assert.notEmpty(objects, "Cannot create Query for empty collection."); + + Iterator it = objects.iterator(); + Entry firstEntry = extractIdPropertyAndValue(it.next()); + + ArrayList ids = new ArrayList(objects.size()); + ids.add(firstEntry.getValue()); + + while (it.hasNext()) { + ids.add(extractIdPropertyAndValue(it.next()).getValue()); + } + + return new Query(where(firstEntry.getKey()).in(ids)); + } + + private void assertUpdateableIdIfNotSet(Object entity) { + + MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(entity.getClass()); + MongoPersistentProperty idProperty = persistentEntity == null ? null : persistentEntity.getIdProperty(); + + if (idProperty == null) { + return; + } + + Object idValue = persistentEntity.getPropertyAccessor(entity).getProperty(idProperty); + + if (idValue == null && !MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(idProperty.getType())) { + throw new InvalidDataAccessApiUsageException( + String.format("Cannot autogenerate id of type %s for entity of type %s!", idProperty.getType().getName(), + entity.getClass().getName())); + } + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(org.springframework.data.mongodb.core.query.Query, java.lang.String) + */ + public Mono remove(Query query, String collectionName) { + return remove(query, null, collectionName); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) + */ + public Mono remove(Query query, Class entityClass) { + return remove(query, entityClass, determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) + */ + public Mono remove(Query query, 
Class entityClass, String collectionName) { + return doRemove(collectionName, query, entityClass); + } + + protected Mono doRemove(final String collectionName, final Query query, + final Class entityClass) { + + if (query == null) { + throw new InvalidDataAccessApiUsageException("Query passed in to remove can't be null!"); + } + + Assert.hasText(collectionName, "Collection name must not be null or empty!"); + + final Document queryObject = query.getQueryObject(); + final MongoPersistentEntity entity = getPersistentEntity(entityClass); + + return execute(collectionName, collection -> { + + maybeEmitEvent(new BeforeDeleteEvent(queryObject, entityClass, collectionName)); + + Document dboq = queryMapper.getMappedObject(queryObject, entity); + + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, entityClass, + null, queryObject); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("Remove using query: {} in collection: {}.", + new Object[]{serializeToJsonSafely(dboq), collectionName}); + } + + return collectionToUse.deleteMany(dboq); + }).doOnNext(deleteResult -> maybeEmitEvent(new AfterDeleteEvent(queryObject, entityClass, collectionName))) + .next(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAll(java.lang.Class) + */ + public Flux findAll(Class entityClass) { + return findAll(entityClass, determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAll(java.lang.Class, java.lang.String) + */ + public Flux findAll(Class entityClass, String collectionName) { + return executeFindMultiInternal(new FindCallback(null), null, + new ReadDocumentCallback(mongoConverter, entityClass, collectionName), collectionName); + } + + /* (non-Javadoc) + * 
@see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.String) + */ + @Override + public Flux findAllAndRemove(Query query, String collectionName) { + + return findAllAndRemove(query, null, collectionName); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) + */ + @Override + public Flux findAllAndRemove(Query query, Class entityClass) { + return findAllAndRemove(query, entityClass, determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) + */ + @Override + public Flux findAllAndRemove(Query query, Class entityClass, String collectionName) { + return doFindAndDelete(collectionName, query, entityClass); + } + + @Override + public Flux tail(Query query, Class entityClass) { + return tail(query, entityClass, determineCollectionName(entityClass)); + } + + @Override + public Flux tail(Query query, Class entityClass, String collectionName) { + + if (query == null) { + + // TODO: clean up + LOGGER.debug(String.format("find for class: %s in collection: %s", entityClass, collectionName)); + + return executeFindMultiInternal( + collection -> new FindCallback(null).doInCollection(collection).cursorType(CursorType.TailableAwait), null, + new ReadDocumentCallback(mongoConverter, entityClass, collectionName), collectionName); + } + + return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, + new TailingQueryFindPublisherPreparer(query, entityClass)); + } + + /** + * Retrieve and remove all documents matching the given {@code query} by calling {@link #find(Query, Class, String)} + * and {@link #remove(Query, Class, String)}, whereas the {@link Query} 
for {@link #remove(Query, Class, String)} is + * constructed out of the find result. + * + * @param collectionName + * @param query + * @param entityClass + * @return + */ + protected Flux doFindAndDelete(String collectionName, Query query, Class entityClass) { + + Flux flux = find(query, entityClass, collectionName); + + return Flux.from(flux).collectList() + .flatMap(list -> Flux.from(remove(getIdInQueryFor(list), entityClass, collectionName)) + .flatMap(deleteResult -> Flux.fromIterable(list))); + } + + /** + * Create the specified collection using the provided options + * + * @param collectionName + * @param collectionOptions + * @return the collection that was created + */ + protected Mono> doCreateCollection(final String collectionName, + final CreateCollectionOptions collectionOptions) { + + return createMono(db -> db.createCollection(collectionName, collectionOptions)).map(success -> { + + // TODO: Emit a collection created event + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("Created collection [{}]", collectionName); + } + return getCollection(collectionName); + }); + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. + * The query document is specified as a standard {@link Document} and so is the fields specification. + * + * @param collectionName name of the collection to retrieve the objects from. + * @param query the query document that specifies the criteria used to find a record. + * @param fields the document that specifies the fields to be returned. + * @param entityClass the parameterized type of the returned list. + * @return the {@link List} of converted objects. + */ + protected Mono doFindOne(String collectionName, Document query, Document fields, Class entityClass) { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + Document mappedQuery = queryMapper.getMappedObject(query, entity); + Document mappedFields = fields == null ? 
null : queryMapper.getMappedObject(fields, entity); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("findOne using query: %s fields: %s for class: %s in collection: %s", + serializeToJsonSafely(query), mappedFields, entityClass, collectionName)); + } + + return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields), + new ReadDocumentCallback(this.mongoConverter, entityClass, collectionName), collectionName); + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to a List using the template's converter. The + * query document is specified as a standard Document and so is the fields specification. + * + * @param collectionName name of the collection to retrieve the objects from + * @param query the query document that specifies the criteria used to find a record + * @param fields the document that specifies the fields to be returned + * @param entityClass the parameterized type of the returned list. + * @return the List of converted objects. + */ + protected Flux doFind(String collectionName, Document query, Document fields, Class entityClass) { + return doFind(collectionName, query, fields, entityClass, null, + new ReadDocumentCallback(this.mongoConverter, entityClass, collectionName)); + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to a List of the specified type. The object is + * converted from the MongoDB native representation using an instance of {@see MongoConverter}. The query document is + * specified as a standard Document and so is the fields specification. + * + * @param collectionName name of the collection to retrieve the objects from. + * @param query the query document that specifies the criteria used to find a record. + * @param fields the document that specifies the fields to be returned. + * @param entityClass the parameterized type of the returned list. 
+ * @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply + * limits, skips and so on). + * @return the {@link List} of converted objects. + */ + protected Flux doFind(String collectionName, Document query, Document fields, Class entityClass, + FindPublisherPreparer preparer) { + return doFind(collectionName, query, fields, entityClass, preparer, + new ReadDocumentCallback(mongoConverter, entityClass, collectionName)); + } + + protected Flux doFind(String collectionName, Document query, Document fields, Class entityClass, + FindPublisherPreparer preparer, DocumentCallback objectCallback) { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + + Document mappedFields = queryMapper.getMappedFields(fields, entity); + Document mappedQuery = queryMapper.getMappedObject(query, entity); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName)); + } + + return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback, + collectionName); + } + + protected CreateCollectionOptions convertToCreateCollectionOptions(CollectionOptions collectionOptions) { + + CreateCollectionOptions result = new CreateCollectionOptions(); + if (collectionOptions != null) { + + if (collectionOptions.getCapped() != null) { + result = result.capped(collectionOptions.getCapped()); + } + + if (collectionOptions.getSize() != null) { + result = result.sizeInBytes(collectionOptions.getSize()); + } + + if (collectionOptions.getMaxDocuments() != null) { + result = result.maxDocuments(collectionOptions.getMaxDocuments()); + } + } + return result; + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. 
+ * The first document that matches the query is returned and also removed from the collection in the database. + *

+ * The query document is specified as a standard Document and so is the fields specification. + * + * @param collectionName name of the collection to retrieve the objects from + * @param query the query document that specifies the criteria used to find a record + * @param entityClass the parameterized type of the returned list. + * @return the List of converted objects. + */ + protected Mono doFindAndRemove(String collectionName, Document query, Document fields, Document sort, + Class entityClass) { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s", + serializeToJsonSafely(query), fields, sort, entityClass, collectionName)); + } + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + + return executeFindOneInternal(new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort), + new ReadDocumentCallback(this.mongoConverter, entityClass, collectionName), collectionName); + } + + protected Mono doFindAndModify(String collectionName, Document query, Document fields, Document sort, + Class entityClass, Update update, FindAndModifyOptions options) { + + FindAndModifyOptions optionsToUse; + if (options == null) { + optionsToUse = new FindAndModifyOptions(); + } else { + optionsToUse = options; + } + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + + return Mono.defer(() -> { + + increaseVersionForUpdateIfNecessary(entity, update); + + Document mappedQuery = queryMapper.getMappedObject(query, entity); + Document mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity); + + if (LOGGER.isDebugEnabled()) { + LOGGER + .debug( + String.format( + "findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s " + + "in collection: %s", + serializeToJsonSafely(mappedQuery), fields, sort, entityClass, serializeToJsonSafely(mappedUpdate), + collectionName)); + } 
+ + return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, optionsToUse), + new ReadDocumentCallback(this.mongoConverter, entityClass, collectionName), collectionName); + }); + } + + protected void maybeEmitEvent(MongoMappingEvent event) { + if (null != eventPublisher) { + eventPublisher.publishEvent(event); + } + } + + /** + * Populates the id property of the saved object, if it's not set already. + * + * @param savedObject + * @param id + */ + private void populateIdIfNecessary(Object savedObject, Object id) { + + if (id == null) { + return; + } + + if (savedObject instanceof Document) { + Document Document = (Document) savedObject; + Document.put(ID_FIELD, id); + return; + } + + MongoPersistentProperty idProp = getIdPropertyFor(savedObject.getClass()); + + if (idProp == null) { + return; + } + + ConversionService conversionService = mongoConverter.getConversionService(); + MongoPersistentEntity entity = mappingContext.getPersistentEntity(savedObject.getClass()); + PersistentPropertyAccessor accessor = entity.getPropertyAccessor(savedObject); + + if (accessor.getProperty(idProp) != null) { + return; + } + + new ConvertingPropertyAccessor(accessor, conversionService).setProperty(idProp, id); + } + + private MongoCollection getAndPrepareCollection(MongoDatabase db, String collectionName) { + + try { + MongoCollection collection = db.getCollection(collectionName); + return prepareCollection(collection); + } catch (RuntimeException e) { + throw potentiallyConvertRuntimeException(e, exceptionTranslator); + } + } + + protected void ensureNotIterable(Object o) { + if (null != o) { + + boolean isIterable = o.getClass().isArray(); + + if (!isIterable) { + for (Class iterableClass : ITERABLE_CLASSES) { + if (iterableClass.isAssignableFrom(o.getClass()) || o.getClass().getName().equals(iterableClass.getName())) { + isIterable = true; + break; + } + } + } + + if (isIterable) { + throw new IllegalArgumentException("Cannot use a 
collection here."); + } + } + } + + /** + * Prepare the collection before any processing is done using it. This allows a convenient way to apply settings like + * slaveOk() etc. Can be overridden in sub-classes. + * + * @param collection + */ + protected MongoCollection prepareCollection(MongoCollection collection) { + if (this.readPreference != null) { + return collection.withReadPreference(readPreference); + } + return collection; + } + + /** + * Prepare the WriteConcern before any processing is done using it. This allows a convenient way to apply custom + * settings in sub-classes.
+ * The returned {@link WriteConcern} will be defaulted to {@link WriteConcern#ACKNOWLEDGED} when + * {@link WriteResultChecking} is set to {@link WriteResultChecking#EXCEPTION}. + * + * @param mongoAction any WriteConcern already configured or null + * @return The prepared WriteConcern or null + * @see #setWriteConcern(WriteConcern) + * @see #setWriteConcernResolver(WriteConcernResolver) + */ + protected WriteConcern prepareWriteConcern(MongoAction mongoAction) { + + WriteConcern wc = writeConcernResolver.resolve(mongoAction); + return potentiallyForceAcknowledgedWrite(wc); + } + + private WriteConcern potentiallyForceAcknowledgedWrite(WriteConcern wc) { + + if (ObjectUtils.nullSafeEquals(WriteResultChecking.EXCEPTION, writeResultChecking) + && MongoClientVersion.isMongo3Driver()) { + if (wc == null || wc.getWObject() == null + || (wc.getWObject() instanceof Number && ((Number) wc.getWObject()).intValue() < 1)) { + return WriteConcern.ACKNOWLEDGED; + } + } + return wc; + } + + /** + * Internal method using callbacks to do queries against the datastore that requires reading a single object from a + * collection of objects. It will take the following steps + *

    + *
  1. Execute the given {@link ReactiveCollectionCallback} for a {@link Document}.
  2. + *
  3. Apply the given {@link DocumentCallback} to each of the {@link Document}s to obtain the result.
  4. + *
      + * + * @param collectionCallback the callback to retrieve the {@link Document} + * @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type + * @param collectionName the collection to be queried + * @return + */ + private Mono executeFindOneInternal(ReactiveCollectionCallback collectionCallback, + DocumentCallback objectCallback, String collectionName) { + + return createMono(collectionName, + collection -> Mono.from(collectionCallback.doInCollection(collection)).map(objectCallback::doWith)); + } + + /** + * Internal method using callback to do queries against the datastore that requires reading a collection of objects. + * It will take the following steps + *
        + *
      1. Execute the given {@link ReactiveCollectionCallback} for a {@link FindPublisher}.
      2. + *
      3. Prepare that {@link FindPublisher} with the given {@link FindPublisherPreparer} (will be skipped if + * {@link FindPublisherPreparer} is {@literal null}
      4. + *
      5. Apply the given {@link DocumentCallback} in {@link Flux#map(Function)} of {@link FindPublisher}
      6. + *
          + * + * @param collectionCallback the callback to retrieve the {@link FindPublisher} with, must not be {@literal null}. + * @param preparer the {@link FindPublisherPreparer} to potentially modify the {@link FindPublisher} before iterating + * over it, may be {@literal null} + * @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type, must + * not be {@literal null}. + * @param collectionName the collection to be queried, must not be {@literal null}. + * @return + */ + private Flux executeFindMultiInternal(ReactiveCollectionQueryCallback collectionCallback, + FindPublisherPreparer preparer, DocumentCallback objectCallback, String collectionName) { + + return createFlux(collectionName, collection -> { + + FindPublisher findPublisher = collectionCallback.doInCollection(collection); + + if (preparer != null) { + findPublisher = preparer.prepare(findPublisher); + } + return Flux.from(findPublisher).map(objectCallback::doWith); + }); + } + + private T execute(MongoDatabaseCallback action) { + + Assert.notNull(action); + + try { + MongoDatabase db = this.getMongoDatabase(); + return action.doInDatabase(db); + } catch (RuntimeException e) { + throw potentiallyConvertRuntimeException(e, exceptionTranslator); + } + } + + /** + * Exception translation {@link Function} intended for {@link Flux#onErrorResumeWith(Function)} usage. + * + * @return the exception translation {@link Function} + */ + private Function> translateFluxException() { + + return throwable -> { + + if (throwable instanceof RuntimeException) { + return Flux.error(potentiallyConvertRuntimeException((RuntimeException) throwable, exceptionTranslator)); + } + + return Flux.error(throwable); + }; + } + + /** + * Exception translation {@link Function} intended for {@link Mono#otherwise(Function)} usage. 
+ * + * @return the exception translation {@link Function} + */ + private Function> translateMonoException() { + + return throwable -> { + + if (throwable instanceof RuntimeException) { + return Mono.error(potentiallyConvertRuntimeException((RuntimeException) throwable, exceptionTranslator)); + } + + return Mono.error(throwable); + }; + } + + /** + * Tries to convert the given {@link RuntimeException} into a {@link DataAccessException} but returns the original + * exception if the conversation failed. Thus allows safe re-throwing of the return value. + * + * @param ex the exception to translate + * @param exceptionTranslator the {@link PersistenceExceptionTranslator} to be used for translation + * @return + */ + private static RuntimeException potentiallyConvertRuntimeException(RuntimeException ex, + PersistenceExceptionTranslator exceptionTranslator) { + RuntimeException resolved = exceptionTranslator.translateExceptionIfPossible(ex); + return resolved == null ? ex : resolved; + } + + private MongoPersistentEntity getPersistentEntity(Class type) { + return type == null ? null : mappingContext.getPersistentEntity(type); + } + + private MongoPersistentProperty getIdPropertyFor(Class type) { + MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(type); + return persistentEntity == null ? 
null : persistentEntity.getIdProperty(); + } + + private String determineEntityCollectionName(T obj) { + + if (null != obj) { + return determineCollectionName(obj.getClass()); + } + + return null; + } + + String determineCollectionName(Class entityClass) { + + if (entityClass == null) { + throw new InvalidDataAccessApiUsageException( + "No class parameter provided, entity collection can't be determined!"); + } + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + + if (entity == null) { + throw new InvalidDataAccessApiUsageException( + "No Persistent Entity information found for the class " + entityClass.getName()); + } + + return entity.getCollection(); + } + + private static MappingMongoConverter getDefaultMongoConverter() { + + MappingMongoConverter converter = new MappingMongoConverter(NO_OP_REF_RESOLVER, new MongoMappingContext()); + converter.afterPropertiesSet(); + return converter; + } + + private Document getMappedSortObject(Query query, Class type) { + + if (query == null || query.getSortObject() == null) { + return null; + } + + return queryMapper.getMappedSort(query.getSortObject(), mappingContext.getPersistentEntity(type)); + } + + /** + * @param objectToSave + * @param writer + * @return + */ + private Document toDbObject(T objectToSave, MongoWriter writer) { + + if (objectToSave instanceof Document) { + return (Document) objectToSave; + } + + if (!(objectToSave instanceof String)) { + Document dbDoc = new Document(); + writer.write(objectToSave, dbDoc); + + if (dbDoc.containsKey(ID_FIELD) && dbDoc.get(ID_FIELD) == null) { + dbDoc.remove(ID_FIELD); + } + return dbDoc; + } else { + try { + return Document.parse((String) objectToSave); + } catch (JSONParseException | org.bson.json.JsonParseException e) { + throw new MappingException("Could not parse given String to save into a JSON document!", e); + } + } + } + + private void initializeVersionProperty(Object entity) { + + MongoPersistentEntity mongoPersistentEntity = 
getPersistentEntity(entity.getClass()); + + if (mongoPersistentEntity != null && mongoPersistentEntity.hasVersionProperty()) { + ConvertingPropertyAccessor accessor = new ConvertingPropertyAccessor( + mongoPersistentEntity.getPropertyAccessor(entity), mongoConverter.getConversionService()); + accessor.setProperty(mongoPersistentEntity.getVersionProperty(), 0); + } + } + + // Callback implementations + + /** + * Simple {@link ReactiveCollectionCallback} that takes a query {@link Document} plus an optional fields specification + * {@link Document} and executes that against the {@link DBCollection}. + * + * @author Oliver Gierke + * @author Thomas Risberg + */ + private static class FindOneCallback implements ReactiveCollectionCallback { + + private final Document query; + private final Document fields; + + FindOneCallback(Document query, Document fields) { + this.query = query; + this.fields = fields; + } + + @Override + public Publisher doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { + + if (fields == null) { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("findOne using query: %s in db.collection: %s", serializeToJsonSafely(query), + collection.getNamespace().getFullName())); + } + + return collection.find(query).limit(1).first(); + } else { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("findOne using query: %s fields: %s in db.collection: %s", + serializeToJsonSafely(query), fields, collection.getNamespace().getFullName())); + } + + return collection.find(query).projection(fields).limit(1); + } + } + } + + /** + * Simple {@link ReactiveCollectionQueryCallback} that takes a query {@link Document} plus an optional fields + * specification {@link Document} and executes that against the {@link MongoCollection}. 
+ * + * @author Mark Paluch + */ + private static class FindCallback implements ReactiveCollectionQueryCallback { + + private final Document query; + private final Document fields; + + FindCallback(Document query) { + this(query, null); + } + + FindCallback(Document query, Document fields) { + this.query = query; + this.fields = fields; + } + + @Override + public FindPublisher doInCollection(MongoCollection collection) { + + FindPublisher findPublisher; + if (query == null || query.isEmpty()) { + findPublisher = collection.find(); + } else { + findPublisher = collection.find(query); + } + + if (fields == null || fields.isEmpty()) { + return findPublisher; + } else { + return findPublisher.projection(fields); + } + } + } + + /** + * Simple {@link ReactiveCollectionCallback} that takes a query {@link Document} plus an optional fields specification + * {@link Document} and executes that against the {@link MongoCollection}. + * + * @author Mark Paluch + */ + private static class FindAndRemoveCallback implements ReactiveCollectionCallback { + + private final Document query; + private final Document fields; + private final Document sort; + + FindAndRemoveCallback(Document query, Document fields, Document sort) { + + this.query = query; + this.fields = fields; + this.sort = sort; + } + + @Override + public Publisher doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { + + FindOneAndDeleteOptions findOneAndDeleteOptions = convertToFindOneAndDeleteOptions(fields, sort); + return collection.findOneAndDelete(query, findOneAndDeleteOptions); + } + } + + /** + * @author Mark Paluch + */ + private static class FindAndModifyCallback implements ReactiveCollectionCallback { + + private final Document query; + private final Document fields; + private final Document sort; + private final Document update; + private final FindAndModifyOptions options; + + FindAndModifyCallback(Document query, Document fields, Document sort, Document update, + 
FindAndModifyOptions options) { + + this.query = query; + this.fields = fields; + this.sort = sort; + this.update = update; + this.options = options; + } + + @Override + public Publisher doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { + + if (options.isRemove()) { + FindOneAndDeleteOptions findOneAndDeleteOptions = convertToFindOneAndDeleteOptions(fields, sort); + return collection.findOneAndDelete(query, findOneAndDeleteOptions); + } + + FindOneAndUpdateOptions findOneAndUpdateOptions = convertToFindOneAndUpdateOptions(options, fields, sort); + return collection.findOneAndUpdate(query, update, findOneAndUpdateOptions); + } + + private FindOneAndUpdateOptions convertToFindOneAndUpdateOptions(FindAndModifyOptions options, Document fields, + Document sort) { + + FindOneAndUpdateOptions result = new FindOneAndUpdateOptions(); + + result = result.projection(fields).sort(sort).upsert(options.isUpsert()); + + if (options.isReturnNew()) { + result = result.returnDocument(ReturnDocument.AFTER); + } else { + result = result.returnDocument(ReturnDocument.BEFORE); + } + + return result; + } + } + + private static FindOneAndDeleteOptions convertToFindOneAndDeleteOptions(Document fields, Document sort) { + + FindOneAndDeleteOptions result = new FindOneAndDeleteOptions(); + result = result.projection(fields).sort(sort); + + return result; + } + + /** + * Simple internal callback to allow operations on a {@link Document}. + * + * @author Mark Paluch + */ + + interface DocumentCallback { + + T doWith(Document object); + } + + /** + * Simple internal callback to allow operations on a {@link MongoDatabase}. + * + * @author Mark Paluch + */ + + interface MongoDatabaseCallback { + + T doInDatabase(MongoDatabase db); + } + + /** + * Simple internal callback to allow operations on a {@link MongoDatabase}. 
+ * + * @author Mark Paluch + */ + + interface ReactiveCollectionQueryCallback extends ReactiveCollectionCallback { + + FindPublisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException; + } + + /** + * Simple {@link DocumentCallback} that will transform {@link Document} into the given target type using the given + * {@link EntityReader}. + * + * @author Mark Paluch + */ + private class ReadDocumentCallback implements DocumentCallback { + + private final EntityReader reader; + private final Class type; + private final String collectionName; + + ReadDocumentCallback(EntityReader reader, Class type, String collectionName) { + + Assert.notNull(reader); + Assert.notNull(type); + this.reader = reader; + this.type = type; + this.collectionName = collectionName; + } + + public T doWith(Document object) { + if (null != object) { + maybeEmitEvent(new AfterLoadEvent(object, type, collectionName)); + } + T source = reader.read(type, object); + if (null != source) { + maybeEmitEvent(new AfterConvertEvent(object, source, collectionName)); + } + return source; + } + } + + /** + * {@link DocumentCallback} that assumes a {@link GeoResult} to be created, delegates actual content unmarshalling to + * a delegate and creates a {@link GeoResult} from the result. + * + * @author Mark Paluch + */ + static class GeoNearResultDbObjectCallback implements DocumentCallback> { + + private final DocumentCallback delegate; + private final Metric metric; + + /** + * Creates a new {@link GeoNearResultDbObjectCallback} using the given {@link DbObjectCallback} delegate for + * {@link GeoResult} content unmarshalling. + * + * @param delegate must not be {@literal null}. 
+ */ + GeoNearResultDbObjectCallback(DocumentCallback delegate, Metric metric) { + + Assert.notNull(delegate); + this.delegate = delegate; + this.metric = metric; + } + + public GeoResult doWith(Document object) { + + double distance = (Double) object.get("dis"); + Document content = (Document) object.get("obj"); + + T doWith = delegate.doWith(content); + + return new GeoResult(doWith, new Distance(distance, metric)); + } + } + + /** + * @author Mark Paluch + */ + class QueryFindPublisherPreparer implements FindPublisherPreparer { + + private final Query query; + private final Class type; + + QueryFindPublisherPreparer(Query query, Class type) { + + this.query = query; + this.type = type; + } + + public FindPublisher prepare(FindPublisher cursor) { + + if (query == null) { + return cursor; + } + + if (query.getSkip() <= 0 && query.getLimit() <= 0 && query.getSortObject() == null + && !StringUtils.hasText(query.getHint()) && !query.getMeta().hasValues()) { + return cursor; + } + + FindPublisher cursorToUse = cursor; + + try { + if (query.getSkip() > 0) { + cursorToUse = cursorToUse.skip(query.getSkip()); + } + if (query.getLimit() > 0) { + cursorToUse = cursorToUse.limit(query.getLimit()); + } + if (query.getSortObject() != null) { + Document sortDbo = type != null ? 
getMappedSortObject(query, type) : query.getSortObject(); + cursorToUse = cursorToUse.sort(sortDbo); + } + BasicDBObject modifiers = new BasicDBObject(); + + if (StringUtils.hasText(query.getHint())) { + modifiers.append("$hint", query.getHint()); + } + + if (query.getMeta().hasValues()) { + for (Entry entry : query.getMeta().values()) { + modifiers.append(entry.getKey(), entry.getValue()); + } + } + + if (!modifiers.isEmpty()) { + cursorToUse = cursorToUse.modifiers(modifiers); + } + } catch (RuntimeException e) { + throw potentiallyConvertRuntimeException(e, exceptionTranslator); + } + + return cursorToUse; + } + } + + class TailingQueryFindPublisherPreparer extends QueryFindPublisherPreparer { + + TailingQueryFindPublisherPreparer(Query query, Class type) { + super(query, type); + } + + @Override + public FindPublisher prepare(FindPublisher cursor) { + return super.prepare(cursor.cursorType(CursorType.TailableAwait)); + } + } + + private static List toDocuments(final Collection documents) { + return new ArrayList<>(documents); + } + + /** + * No-Operation {@link org.springframework.data.mongodb.core.mapping.DBRef} resolver. 
+ * + * @author Mark Paluch + */ + static class NoOpDbRefResolver implements DbRefResolver { + + @Override + public Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback, + DbRefProxyHandler proxyHandler) { + return null; + } + + @Override + public DBRef createDbRef(org.springframework.data.mongodb.core.mapping.DBRef annotation, + MongoPersistentEntity entity, Object id) { + return null; + } + + @Override + public Document fetch(DBRef dbRef) { + return null; + } + + @Override + public List bulkFetch(List dbRefs) { + return null; + } + } + + /** + * @author Mark Paluch + */ + private static class BlockingIndexOptionsProvider implements IndexOperationsProvider { + + private final ReactiveMongoOperations mongoOperations; + + public BlockingIndexOptionsProvider(ReactiveMongoOperations mongoOperations) { + this.mongoOperations = mongoOperations; + } + + @Override + public IndexOperations indexOps(String collectionName) { + return new BlockingIndexOperations(mongoOperations.reactiveIndexOps(collectionName)); + } + } + + /** + * Blocking {@link IndexOperations} overlay to synchronize calls. 
+ * + * @author Mark Paluch + */ + private static class BlockingIndexOperations implements IndexOperations { + + private final ReactiveIndexOperations reactiveIndexOperations; + + public BlockingIndexOperations(ReactiveIndexOperations reactiveIndexOperations) { + this.reactiveIndexOperations = reactiveIndexOperations; + } + + @Override + public void ensureIndex(IndexDefinition indexDefinition) { + reactiveIndexOperations.ensureIndex(indexDefinition).block(); + } + + @Override + public void dropIndex(String name) { + reactiveIndexOperations.dropIndex(name).block(); + } + + @Override + public void dropAllIndexes() { + reactiveIndexOperations.dropAllIndexes().block(); + } + + @Override + public List getIndexInfo() { + return reactiveIndexOperations.getIndexInfo().collectList().block(); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java new file mode 100644 index 000000000..78431860b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java @@ -0,0 +1,134 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.net.UnknownHostException; + +import org.springframework.beans.factory.DisposableBean; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.util.Assert; + +import com.mongodb.ConnectionString; +import com.mongodb.WriteConcern; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoClients; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Factory to create {@link MongoDatabase} instances from a {@link MongoClient} instance. + * + * @author Mark Paluch + * @since 2.0 + */ +public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, ReactiveMongoDatabaseFactory { + + private final MongoClient mongo; + private final String databaseName; + private final boolean mongoInstanceCreated; + private final PersistenceExceptionTranslator exceptionTranslator; + + private WriteConcern writeConcern; + + /** + * Creates a new {@link SimpleReactiveMongoDatabaseFactory} instance from the given {@link ConnectionString}. + * + * @param connectionString must not be {@literal null}. + * @throws UnknownHostException + */ + public SimpleReactiveMongoDatabaseFactory(ConnectionString connectionString) throws UnknownHostException { + this(MongoClients.create(connectionString), connectionString.getDatabase(), true); + } + + /** + * Creates a new {@link SimpleReactiveMongoDatabaseFactory} instance from the given {@link MongoClient}. + * + * @param mongoClient must not be {@literal null}. + * @param databaseName must not be {@literal null}. 
+ * @since 2.0 + */ + public SimpleReactiveMongoDatabaseFactory(MongoClient mongoClient, String databaseName) { + this(mongoClient, databaseName, false); + } + + private SimpleReactiveMongoDatabaseFactory(MongoClient client, String databaseName, boolean mongoInstanceCreated) { + + Assert.notNull(client, "MongoClient must not be null!"); + Assert.hasText(databaseName, "Database name must not be empty!"); + Assert.isTrue(databaseName.matches("[\\w-]+"), + "Database name must only contain letters, numbers, underscores and dashes!"); + + this.mongo = client; + this.databaseName = databaseName; + this.mongoInstanceCreated = mongoInstanceCreated; + this.exceptionTranslator = new MongoExceptionTranslator(); + } + + /** + * Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created. + * + * @param writeConcern the writeConcern to set + */ + public void setWriteConcern(WriteConcern writeConcern) { + this.writeConcern = writeConcern; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase() + */ + public MongoDatabase getMongoDatabase() throws DataAccessException { + return getMongoDatabase(databaseName); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase(java.lang.String) + */ + public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException { + + Assert.hasText(dbName, "Database name must not be empty."); + + MongoDatabase db = ReactiveMongoDbUtils.getMongoDatabase(mongo, dbName); + + if (writeConcern != null) { + + db = db.withWriteConcern(writeConcern); + } + + return db; + } + + /** + * Clean up the Mongo instance if it was created by the factory itself. 
+ * + * @see DisposableBean#destroy() + */ + public void destroy() throws Exception { + if (mongoInstanceCreated) { + mongo.close(); + } + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getExceptionTranslator() + */ + public PersistenceExceptionTranslator getExceptionTranslator() { + return this.exceptionTranslator; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverter.java index b2726f960..8416d0fdc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverter.java @@ -31,7 +31,7 @@ import org.springframework.data.mongodb.core.convert.MongoConverters.StringToObj /** * Base class for {@link MongoConverter} implementations. Sets up a {@link GenericConversionService} and populates basic * converters. Allows registering {@link CustomConversions}. - * + * * @author Jon Brisbin * @author Oliver Gierke */ @@ -43,7 +43,7 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali /** * Creates a new {@link AbstractMongoConverter} using the given {@link GenericConversionService}. - * + * * @param conversionService */ public AbstractMongoConverter(GenericConversionService conversionService) { @@ -52,7 +52,7 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali /** * Registers the given custom conversions with the converter. - * + * * @param conversions */ public void setCustomConversions(CustomConversions conversions) { @@ -61,7 +61,7 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali /** * Registers {@link EntityInstantiators} to customize entity instantiation. 
- * + * * @param instantiators */ public void setInstantiators(EntityInstantiators instantiators) { @@ -89,7 +89,7 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali conversions.registerConvertersIn(conversionService); } - /* + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.convert.MongoWriter#convertToMongoType(java.lang.Object) */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java index 53b2be4c1..261626838 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java @@ -15,6 +15,8 @@ */ package org.springframework.data.mongodb.core.convert; +import reactor.core.publisher.Flux; + import java.math.BigDecimal; import java.math.BigInteger; import java.net.MalformedURLException; @@ -29,6 +31,7 @@ import java.util.concurrent.atomic.AtomicLong; import org.bson.Document; import org.bson.types.Code; import org.bson.types.ObjectId; +import org.reactivestreams.Publisher; import org.springframework.core.convert.ConversionFailedException; import org.springframework.core.convert.TypeDescriptor; import org.springframework.core.convert.converter.ConditionalConverter; @@ -44,7 +47,7 @@ import org.springframework.util.StringUtils; /** * Wrapper class to contain useful converters for the usage with Mongo. - * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl @@ -59,7 +62,7 @@ abstract class MongoConverters { /** * Returns the converters to be registered. 
- * + * * @return * @since 1.9 */ @@ -79,6 +82,7 @@ abstract class MongoConverters { converters.add(DocumentToNamedMongoScriptConverter.INSTANCE); converters.add(CurrencyToStringConverter.INSTANCE); converters.add(StringToCurrencyConverter.INSTANCE); + converters.add(NumberToNumberConverterFactory.INSTANCE); converters.add(AtomicIntegerToIntegerConverter.INSTANCE); converters.add(AtomicLongToLongConverter.INSTANCE); converters.add(LongToAtomicLongConverter.INSTANCE); @@ -89,7 +93,7 @@ abstract class MongoConverters { /** * Simple singleton to convert {@link ObjectId}s to their {@link String} representation. - * + * * @author Oliver Gierke */ public static enum ObjectIdToStringConverter implements Converter { @@ -102,7 +106,7 @@ abstract class MongoConverters { /** * Simple singleton to convert {@link String}s to their {@link ObjectId} representation. - * + * * @author Oliver Gierke */ public static enum StringToObjectIdConverter implements Converter { @@ -115,7 +119,7 @@ abstract class MongoConverters { /** * Simple singleton to convert {@link ObjectId}s to their {@link java.math.BigInteger} representation. - * + * * @author Oliver Gierke */ public static enum ObjectIdToBigIntegerConverter implements Converter { @@ -128,7 +132,7 @@ abstract class MongoConverters { /** * Simple singleton to convert {@link BigInteger}s to their {@link ObjectId} representation. - * + * * @author Oliver Gierke */ public static enum BigIntegerToObjectIdConverter implements Converter { @@ -274,7 +278,7 @@ abstract class MongoConverters { /** * {@link Converter} implementation converting {@link Currency} into its ISO 4217 {@link String} representation. 
- * + * * @author Christoph Strobl * @since 1.9 */ @@ -293,9 +297,29 @@ abstract class MongoConverters { } } + @ReadingConverter + public static enum PublisherToFluxConverter implements Converter, Flux> { + + INSTANCE; + + /* + * (non-Javadoc) + * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) + */ + + @Override + public Flux convert(Publisher source) { + + if(source instanceof Flux){ + return (Flux) source; + } + return Flux.from((Publisher) source); + } + } + /** * {@link Converter} implementation converting ISO 4217 {@link String} into {@link Currency}. - * + * * @author Christoph Strobl * @since 1.9 */ @@ -318,7 +342,7 @@ abstract class MongoConverters { * {@link ConverterFactory} implementation using {@link NumberUtils} for number conversion and parsing. Additionally * deals with {@link AtomicInteger} and {@link AtomicLong} by calling {@code get()} before performing the actual * conversion. - * + * * @author Christoph Strobl * @since 1.9 */ @@ -351,7 +375,7 @@ abstract class MongoConverters { /** * Creates a new {@link NumberToNumberConverter} for the given target type. - * + * * @param targetType must not be {@literal null}. 
*/ public NumberToNumberConverter(Class targetType) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreator.java index 3956ead3b..007f9dc66 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreator.java @@ -15,18 +15,22 @@ */ package org.springframework.data.mongodb.core.index; +import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.TimeUnit; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationListener; import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.data.mapping.PersistentEntity; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mapping.context.MappingContextEvent; import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.UncategorizedMongoDbException; +import org.springframework.data.mongodb.core.IndexOperations; +import org.springframework.data.mongodb.core.IndexOperationsProvider; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; @@ -36,56 +40,53 @@ import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; import com.mongodb.MongoException; -import com.mongodb.client.MongoCursor; -import com.mongodb.client.model.IndexOptions; /** * Component that inspects 
{@link MongoPersistentEntity} instances contained in the given {@link MongoMappingContext} * for indexing metadata and ensures the indexes to be available. - * + * * @author Jon Brisbin * @author Oliver Gierke * @author Philipp Schneider * @author Johno Crawford * @author Laurent Canet * @author Christoph Strobl + * @author Mark Paluch */ public class MongoPersistentEntityIndexCreator implements ApplicationListener> { private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class); private final Map, Boolean> classesSeen = new ConcurrentHashMap, Boolean>(); - private final MongoDbFactory mongoDbFactory; + private final IndexOperationsProvider indexOperationsProvider; private final MongoMappingContext mappingContext; private final IndexResolver indexResolver; /** * Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and * {@link MongoDbFactory}. - * - * @param mappingContext must not be {@literal null}. - * @param mongoDbFactory must not be {@literal null}. + * @param mappingContext must not be {@literal null}. + * @param indexOperationsProvider must not be {@literal null}. */ - public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, MongoDbFactory mongoDbFactory) { - this(mappingContext, mongoDbFactory, new MongoPersistentEntityIndexResolver(mappingContext)); + public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, IndexOperationsProvider indexOperationsProvider) { + this(mappingContext, indexOperationsProvider, new MongoPersistentEntityIndexResolver(mappingContext)); } /** * Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and * {@link MongoDbFactory}. - * + * * @param mappingContext must not be {@literal null}. * @param mongoDbFactory must not be {@literal null}. * @param indexResolver must not be {@literal null}. 
*/ - public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, MongoDbFactory mongoDbFactory, - IndexResolver indexResolver) { - - Assert.notNull(mongoDbFactory); + public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, IndexOperationsProvider indexOperationsProvider, + IndexResolver indexResolver) { + Assert.notNull(indexOperationsProvider); Assert.notNull(mappingContext); Assert.notNull(indexResolver); - this.mongoDbFactory = mongoDbFactory; + this.indexOperationsProvider = indexOperationsProvider; this.mappingContext = mappingContext; this.indexResolver = indexResolver; @@ -141,64 +142,15 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener cursor = mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()) - .listIndexes(org.bson.Document.class).iterator(); + List existingIndexes = indexOperations.getIndexInfo(); - while (cursor.hasNext()) { - - org.bson.Document index = cursor.next(); - if (ObjectUtils.nullSafeEquals(indexNameToLookUp, index.get("name"))) { - return index; - } - } + return existingIndexes.stream().// + filter(indexInfo -> ObjectUtils.nullSafeEquals(indexNameToLookUp, indexInfo.getName())).// + findFirst().// + orElse(null); } catch (Exception e) { LOGGER.debug( diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/InfiniteStream.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/InfiniteStream.java new file mode 100644 index 000000000..2e2897cc2 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/InfiniteStream.java @@ -0,0 +1,50 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.repository; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.data.annotation.QueryAnnotation; + +import reactor.core.Cancellation; + +/** + * Annotation to declare an infinite stream using repository query methods. An infinite stream uses MongoDB's + * {@link com.mongodb.CursorType#TailableAwait tailable} cursors to retrieve data from a capped collection and stream + * data as it is inserted into the collection. An infinite stream can only be used with streams that emit more than one + * element, such as {@link reactor.core.publisher.Flux} or {@link rx.Observable}. + *

          + * The stream may become dead, or invalid, if either the query returns no match or the cursor returns the document at + * the "end" of the collection and then the application deletes that document. + *

          + * A stream that is no longer in use must be {@link Cancellation#dispose() disposed} otherwise the streams will linger + * and exhaust resources. + * + * @author Mark Paluch + * @see Tailable Cursors + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +@QueryAnnotation +public @interface InfiniteStream { + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReactiveMongoRepository.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReactiveMongoRepository.java new file mode 100644 index 000000000..13508b94b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReactiveMongoRepository.java @@ -0,0 +1,78 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.io.Serializable; + +import org.reactivestreams.Publisher; +import org.springframework.data.domain.Example; +import org.springframework.data.domain.Sort; +import org.springframework.data.repository.NoRepositoryBean; +import org.springframework.data.repository.reactive.ReactivePagingAndSortingRepository; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * Mongo specific {@link org.springframework.data.repository.Repository} interface with reactive support. 
+ * + * @author Mark Paluch + * @since 2.0 + */ +@NoRepositoryBean +public interface ReactiveMongoRepository extends ReactivePagingAndSortingRepository { + + /** + * Inserts the given entity. Assumes the instance to be new to be able to apply insertion optimizations. Use + * the returned instance for further operations as the save operation might have changed the entity instance + * completely. Prefer using {@link #save(Object)} instead to avoid the usage of store-specific API. + * + * @param entity must not be {@literal null}. + * @return the saved entity + */ + Mono insert(S entity); + + /** + * Inserts the given entities. Assumes the instance to be new to be able to apply insertion optimizations. Use + * the returned instance for further operations as the save operation might have changed the entity instance + * completely. Prefer using {@link #save(Object)} instead to avoid the usage of store-specific API. + * + * @param entities must not be {@literal null}. + * @return the saved entity + */ + Flux insert(Iterable entities); + + /** + * Inserts the given entities. Assumes the instance to be new to be able to apply insertion optimizations. Use + * the returned instance for further operations as the save operation might have changed the entity instance + * completely. Prefer using {@link #save(Object)} instead to avoid the usage of store-specific API. + * + * @param entities must not be {@literal null}. 
+ * @return the saved entities + */ + Flux insert(Publisher entities); + + /* (non-Javadoc) + * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example) + */ + Flux findAll(Example example); + + /* (non-Javadoc) + * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example, org.springframework.data.domain.Sort) + */ + Flux findAll(Example example, Sort sort); + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableReactiveMongoRepositories.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableReactiveMongoRepositories.java new file mode 100644 index 000000000..8be556629 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableReactiveMongoRepositories.java @@ -0,0 +1,141 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.repository.config; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.beans.factory.FactoryBean; +import org.springframework.context.annotation.ComponentScan.Filter; +import org.springframework.context.annotation.Import; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean; +import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactoryBean; +import org.springframework.data.repository.config.DefaultRepositoryBaseClass; +import org.springframework.data.repository.query.QueryLookupStrategy; +import org.springframework.data.repository.query.QueryLookupStrategy.Key; + +/** + * Annotation to activate reactive MongoDB repositories. If no base package is configured through either + * {@link #value()}, {@link #basePackages()} or {@link #basePackageClasses()} it will trigger scanning of the package of + * annotated class. + * + * @author Mark Paluch + * @since 2.0 + */ +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +@Documented +@Inherited +@Import(ReactiveMongoRepositoriesRegistrar.class) +public @interface EnableReactiveMongoRepositories { + + /** + * Alias for the {@link #basePackages()} attribute. Allows for more concise annotation declarations e.g.: + * {@code @EnableReactiveMongoRepositories("org.my.pkg")} instead of + * {@code @EnableReactiveMongoRepositories(basePackages="org.my.pkg")}. + */ + String[] value() default {}; + + /** + * Base packages to scan for annotated components. {@link #value()} is an alias for (and mutually exclusive with) this + * attribute. Use {@link #basePackageClasses()} for a type-safe alternative to String-based package names. 
+ */ + String[] basePackages() default {}; + + /** + * Type-safe alternative to {@link #basePackages()} for specifying the packages to scan for annotated components. The + * package of each class specified will be scanned. Consider creating a special no-op marker class or interface in + * each package that serves no purpose other than being referenced by this attribute. + */ + Class[] basePackageClasses() default {}; + + /** + * Specifies which types are eligible for component scanning. Further narrows the set of candidate components from + * everything in {@link #basePackages()} to everything in the base packages that matches the given filter or filters. + */ + Filter[] includeFilters() default {}; + + /** + * Specifies which types are not eligible for component scanning. + */ + Filter[] excludeFilters() default {}; + + /** + * Returns the postfix to be used when looking up custom repository implementations. Defaults to {@literal Impl}. So + * for a repository named {@code PersonRepository} the corresponding implementation class will be looked up scanning + * for {@code PersonRepositoryImpl}. + * + * @return + */ + String repositoryImplementationPostfix() default "Impl"; + + /** + * Configures the location of where to find the Spring Data named queries properties file. Will default to + * {@code META-INF/mongo-named-queries.properties}. + * + * @return + */ + String namedQueriesLocation() default ""; + + /** + * Returns the key of the {@link QueryLookupStrategy} to be used for lookup queries for query methods. Defaults to + * {@link Key#CREATE_IF_NOT_FOUND}. + * + * @return + */ + Key queryLookupStrategy() default Key.CREATE_IF_NOT_FOUND; + + /** + * Returns the {@link FactoryBean} class to be used for each repository instance. Defaults to + * {@link MongoRepositoryFactoryBean}. 
+ * + * @return + */ + Class repositoryFactoryBeanClass() default ReactiveMongoRepositoryFactoryBean.class; + + /** + * Configure the repository base class to be used to create repository proxies for this particular configuration. + * + * @return + */ + Class repositoryBaseClass() default DefaultRepositoryBaseClass.class; + + /** + * Configures the name of the {@link MongoTemplate} bean to be used with the repositories detected. + * + * @return + */ + String reactiveMongoTemplateRef() default "reactiveMongoTemplate"; + + /** + * Whether to automatically create indexes for query methods defined in the repository interface. + * + * @return + */ + boolean createIndexesForQueryMethods() default false; + + /** + * Configures whether nested repository-interfaces (e.g. defined as inner classes) should be discovered by the + * repositories infrastructure. + */ + boolean considerNestedRepositories() default false; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtension.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtension.java index 2c154948d..aed4785ec 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtension.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtension.java @@ -18,23 +18,34 @@ package org.springframework.data.mongodb.repository.config; import java.lang.annotation.Annotation; import java.util.Collection; import java.util.Collections; +import java.util.stream.Collectors; +import org.springframework.beans.factory.support.AbstractBeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import 
org.springframework.beans.factory.support.RootBeanDefinition; import org.springframework.core.annotation.AnnotationAttributes; +import org.springframework.core.io.ResourceLoader; import org.springframework.data.config.ParsingUtils; +import org.springframework.data.mongodb.config.BeanNames; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.repository.MongoRepository; import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean; import org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource; +import org.springframework.data.repository.config.RepositoryConfiguration; import org.springframework.data.repository.config.RepositoryConfigurationExtension; import org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport; +import org.springframework.data.repository.config.RepositoryConfigurationSource; import org.springframework.data.repository.config.XmlRepositoryConfigurationSource; +import org.springframework.data.repository.util.ReactiveWrappers; import org.w3c.dom.Element; /** * {@link RepositoryConfigurationExtension} for MongoDB. 
* * @author Oliver Gierke + * @author Mark Paluch */ public class MongoRepositoryConfigurationExtension extends RepositoryConfigurationExtensionSupport { @@ -73,7 +84,7 @@ public class MongoRepositoryConfigurationExtension extends RepositoryConfigurati */ @Override protected Collection> getIdentifyingAnnotations() { - return Collections.>singleton(Document.class); + return Collections.singleton(Document.class); } /* @@ -82,7 +93,7 @@ public class MongoRepositoryConfigurationExtension extends RepositoryConfigurati */ @Override protected Collection> getIdentifyingTypes() { - return Collections.>singleton(MongoRepository.class); + return Collections.singleton(MongoRepository.class); } /* @@ -110,4 +121,43 @@ public class MongoRepositoryConfigurationExtension extends RepositoryConfigurati builder.addPropertyReference("mongoOperations", attributes.getString("mongoTemplateRef")); builder.addPropertyValue("createIndexesForQueryMethods", attributes.getBoolean("createIndexesForQueryMethods")); } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#registerBeansForRoot(org.springframework.beans.factory.support.BeanDefinitionRegistry, org.springframework.data.repository.config.RepositoryConfigurationSource) + */ + @Override + public void registerBeansForRoot(BeanDefinitionRegistry registry, RepositoryConfigurationSource configurationSource) { + + super.registerBeansForRoot(registry, configurationSource); + + if (!registry.containsBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME)) { + + RootBeanDefinition definition = new RootBeanDefinition(MongoMappingContext.class); + definition.setRole(AbstractBeanDefinition.ROLE_INFRASTRUCTURE); + definition.setSource(configurationSource.getSource()); + + registry.registerBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME, definition); + } + } + + @Override + public Collection> getRepositoryConfigurations( + T configSource, ResourceLoader loader, boolean strictMatchesOnly) { 
+ + Collection> repositoryConfigurations = super.getRepositoryConfigurations(configSource, + loader, strictMatchesOnly); + + if (ReactiveWrappers.isAvailable()) { + + return repositoryConfigurations.stream().filter(configuration -> { + + Class repositoryInterface = super.loadRepositoryInterface(configuration, loader); + return !RepositoryType.isReactiveRepository(repositoryInterface); + + }).collect(Collectors.toList()); + } + + return repositoryConfigurations; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrar.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrar.java new file mode 100644 index 000000000..e7675b0ec --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrar.java @@ -0,0 +1,49 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.repository.config; + +import java.lang.annotation.Annotation; + +import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; +import org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport; +import org.springframework.data.repository.config.RepositoryConfigurationExtension; + +/** + * Mongo-specific {@link ImportBeanDefinitionRegistrar}. + * + * @author Mark Paluch + */ +class ReactiveMongoRepositoriesRegistrar extends RepositoryBeanDefinitionRegistrarSupport { + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport#getAnnotation() + */ + @Override + protected Class getAnnotation() { + return EnableReactiveMongoRepositories.class; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport#getExtension() + */ + @Override + protected RepositoryConfigurationExtension getExtension() { + return new ReactiveMongoRepositoryConfigurationExtension(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtension.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtension.java new file mode 100644 index 000000000..32fbbf54c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtension.java @@ -0,0 +1,171 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.repository.config; + +import java.lang.annotation.Annotation; +import java.util.Collection; +import java.util.Collections; +import java.util.stream.Collectors; + +import org.springframework.beans.factory.support.AbstractBeanDefinition; +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.beans.factory.support.RootBeanDefinition; +import org.springframework.core.annotation.AnnotationAttributes; +import org.springframework.core.io.ResourceLoader; +import org.springframework.data.config.ParsingUtils; +import org.springframework.data.mongodb.config.BeanNames; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.repository.ReactiveMongoRepository; +import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactoryBean; +import org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource; +import org.springframework.data.repository.config.RepositoryConfiguration; +import org.springframework.data.repository.config.RepositoryConfigurationExtension; +import org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport; +import org.springframework.data.repository.config.RepositoryConfigurationSource; +import org.springframework.data.repository.config.XmlRepositoryConfigurationSource; +import 
org.w3c.dom.Element; + +/** + * Reactive {@link RepositoryConfigurationExtension} for MongoDB. + * + * @author Mark Paluch + */ +public class ReactiveMongoRepositoryConfigurationExtension extends RepositoryConfigurationExtensionSupport { + + private static final String MONGO_TEMPLATE_REF = "reactive-mongo-template-ref"; + private static final String CREATE_QUERY_INDEXES = "create-query-indexes"; + + private boolean fallbackMappingContextCreated = false; + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getModuleName() + */ + @Override + public String getModuleName() { + return "Reactive MongoDB"; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getModulePrefix() + */ + @Override + protected String getModulePrefix() { + return "mongo"; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.config.RepositoryConfigurationExtension#getRepositoryFactoryClassName() + */ + public String getRepositoryFactoryClassName() { + return ReactiveMongoRepositoryFactoryBean.class.getName(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getIdentifyingAnnotations() + */ + @Override + protected Collection> getIdentifyingAnnotations() { + return Collections.singleton(Document.class); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getIdentifyingTypes() + */ + @Override + protected Collection> getIdentifyingTypes() { + return Collections.singleton(ReactiveMongoRepository.class); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.RepositoryConfigurationSource) + */ + @Override + public void 
postProcess(BeanDefinitionBuilder builder, RepositoryConfigurationSource source) { + + if (fallbackMappingContextCreated) { + builder.addPropertyReference("mappingContext", BeanNames.MAPPING_CONTEXT_BEAN_NAME); + } + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.XmlRepositoryConfigurationSource) + */ + @Override + public void postProcess(BeanDefinitionBuilder builder, XmlRepositoryConfigurationSource config) { + + Element element = config.getElement(); + + ParsingUtils.setPropertyReference(builder, element, MONGO_TEMPLATE_REF, "reactiveMongoOperations"); + ParsingUtils.setPropertyValue(builder, element, CREATE_QUERY_INDEXES, "createIndexesForQueryMethods"); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource) + */ + @Override + public void postProcess(BeanDefinitionBuilder builder, AnnotationRepositoryConfigurationSource config) { + + AnnotationAttributes attributes = config.getAttributes(); + + builder.addPropertyReference("reactiveMongoOperations", attributes.getString("reactiveMongoTemplateRef")); + builder.addPropertyValue("createIndexesForQueryMethods", attributes.getBoolean("createIndexesForQueryMethods")); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#registerBeansForRoot(org.springframework.beans.factory.support.BeanDefinitionRegistry, org.springframework.data.repository.config.RepositoryConfigurationSource) + */ + @Override + public void registerBeansForRoot(BeanDefinitionRegistry registry, RepositoryConfigurationSource configurationSource) { + + 
super.registerBeansForRoot(registry, configurationSource); + + if (!registry.containsBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME)) { + + RootBeanDefinition definition = new RootBeanDefinition(MongoMappingContext.class); + definition.setRole(AbstractBeanDefinition.ROLE_INFRASTRUCTURE); + definition.setSource(configurationSource.getSource()); + + registry.registerBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME, definition); + } + } + + @Override + public Collection> getRepositoryConfigurations( + T configSource, ResourceLoader loader, boolean strictMatchesOnly) { + + Collection> repositoryConfigurations = super.getRepositoryConfigurations(configSource, + loader, strictMatchesOnly); + + return repositoryConfigurations.stream().filter(configuration -> { + + Class repositoryInterface = super.loadRepositoryInterface(configuration, loader); + return RepositoryType.isReactiveRepository(repositoryInterface); + + }).collect(Collectors.toList()); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/RepositoryType.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/RepositoryType.java new file mode 100644 index 000000000..aa7457f0e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/RepositoryType.java @@ -0,0 +1,70 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.config; + +import java.lang.reflect.Method; + +import org.springframework.data.repository.util.ReactiveWrappers; + +import lombok.experimental.UtilityClass; + +/** + * Utility class to discover whether a repository interface uses reactive wrapper types. + * + * @author Mark Paluch + */ +@UtilityClass +class RepositoryType { + + /** + * @param repositoryInterface + * @return {@literal true} if the {@code repositoryInterface} uses reactive wrapper types. + * @see ReactiveWrappers + * @see ReactiveWrappers#isAvailable() + */ + public static boolean isReactiveRepository(Class repositoryInterface) { + + if (!ReactiveWrappers.isAvailable()) { + return false; + } + + Method[] methods = repositoryInterface.getMethods(); + + for (Method method : methods) { + + if (usesReactiveWrappers(method)) { + return true; + } + } + + return false; + } + + private static boolean usesReactiveWrappers(Method method) { + + if (ReactiveWrappers.supports(method.getReturnType())) { + return true; + } + + for (Class parameterType : method.getParameterTypes()) { + if (ReactiveWrappers.supports(parameterType)) { + return true; + } + } + + return false; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQuery.java new file mode 100644 index 000000000..dda007048 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQuery.java @@ -0,0 +1,212 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import org.reactivestreams.Publisher; +import org.springframework.core.convert.ConversionService; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.EntityInstantiators; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.CollectionExecution; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.DeleteExecution; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.GeoNearExecution; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.PagedExecution; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.ResultProcessingConverter; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.ResultProcessingExecution; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.SingleEntityExecution; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.SlicedExecution; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.TailExecution; +import org.springframework.data.repository.query.ParameterAccessor; +import org.springframework.data.repository.query.RepositoryQuery; 
+import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.util.ReactiveWrapperConverters; +import org.springframework.util.Assert; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * Base class for reactive {@link RepositoryQuery} implementations for MongoDB. + * + * @author Mark Paluch + */ +public abstract class AbstractReactiveMongoQuery implements RepositoryQuery { + + private final MongoQueryMethod method; + private final ReactiveMongoOperations operations; + private final EntityInstantiators instantiators; + + /** + * Creates a new {@link AbstractReactiveMongoQuery} from the given {@link MongoQueryMethod} and + * {@link MongoOperations}. + * + * @param method must not be {@literal null}. + * @param operations must not be {@literal null}. + * @param conversionService must not be {@literal null}. + */ + public AbstractReactiveMongoQuery(MongoQueryMethod method, ReactiveMongoOperations operations, + ConversionService conversionService) { + + Assert.notNull(method, "MongoQueryMethod must not be null!"); + Assert.notNull(operations, "ReactiveMongoOperations must not be null!"); + + this.method = method; + this.operations = operations; + this.instantiators = new EntityInstantiators(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.RepositoryQuery#getQueryMethod() + */ + public MongoQueryMethod getQueryMethod() { + return method; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.RepositoryQuery#execute(java.lang.Object[]) + */ + public Object execute(Object[] parameters) { + + boolean hasReactiveParameters = hasReactiveWrapperParameter(); + + if (hasReactiveParameters) { + return executeDeferred(parameters); + } + + return execute(new MongoParametersParameterAccessor(method, parameters)); + } + + @SuppressWarnings("unchecked") + private Object executeDeferred(Object[] parameters) { + + ReactiveMongoParameterAccessor 
parameterAccessor = new ReactiveMongoParameterAccessor(method, parameters); + + if (getQueryMethod().isCollectionQuery()) { + return Flux.defer(() -> (Publisher) execute(parameterAccessor)); + } + + return Mono.defer(() -> (Mono) execute(parameterAccessor)); + } + + private Object execute(MongoParameterAccessor parameterAccessor) { + + Query query = createQuery(new ConvertingParameterAccessor(operations.getConverter(), parameterAccessor)); + + applyQueryMetaAttributesWhenPresent(query); + + ResultProcessor processor = method.getResultProcessor().withDynamicProjection(parameterAccessor); + String collection = method.getEntityInformation().getCollectionName(); + + ReactiveMongoQueryExecution execution = getExecution(query, parameterAccessor, + new ResultProcessingConverter(processor, operations, instantiators)); + + return execution.execute(query, processor.getReturnedType().getDomainType(), collection); + } + + private boolean hasReactiveWrapperParameter() { + + for (MongoParameters.MongoParameter mongoParameter : method.getParameters()) { + if (ReactiveWrapperConverters.supports(mongoParameter.getType())) { + return true; + } + } + return false; + } + + /** + * Returns the execution instance to use. + * + * @param query must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @param resultProcessing must not be {@literal null}. 
+ * @return + */ + private ReactiveMongoQueryExecution getExecution(Query query, MongoParameterAccessor accessor, + Converter resultProcessing) { + return new ResultProcessingExecution(getExecutionToWrap(accessor), resultProcessing); + } + + private ReactiveMongoQueryExecution getExecutionToWrap(MongoParameterAccessor accessor) { + + if (isDeleteQuery()) { + return new DeleteExecution(operations, method); + } else if (method.isGeoNearQuery()) { + return new GeoNearExecution(operations, accessor, method.getReturnType()); + } else if (method.isSliceQuery()) { + return new SlicedExecution(operations, accessor.getPageable()); + } else if (isInfiniteStream(method)) { + return new TailExecution(operations, accessor.getPageable()); + } else if (method.isCollectionQuery()) { + return new CollectionExecution(operations, accessor.getPageable()); + } else if (method.isPageQuery()) { + return new PagedExecution(operations, accessor.getPageable()); + } else { + return new SingleEntityExecution(operations, isCountQuery()); + } + } + + private boolean isInfiniteStream(MongoQueryMethod method) { + return method.getInfiniteStreamAnnotation() != null; + } + + Query applyQueryMetaAttributesWhenPresent(Query query) { + + if (method.hasQueryMetaAttributes()) { + query.setMeta(method.getQueryMetaAttributes()); + } + + return query; + } + + /** + * Creates a {@link Query} instance using the given {@link ConvertingParameterAccessor}. Will delegate to + * {@link #createQuery(ConvertingParameterAccessor)} by default but allows customization of the count query to be + * triggered. + * + * @param accessor must not be {@literal null}. + * @return + */ + protected Query createCountQuery(ConvertingParameterAccessor accessor) { + return applyQueryMetaAttributesWhenPresent(createQuery(accessor)); + } + + /** + * Creates a {@link Query} instance using the given {@link ParameterAccessor} + * + * @param accessor must not be {@literal null}. 
+ * @return + */ + protected abstract Query createQuery(ConvertingParameterAccessor accessor); + + /** + * Returns whether the query should get a count projection applied. + * + * @return + */ + protected abstract boolean isCountQuery(); + + /** + * Return whether the query should delete matching documents. + * + * @return + * @since 1.5 + */ + protected abstract boolean isDeleteQuery(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryExecution.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryExecution.java index 17ba3c8f4..e51375178 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryExecution.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryExecution.java @@ -66,7 +66,7 @@ interface MongoQueryExecution { * @author Oliver Gierke */ @RequiredArgsConstructor - static final class CollectionExecution implements MongoQueryExecution { + final class CollectionExecution implements MongoQueryExecution { private final @NonNull MongoOperations operations; private final Pageable pageable; @@ -89,7 +89,7 @@ interface MongoQueryExecution { * @since 1.5 */ @RequiredArgsConstructor - static final class SlicedExecution implements MongoQueryExecution { + final class SlicedExecution implements MongoQueryExecution { private final @NonNull MongoOperations operations; private final @NonNull Pageable pageable; @@ -121,7 +121,7 @@ interface MongoQueryExecution { * @author Mark Paluch */ @RequiredArgsConstructor - static final class PagedExecution implements MongoQueryExecution { + final class PagedExecution implements MongoQueryExecution { private final @NonNull MongoOperations operations; private final @NonNull Pageable pageable; @@ -161,7 +161,7 @@ interface MongoQueryExecution { * @author Oliver Gierke */ @RequiredArgsConstructor - static final class 
SingleEntityExecution implements MongoQueryExecution { + final class SingleEntityExecution implements MongoQueryExecution { private final MongoOperations operations; private final boolean countProjection; @@ -182,7 +182,7 @@ interface MongoQueryExecution { * @author Oliver Gierke */ @RequiredArgsConstructor - static class GeoNearExecution implements MongoQueryExecution { + class GeoNearExecution implements MongoQueryExecution { private final MongoOperations operations; private final MongoParameterAccessor accessor; @@ -248,7 +248,7 @@ interface MongoQueryExecution { * @author Oliver Gierke * @author Mark Paluch */ - static final class PagingGeoNearExecution extends GeoNearExecution { + final class PagingGeoNearExecution extends GeoNearExecution { private final MongoOperations operations; private final MongoParameterAccessor accessor; @@ -299,7 +299,7 @@ interface MongoQueryExecution { * @since 1.5 */ @RequiredArgsConstructor - static final class DeleteExecution implements MongoQueryExecution { + final class DeleteExecution implements MongoQueryExecution { private final MongoOperations operations; private final MongoQueryMethod method; @@ -325,7 +325,7 @@ interface MongoQueryExecution { * @since 1.7 */ @RequiredArgsConstructor - static final class StreamExecution implements MongoQueryExecution { + final class StreamExecution implements MongoQueryExecution { private final @NonNull MongoOperations operations; private final @NonNull Converter resultProcessing; @@ -356,7 +356,7 @@ interface MongoQueryExecution { * @since 1.9 */ @RequiredArgsConstructor - static final class ResultProcessingExecution implements MongoQueryExecution { + final class ResultProcessingExecution implements MongoQueryExecution { private final @NonNull MongoQueryExecution delegate; private final @NonNull Converter converter; @@ -378,7 +378,7 @@ interface MongoQueryExecution { * @since 1.9 */ @RequiredArgsConstructor - static final class ResultProcessingConverter implements Converter { + final 
class ResultProcessingConverter implements Converter { private final @NonNull ResultProcessor processor; private final @NonNull MongoOperations operations; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java index 80adf849e..731b3fdaf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java @@ -28,6 +28,7 @@ import org.springframework.data.geo.GeoResults; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.repository.InfiniteStream; import org.springframework.data.mongodb.repository.Meta; import org.springframework.data.mongodb.repository.Query; import org.springframework.data.projection.ProjectionFactory; @@ -219,6 +220,16 @@ public class MongoQueryMethod extends QueryMethod { return AnnotatedElementUtils.findMergedAnnotation(method, Meta.class); } + /** + * Returns the {@link InfiniteStream} annotation that is applied to the method or {@code null} if not available. + * + * @return + * @since 2.0 + */ + InfiniteStream getInfiniteStreamAnnotation() { + return AnnotatedElementUtils.findMergedAnnotation(method, InfiniteStream.class); + } + /** * Returns the {@link org.springframework.data.mongodb.core.query.Meta} attributes to be applied. 
* diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoParameterAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoParameterAccessor.java new file mode 100644 index 000000000..b7dd7304e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoParameterAccessor.java @@ -0,0 +1,96 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import org.springframework.data.repository.util.ReactiveWrapperConverters; +import org.springframework.data.repository.util.ReactiveWrappers; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.core.publisher.MonoProcessor; + +/** + * Reactive {@link org.springframework.data.repository.query.ParametersParameterAccessor} implementation that subscribes + * to reactive parameter wrapper types upon creation. This class performs synchronization when accessing parameters. 
+ * + * @author Mark Paluch + */ +class ReactiveMongoParameterAccessor extends MongoParametersParameterAccessor { + + private final Object[] values; + private final MonoProcessor[] subscriptions; + + public ReactiveMongoParameterAccessor(MongoQueryMethod method, Object[] values) { + + super(method, values); + + this.values = values; + this.subscriptions = new MonoProcessor[values.length]; + + for (int i = 0; i < values.length; i++) { + + Object value = values[i]; + + if (value == null) { + continue; + } + + if (!ReactiveWrappers.supports(value.getClass())) { + continue; + } + + if (ReactiveWrappers.isSingleValueType(value.getClass())) { + subscriptions[i] = ReactiveWrapperConverters.toWrapper(value, Mono.class).subscribe(); + } else { + subscriptions[i] = ReactiveWrapperConverters.toWrapper(value, Flux.class).collectList().subscribe(); + } + } + } + + /* (non-Javadoc) + * @see org.springframework.data.repository.query.ParametersParameterAccessor#getValue(int) + */ + @SuppressWarnings("unchecked") + @Override + protected T getValue(int index) { + + if (subscriptions[index] != null) { + return (T) subscriptions[index].block(); + } + + return super.getValue(index); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.repository.query.MongoParametersParameterAccessor#getValues() + */ + @Override + public Object[] getValues() { + + Object[] result = new Object[values.length]; + for (int i = 0; i < result.length; i++) { + result[i] = getValue(i); + } + return result; + } + + /* (non-Javadoc) + * @see org.springframework.data.repository.query.ParametersParameterAccessor#getBindableValue(int) + */ + public Object getBindableValue(int index) { + return getValue(getParameters().getBindableParameter(index).getIndex()); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecution.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecution.java new file mode 100644 index 000000000..023ebac6e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecution.java @@ -0,0 +1,301 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.EntityInstantiators; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Range; +import org.springframework.data.domain.Slice; +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.support.ReactivePageImpl; +import org.springframework.data.mongodb.repository.support.ReactiveSliceImpl; +import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ReturnedType; +import org.springframework.data.repository.util.ReactiveWrappers; +import 
org.springframework.data.util.TypeInformation; +import org.springframework.util.ClassUtils; + +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import com.mongodb.client.result.DeleteResult; + +/** + * Set of classes to contain query execution strategies. Depending (mostly) on the return type of a + * {@link org.springframework.data.repository.query.QueryMethod} a {@link AbstractReactiveMongoQuery} can be executed in various + * flavors. + * + * @author Mark Paluch + * @since 2.0 + */ +interface ReactiveMongoQueryExecution { + + Object execute(Query query, Class type, String collection); + + /** + * {@link ReactiveMongoQueryExecution} for collection returning queries. + * + * @author Mark Paluch + */ + @RequiredArgsConstructor + final class CollectionExecution implements ReactiveMongoQueryExecution { + + private final @NonNull ReactiveMongoOperations operations; + private final Pageable pageable; + + @Override + public Object execute(Query query, Class type, String collection) { + return operations.find(query.with(pageable), type, collection); + } + } + + /** + * {@link ReactiveMongoQueryExecution} for collection returning queries using tailable cursors. + * + * @author Mark Paluch + */ + @RequiredArgsConstructor + final class TailExecution implements ReactiveMongoQueryExecution { + + private final @NonNull ReactiveMongoOperations operations; + private final Pageable pageable; + + @Override + public Object execute(Query query, Class type, String collection) { + return operations.tail(query.with(pageable), type, collection); + } + } + + /** + * {@link ReactiveMongoQueryExecution} for {@link Slice} query methods. 
+ * + * @author Mark Paluch + */ + @RequiredArgsConstructor + final class SlicedExecution implements ReactiveMongoQueryExecution { + + private final @NonNull ReactiveMongoOperations operations; + private final @NonNull Pageable pageable; + + @Override + public Object execute(Query query, Class type, String collection) { + + int pageSize = pageable.getPageSize(); + + // Apply Pageable but tweak limit to peek into next page + Query modifiedQuery = query.with(pageable).limit(pageSize + 1); + Flux flux = operations.find(modifiedQuery, type, collection); + + return Mono.fromSupplier(() -> new ReactiveSliceImpl<>(flux, pageable)); + } + } + + /** + * {@link ReactiveMongoQueryExecution} for pagination queries. + * + * @author Mark Paluch + */ + @RequiredArgsConstructor + final class PagedExecution implements ReactiveMongoQueryExecution { + + private final @NonNull ReactiveMongoOperations operations; + private final @NonNull Pageable pageable; + + @Override + public Object execute(Query query, Class type, String collection) { + + int overallLimit = query.getLimit(); + Mono count = operations.count(query, type, collection); + + // Apply raw pagination + query = query.with(pageable); + + // Adjust limit if page would exceed the overall limit + if (overallLimit != 0 && pageable.getOffset() + pageable.getPageSize() > overallLimit) { + query.limit(overallLimit - pageable.getOffset()); + } + + Flux flux = operations.find(query, type, collection); + + return Mono.fromSupplier(() -> new ReactivePageImpl<>(flux, pageable, count)); + } + } + + /** + * {@link ReactiveMongoQueryExecution} to return a single entity. + * + * @author Mark Paluch + */ + @RequiredArgsConstructor + final class SingleEntityExecution implements ReactiveMongoQueryExecution { + + private final ReactiveMongoOperations operations; + private final boolean countProjection; + + @Override + public Object execute(Query query, Class type, String collection) { + return countProjection ? 
operations.count(query, type, collection) : operations.findOne(query, type, collection); + } + } + + /** + * {@link MongoQueryExecution} to execute geo-near queries. + * + * @author Mark Paluch + */ + @RequiredArgsConstructor + class GeoNearExecution implements ReactiveMongoQueryExecution { + + private final ReactiveMongoOperations operations; + private final MongoParameterAccessor accessor; + private final TypeInformation returnType; + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) + */ + @Override + public Object execute(Query query, Class type, String collection) { + + Flux> results = doExecuteQuery(query, type, collection); + return isStreamOfGeoResult() ? results : results.map(GeoResult::getContent); + } + + @SuppressWarnings("unchecked") + protected Flux> doExecuteQuery(Query query, Class type, String collection) { + + Point nearLocation = accessor.getGeoNearLocation(); + NearQuery nearQuery = NearQuery.near(nearLocation); + + if (query != null) { + nearQuery.query(query); + } + + Range distances = accessor.getDistanceRange(); + Distance maxDistance = distances.getUpperBound(); + + if (maxDistance != null) { + nearQuery.maxDistance(maxDistance).in(maxDistance.getMetric()); + } + + Distance minDistance = distances.getLowerBound(); + + if (minDistance != null) { + nearQuery.minDistance(minDistance).in(minDistance.getMetric()); + } + + Pageable pageable = accessor.getPageable(); + + if (pageable != null) { + nearQuery.with(pageable); + } + + return (Flux) operations.geoNear(nearQuery, type, collection); + } + + private boolean isStreamOfGeoResult() { + + if (!ReactiveWrappers.supports(returnType.getType())) { + return false; + } + + TypeInformation componentType = returnType.getComponentType(); + return componentType != null && GeoResult.class.equals(componentType.getType()); + } + } + + /** + * {@link 
ReactiveMongoQueryExecution} removing documents matching the query. + * + * @author Mark Paluch + */ + @RequiredArgsConstructor + final class DeleteExecution implements ReactiveMongoQueryExecution { + + private final ReactiveMongoOperations operations; + private final MongoQueryMethod method; + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) + */ + @Override + public Object execute(Query query, Class type, String collection) { + + if (method.isCollectionQuery()) { + return operations.findAllAndRemove(query, type, collection); + } + + return operations.remove(query, type, collection).map(DeleteResult::getDeletedCount); + } + } + + /** + * A {@link ReactiveMongoQueryExecution} that wraps the results of the given delegate with the given result + * processing. + */ + @RequiredArgsConstructor + final class ResultProcessingExecution implements ReactiveMongoQueryExecution { + + private final @NonNull ReactiveMongoQueryExecution delegate; + private final @NonNull Converter converter; + + @Override + public Object execute(Query query, Class type, String collection) { + return converter.convert(delegate.execute(query, type, collection)); + } + } + + /** + * A {@link Converter} to post-process all source objects using the given {@link ResultProcessor}. 
+ * + * @author Mark Paluch + */ + @RequiredArgsConstructor + final class ResultProcessingConverter implements Converter { + + private final @NonNull ResultProcessor processor; + private final @NonNull ReactiveMongoOperations operations; + private final @NonNull EntityInstantiators instantiators; + + /* + * (non-Javadoc) + * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) + */ + @Override + public Object convert(Object source) { + + ReturnedType returnedType = processor.getReturnedType(); + + if (ClassUtils.isPrimitiveOrWrapper(returnedType.getReturnedType())) { + return source; + } + + Converter converter = new DtoInstantiatingConverter(returnedType.getReturnedType(), + operations.getConverter().getMappingContext(), instantiators); + + return processor.processResult(source, converter); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethod.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethod.java new file mode 100644 index 000000000..60bf57255 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethod.java @@ -0,0 +1,150 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.query; + +import static org.springframework.data.repository.util.ClassUtils.*; + +import java.lang.reflect.Method; + +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Sort; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.util.ReactiveWrappers; +import org.springframework.data.util.ClassTypeInformation; +import org.springframework.data.util.TypeInformation; + +/** + * Reactive specific implementation of {@link MongoQueryMethod}. + * + * @author Mark Paluch + * @since 2.0 + */ +public class ReactiveMongoQueryMethod extends MongoQueryMethod { + + private static final ClassTypeInformation PAGE_TYPE = ClassTypeInformation.from(Page.class); + private static final ClassTypeInformation SLICE_TYPE = ClassTypeInformation.from(Slice.class); + + private final Method method; + + /** + * Creates a new {@link ReactiveMongoQueryMethod} from the given {@link Method}. + * + * @param method must not be {@literal null}. + * @param metadata must not be {@literal null}. + * @param projectionFactory must not be {@literal null}. + * @param mappingContext must not be {@literal null}. 
+ */ + public ReactiveMongoQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory projectionFactory, + MappingContext, MongoPersistentProperty> mappingContext) { + + super(method, metadata, projectionFactory, mappingContext); + + if (hasParameterOfType(method, Pageable.class)) { + + TypeInformation returnType = ClassTypeInformation.fromReturnTypeOf(method); + + boolean multiWrapper = ReactiveWrappers.isMultiValueType(returnType.getType()); + boolean singleWrapperWithWrappedPageableResult = ReactiveWrappers.isSingleValueType(returnType.getType()) + && (PAGE_TYPE.isAssignableFrom(returnType.getComponentType()) + || SLICE_TYPE.isAssignableFrom(returnType.getComponentType())); + + if (!multiWrapper && !singleWrapperWithWrappedPageableResult) { + throw new IllegalStateException(String.format( + "Method has to use either a multi-item reactive wrapper return type or a wrapped Page/Slice type. Offending method: %s", + method.toString())); + } + + if (hasParameterOfType(method, Sort.class)) { + throw new IllegalStateException(String.format("Method must not have Pageable *and* Sort parameter. " + + "Use sorting capabilities on Pageable instead! 
Offending method: %s", method.toString())); + } + } + + this.method = method; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.repository.query.MongoQueryMethod#createParameters(java.lang.reflect.Method) + */ + @Override + protected MongoParameters createParameters(Method method) { + return new MongoParameters(method, isGeoNearQuery(method)); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.QueryMethod#isCollectionQuery() + */ + @Override + public boolean isCollectionQuery() { + return !(isPageQuery() || isSliceQuery()) && ReactiveWrappers.isMultiValueType(method.getReturnType()); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.repository.query.MongoQueryMethod#isGeoNearQuery() + */ + @Override + public boolean isGeoNearQuery() { + return isGeoNearQuery(method); + } + + private boolean isGeoNearQuery(Method method) { + + if (ReactiveWrappers.supports(method.getReturnType())) { + TypeInformation from = ClassTypeInformation.fromReturnTypeOf(method); + return GeoResult.class.equals(from.getComponentType().getType()); + } + + return false; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.QueryMethod#isModifyingQuery() + */ + @Override + public boolean isModifyingQuery() { + return super.isModifyingQuery(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.QueryMethod#isQueryForEntity() + */ + @Override + public boolean isQueryForEntity() { + return super.isQueryForEntity(); + } + + /* + * All reactive query methods are streaming queries. 
+ * (non-Javadoc) + * @see org.springframework.data.repository.query.QueryMethod#isStreamQuery() + */ + @Override + public boolean isStreamQuery() { + return true; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactivePartTreeMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactivePartTreeMongoQuery.java new file mode 100644 index 000000000..e974991fe --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactivePartTreeMongoQuery.java @@ -0,0 +1,150 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.query; + +import org.springframework.core.convert.ConversionService; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Field; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.repository.query.QueryMethod; +import org.springframework.data.repository.query.RepositoryQuery; +import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ReturnedType; +import org.springframework.data.repository.query.parser.PartTree; +import org.springframework.util.StringUtils; + +import com.mongodb.util.JSONParseException; + +/** + * Reactive PartTree {@link RepositoryQuery} implementation for Mongo. + * + * @author Mark Paluch + */ +public class ReactivePartTreeMongoQuery extends AbstractReactiveMongoQuery { + + private final PartTree tree; + private final boolean isGeoNearQuery; + private final MappingContext context; + private final ResultProcessor processor; + + /** + * Creates a new {@link ReactivePartTreeMongoQuery} from the given {@link QueryMethod} and {@link MongoTemplate}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param conversionService must not be {@literal null}. 
+ */ + public ReactivePartTreeMongoQuery(MongoQueryMethod method, ReactiveMongoOperations mongoOperations, ConversionService conversionService) { + + super(method, mongoOperations, conversionService); + + this.processor = method.getResultProcessor(); + this.tree = new PartTree(method.getName(), processor.getReturnedType().getDomainType()); + this.isGeoNearQuery = method.isGeoNearQuery(); + this.context = mongoOperations.getConverter().getMappingContext(); + } + + /** + * Return the {@link PartTree} backing the query. + * + * @return the tree + */ + public PartTree getTree() { + return tree; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor, boolean) + */ + @Override + protected Query createQuery(ConvertingParameterAccessor accessor) { + + MongoQueryCreator creator = new MongoQueryCreator(tree, accessor, context, isGeoNearQuery); + Query query = creator.createQuery(); + + if (tree.isLimiting()) { + query.limit(tree.getMaxResults()); + } + + TextCriteria textCriteria = accessor.getFullText(); + if (textCriteria != null) { + query.addCriteria(textCriteria); + } + + String fieldSpec = this.getQueryMethod().getFieldSpecification(); + + if (!StringUtils.hasText(fieldSpec)) { + + ReturnedType returnedType = processor.withDynamicProjection(accessor).getReturnedType(); + + if (returnedType.isProjecting()) { + + Field fields = query.fields(); + + for (String field : returnedType.getInputProperties()) { + fields.include(field); + } + } + + return query; + } + + try { + + BasicQuery result = new BasicQuery(query.getQueryObject().toJson(), fieldSpec); + result.setSortObject(query.getSortObject()); + + return result; + + } catch (JSONParseException o_O) { + throw new IllegalStateException(String.format("Invalid query or field specification in %s!", getQueryMethod()), + o_O); + } + } + + /* + * (non-Javadoc) + * @see 
org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createCountQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor) + */ + @Override + protected Query createCountQuery(ConvertingParameterAccessor accessor) { + return new MongoQueryCreator(tree, accessor, context, false).createQuery(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isCountQuery() + */ + @Override + protected boolean isCountQuery() { + return tree.isCountProjection(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isDeleteQuery() + */ + @Override + protected boolean isDeleteQuery() { + return tree.isDelete(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQuery.java new file mode 100644 index 000000000..a2426008d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQuery.java @@ -0,0 +1,143 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.query; + +import java.util.ArrayList; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.core.convert.ConversionService; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.query.ExpressionEvaluatingParameterBinder.BindingContext; +import org.springframework.data.mongodb.repository.query.StringBasedMongoQuery.ParameterBinding; +import org.springframework.data.mongodb.repository.query.StringBasedMongoQuery.ParameterBindingParser; +import org.springframework.data.repository.query.EvaluationContextProvider; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.util.Assert; + +/** + * Query to use a plain JSON String to create the {@link Query} to actually execute. + * + * @author Mark Paluch + */ +public class ReactiveStringBasedMongoQuery extends AbstractReactiveMongoQuery { + + private static final String COUND_AND_DELETE = "Manually defined query for %s cannot be both a count and delete query at the same time!"; + private static final Logger LOG = LoggerFactory.getLogger(ReactiveStringBasedMongoQuery.class); + private static final ParameterBindingParser BINDING_PARSER = ParameterBindingParser.INSTANCE; + + private final String query; + private final String fieldSpec; + private final boolean isCountQuery; + private final boolean isDeleteQuery; + private final List queryParameterBindings; + private final List fieldSpecParameterBindings; + private final ExpressionEvaluatingParameterBinder parameterBinder; + + /** + * Creates a new {@link ReactiveStringBasedMongoQuery} for the given {@link MongoQueryMethod} and {@link MongoOperations}. 
+ * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. + * @param evaluationContextProvider must not be {@literal null}. + * @param conversionService must not be {@literal null}. + */ + public ReactiveStringBasedMongoQuery(MongoQueryMethod method, ReactiveMongoOperations mongoOperations, + SpelExpressionParser expressionParser, EvaluationContextProvider evaluationContextProvider, ConversionService conversionService) { + this(method.getAnnotatedQuery(), method, mongoOperations, expressionParser, evaluationContextProvider, conversionService); + } + + /** + * Creates a new {@link ReactiveStringBasedMongoQuery} for the given {@link String}, {@link MongoQueryMethod}, + * {@link MongoOperations}, {@link SpelExpressionParser} and {@link EvaluationContextProvider}. + * + * @param query must not be {@literal null}. + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. + * @param conversionService must not be {@literal null}. 
+ */ + public ReactiveStringBasedMongoQuery(String query, MongoQueryMethod method, ReactiveMongoOperations mongoOperations, + SpelExpressionParser expressionParser, EvaluationContextProvider evaluationContextProvider, ConversionService conversionService) { + + super(method, mongoOperations, conversionService); + + Assert.notNull(query, "Query must not be null!"); + Assert.notNull(expressionParser, "SpelExpressionParser must not be null!"); + + this.queryParameterBindings = new ArrayList(); + this.query = BINDING_PARSER.parseAndCollectParameterBindingsFromQueryIntoBindings(query, + this.queryParameterBindings); + + this.fieldSpecParameterBindings = new ArrayList(); + this.fieldSpec = BINDING_PARSER.parseAndCollectParameterBindingsFromQueryIntoBindings( + method.getFieldSpecification(), this.fieldSpecParameterBindings); + + this.isCountQuery = method.hasAnnotatedQuery() ? method.getQueryAnnotation().count() : false; + this.isDeleteQuery = method.hasAnnotatedQuery() ? method.getQueryAnnotation().delete() : false; + + if (isCountQuery && isDeleteQuery) { + throw new IllegalArgumentException(String.format(COUND_AND_DELETE, method)); + } + + this.parameterBinder = new ExpressionEvaluatingParameterBinder(expressionParser, evaluationContextProvider); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor) + */ + @Override + protected Query createQuery(ConvertingParameterAccessor accessor) { + + String queryString = parameterBinder.bind(this.query, accessor, + new BindingContext(getQueryMethod().getParameters(), queryParameterBindings)); + String fieldsString = parameterBinder.bind(this.fieldSpec, accessor, + new BindingContext(getQueryMethod().getParameters(), fieldSpecParameterBindings)); + + Query query = new BasicQuery(queryString, fieldsString).with(accessor.getSort()); + + if (LOG.isDebugEnabled()) { + 
LOG.debug(String.format("Created query %s for %s fields.", query.getQueryObject(), query.getFieldsObject())); + } + + return query; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isCountQuery() + */ + @Override + protected boolean isCountQuery() { + return isCountQuery; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isDeleteQuery() + */ + @Override + protected boolean isDeleteQuery() { + return this.isDeleteQuery; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQuery.java index 7cbe12c45..95b4234e9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQuery.java @@ -39,7 +39,7 @@ import com.mongodb.util.JSON; /** * Query to use a plain JSON String to create the {@link Query} to actually execute. - * + * * @author Oliver Gierke * @author Christoph Strobl * @author Thomas Darimont @@ -60,7 +60,7 @@ public class StringBasedMongoQuery extends AbstractMongoQuery { /** * Creates a new {@link StringBasedMongoQuery} for the given {@link MongoQueryMethod} and {@link MongoOperations}. - * + * * @param method must not be {@literal null}. * @param mongoOperations must not be {@literal null}. * @param expressionParser must not be {@literal null}. 
@@ -127,7 +127,7 @@ public class StringBasedMongoQuery extends AbstractMongoQuery { return query; } - /* + /* * (non-Javadoc) * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isCountQuery() */ @@ -147,10 +147,10 @@ public class StringBasedMongoQuery extends AbstractMongoQuery { /** * A parser that extracts the parameter bindings from a given query string. - * + * * @author Thomas Darimont */ - private static enum ParameterBindingParser { + static enum ParameterBindingParser { INSTANCE; @@ -170,7 +170,7 @@ public class StringBasedMongoQuery extends AbstractMongoQuery { /** * Returns a list of {@link ParameterBinding}s found in the given {@code input} or an * {@link Collections#emptyList()}. - * + * * @param input can be {@literal null} or empty. * @param bindings must not be {@literal null}. * @return @@ -265,7 +265,7 @@ public class StringBasedMongoQuery extends AbstractMongoQuery { int paramIndex = Integer.parseInt(valueMatcher.group(PARAMETER_INDEX_GROUP)); /* - * The pattern is used as a direct parameter replacement, e.g. 'field': ?1, + * The pattern is used as a direct parameter replacement, e.g. 'field': ?1, * therefore we treat it as not quoted to remain backwards compatible. */ boolean quoted = !string.equals(PARAMETER_PREFIX + paramIndex); @@ -324,7 +324,7 @@ public class StringBasedMongoQuery extends AbstractMongoQuery { /** * A generic parameter binding with name or position information. - * + * * @author Thomas Darimont */ static class ParameterBinding { @@ -335,7 +335,7 @@ public class StringBasedMongoQuery extends AbstractMongoQuery { /** * Creates a new {@link ParameterBinding} with the given {@code parameterIndex} and {@code quoted} information. - * + * * @param parameterIndex * @param quoted whether or not the parameter is already quoted. 
*/ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListener.java index 4ae998db9..7ba26332c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListener.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListener.java @@ -23,6 +23,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.IndexOperationsProvider; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.index.Index; import org.springframework.data.mongodb.repository.query.MongoEntityMetadata; @@ -38,23 +39,24 @@ import org.springframework.util.Assert; * refers to. * * @author Oliver Gierke + * @author Mark Paluch */ class IndexEnsuringQueryCreationListener implements QueryCreationListener { private static final Set GEOSPATIAL_TYPES = new HashSet(Arrays.asList(Type.NEAR, Type.WITHIN)); private static final Logger LOG = LoggerFactory.getLogger(IndexEnsuringQueryCreationListener.class); - private final MongoOperations operations; + private final IndexOperationsProvider indexOperationsProvider; /** * Creates a new {@link IndexEnsuringQueryCreationListener} using the given {@link MongoOperations}. * - * @param operations must not be {@literal null}. + * @param indexOperationsProvider must not be {@literal null}. 
*/ - public IndexEnsuringQueryCreationListener(MongoOperations operations) { + public IndexEnsuringQueryCreationListener(IndexOperationsProvider indexOperationsProvider) { - Assert.notNull(operations); - this.operations = operations; + Assert.notNull(indexOperationsProvider); + this.indexOperationsProvider = indexOperationsProvider; } /* @@ -85,7 +87,7 @@ class IndexEnsuringQueryCreationListener implements QueryCreationListener metadata = query.getQueryMethod().getEntityInformation(); - operations.indexOps(metadata.getCollectionName()).ensureIndex(index); + indexOperationsProvider.indexOps(metadata.getCollectionName()).ensureIndex(index); LOG.debug(String.format("Created %s!", index)); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java index 03d75f2c8..5bb73b1e8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java @@ -40,18 +40,27 @@ import org.springframework.data.repository.query.EvaluationContextProvider; import org.springframework.data.repository.query.QueryLookupStrategy; import org.springframework.data.repository.query.QueryLookupStrategy.Key; import org.springframework.data.repository.query.RepositoryQuery; +import org.springframework.data.repository.reactive.ReactiveCrudRepository; +import org.springframework.data.repository.reactive.RxJavaCrudRepository; +import org.springframework.data.repository.util.QueryExecutionConverters; import org.springframework.expression.spel.standard.SpelExpressionParser; import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; /** * Factory to create {@link MongoRepository} instances. 
- * + * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl */ public class MongoRepositoryFactory extends RepositoryFactorySupport { + private static final boolean PROJECT_REACTOR_PRESENT = ClassUtils.isPresent("reactor.core.publisher.Flux", + QueryExecutionConverters.class.getClassLoader()); + private static final boolean RXJAVA_OBSERVABLE_PRESENT = ClassUtils.isPresent("rx.Observable", + QueryExecutionConverters.class.getClassLoader()); + private static final SpelExpressionParser EXPRESSION_PARSER = new SpelExpressionParser(); private final MongoOperations operations; @@ -59,7 +68,7 @@ public class MongoRepositoryFactory extends RepositoryFactorySupport { /** * Creates a new {@link MongoRepositoryFactory} with the given {@link MongoOperations}. - * + * * @param mongoOperations must not be {@literal null}. */ public MongoRepositoryFactory(MongoOperations mongoOperations) { @@ -77,6 +86,14 @@ public class MongoRepositoryFactory extends RepositoryFactorySupport { @Override protected Class getRepositoryBaseClass(RepositoryMetadata metadata) { + boolean isReactiveRepository = (PROJECT_REACTOR_PRESENT && ReactiveCrudRepository.class.isAssignableFrom(metadata.getRepositoryInterface())) || ( + RXJAVA_OBSERVABLE_PRESENT && RxJavaCrudRepository.class.isAssignableFrom(metadata.getRepositoryInterface())); + + if (isReactiveRepository) { + return SimpleReactiveMongoRepository.class; + } + + boolean isQueryDslRepository = QUERY_DSL_PRESENT && QueryDslPredicateExecutor.class.isAssignableFrom(metadata.getRepositoryInterface()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java index abaee0270..f63cc2026 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java @@ -79,7 +79,7 @@ public class MongoRepositoryFactoryBean, S, ID exten RepositoryFactorySupport factory = getFactoryInstance(operations); if (createIndexesForQueryMethods) { - factory.addQueryCreationListener(new IndexEnsuringQueryCreationListener(operations)); + factory.addQueryCreationListener(new IndexEnsuringQueryCreationListener(collectionName -> operations.indexOps(collectionName))); } return factory; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveChunk.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveChunk.java new file mode 100644 index 000000000..029d7ca8a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveChunk.java @@ -0,0 +1,253 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.repository.support; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Sort; +import org.springframework.util.Assert; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.MonoProcessor; + +/** + * A reactive chunk of data restricted by the configured {@link Pageable}. + * + * @author Mark Paluch + */ +abstract class ReactiveChunk implements Slice, Serializable { + + private static final long serialVersionUID = 867755909294344406L; + + private final Flux content; + private final MonoProcessor> processor; + private volatile List contentCache; + private final Pageable pageable; + + /** + * Creates a new {@link ReactiveChunk} with the given content and the given governing {@link Pageable}. + * + * @param content must not be {@literal null}. + * @param pageable can be {@literal null}. + */ + public ReactiveChunk(Flux content, Pageable pageable) { + + Assert.notNull(content, "Content must not be null!"); + + this.content = (Flux) content; + this.pageable = pageable; + this.processor = this.content.collectList().doOnSuccess(list -> { + + if (list.size() > pageable.getPageSize()) { + contentCache = list.subList(0, pageable.getPageSize()); + } else { + contentCache = list; + } + }).subscribe(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Slice#getNumber() + */ + public int getNumber() { + return pageable == null ? 0 : pageable.getPageNumber(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Slice#getSize() + */ + public int getSize() { + return pageable == null ? 
0 : pageable.getPageSize(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Slice#getNumberOfElements() + */ + public int getNumberOfElements() { + return getContent0().size(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Slice#hasPrevious() + */ + public boolean hasPrevious() { + return getNumber() > 0; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Slice#isFirst() + */ + public boolean isFirst() { + return !hasPrevious(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Slice#isLast() + */ + public boolean isLast() { + return !hasNext(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Slice#nextPageable() + */ + public Pageable nextPageable() { + return hasNext() ? pageable.next() : null; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Slice#previousPageable() + */ + public Pageable previousPageable() { + + if (hasPrevious()) { + return pageable.previousOrFirst(); + } + + return null; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Slice#hasContent() + */ + public boolean hasContent() { + return !getContent0().isEmpty(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Slice#getContent() + */ + public List getContent() { + return Collections.unmodifiableList(getContent0()); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Slice#getSort() + */ + public Sort getSort() { + return pageable == null ? null : pageable.getSort(); + } + + /* + * (non-Javadoc) + * @see java.lang.Iterable#iterator() + */ + public Iterator iterator() { + return getContent0().iterator(); + } + + /** + * Applies the given {@link Converter} to the content of the {@link ReactiveChunk}. + * + * @param converter must not be {@literal null}. 
+ * @return + */ + protected List getConvertedContent(Converter converter) { + + Assert.notNull(converter, "Converter must not be null!"); + + List result = new ArrayList(getContent0().size()); + + for (T element : this) { + result.add(converter.convert(element)); + } + + return result; + } + + protected List getContent0() { + + if (contentCache != null) { + return contentCache; + } + + List list = processor.block(); + + if (list.size() > pageable.getPageSize()) { + return list.subList(0, pageable.getPageSize()); + } + + return list; + } + + /** + * Returns whether the returned list contains more elements than specified by {@link Pageable#getPageSize()}. + * + * @return + */ + protected boolean containsMore() { + + List list = processor.block(); + + return list.size() > pageable.getPageSize(); + } + + /* + * (non-Javadoc) + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + + if (!(obj instanceof ReactiveChunk)) { + return false; + } + + ReactiveChunk that = (ReactiveChunk) obj; + + boolean pageableEqual = this.pageable == null ? that.pageable == null : this.pageable.equals(that.pageable); + + return pageableEqual; + } + + /* + * (non-Javadoc) + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + int result = 17; + + result += 31 * (pageable == null ? 0 : pageable.hashCode()); + + return result; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactory.java new file mode 100644 index 000000000..ad763d71e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactory.java @@ -0,0 +1,230 @@ +/* + * Copyright 2016 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import java.io.Serializable; +import java.lang.reflect.Method; +import java.util.Arrays; + +import org.reactivestreams.Publisher; +import org.springframework.core.convert.ConversionService; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.MappingException; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.mongodb.repository.query.MongoQueryMethod; +import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryMethod; +import org.springframework.data.mongodb.repository.query.ReactivePartTreeMongoQuery; +import org.springframework.data.mongodb.repository.query.ReactiveStringBasedMongoQuery; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.repository.core.NamedQueries; +import 
org.springframework.data.repository.core.RepositoryInformation; +import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.core.support.RepositoryFactorySupport; +import org.springframework.data.repository.query.EvaluationContextProvider; +import org.springframework.data.repository.query.QueryLookupStrategy; +import org.springframework.data.repository.query.QueryLookupStrategy.Key; +import org.springframework.data.repository.query.RepositoryQuery; +import org.springframework.data.repository.util.QueryExecutionConverters; +import org.springframework.data.repository.util.ReactiveWrapperConverters; +import org.springframework.data.repository.util.ReactiveWrappers; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +/** + * Factory to create {@link org.springframework.data.mongodb.repository.ReactiveMongoRepository} instances. + * + * @author Mark Paluch + * @since 2.0 + */ +public class ReactiveMongoRepositoryFactory extends RepositoryFactorySupport { + + private static final SpelExpressionParser EXPRESSION_PARSER = new SpelExpressionParser(); + + private final ReactiveMongoOperations operations; + private final MappingContext, MongoPersistentProperty> mappingContext; + private final ConversionService conversionService; + + /** + * Creates a new {@link ReactiveMongoRepositoryFactory} with the given {@link ReactiveMongoOperations}. + * + * @param mongoOperations must not be {@literal null}. 
+ */ + public ReactiveMongoRepositoryFactory(ReactiveMongoOperations mongoOperations) { + + Assert.notNull(mongoOperations); + + this.operations = mongoOperations; + this.mappingContext = mongoOperations.getConverter().getMappingContext(); + + DefaultConversionService conversionService = new DefaultConversionService(); + QueryExecutionConverters.registerConvertersIn(conversionService); + this.conversionService = conversionService; + setConversionService(conversionService); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getRepositoryBaseClass(org.springframework.data.repository.core.RepositoryMetadata) + */ + @Override + protected Class getRepositoryBaseClass(RepositoryMetadata metadata) { + return SimpleReactiveMongoRepository.class; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getTargetRepository(org.springframework.data.repository.core.RepositoryInformation) + */ + @Override + protected Object getTargetRepository(RepositoryInformation information) { + + MongoEntityInformation entityInformation = getEntityInformation(information.getDomainType(), + information); + return getTargetRepositoryViaReflection(information, entityInformation, operations); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getQueryLookupStrategy(org.springframework.data.repository.query.QueryLookupStrategy.Key, org.springframework.data.repository.query.EvaluationContextProvider) + */ + @Override + protected QueryLookupStrategy getQueryLookupStrategy(Key key, EvaluationContextProvider evaluationContextProvider) { + return new MongoQueryLookupStrategy(operations, evaluationContextProvider, mappingContext, conversionService); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getEntityInformation(java.lang.Class) + */ + public 
MongoEntityInformation getEntityInformation(Class domainClass) { + return getEntityInformation(domainClass, null); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#validate(org.springframework.data.repository.core.RepositoryMetadata) + */ + @Override + protected void validate(RepositoryMetadata repositoryMetadata) { + + if (!ReactiveWrappers.isAvailable()) { + throw new InvalidDataAccessApiUsageException( + String.format("Cannot implement Repository %s without reactive library support.", + repositoryMetadata.getRepositoryInterface().getName())); + } + + Arrays.stream(repositoryMetadata.getRepositoryInterface().getMethods()) + .forEach(ReactiveMongoRepositoryFactory::validate); + } + + /** + * Reactive MongoDB support requires reactive wrapper support. If return type/parameters are reactive wrapper types, + * then it's required to be able to convert these into Publisher. + * + * @param method the method to validate. + */ + private static void validate(Method method) { + + if (ReactiveWrappers.supports(method.getReturnType()) + && !ClassUtils.isAssignable(Publisher.class, method.getReturnType())) { + + if (!ReactiveWrapperConverters.supports(method.getReturnType())) { + + throw new InvalidDataAccessApiUsageException( + String.format("No reactive type converter found for type %s used in %s, method %s.", + method.getReturnType().getName(), method.getDeclaringClass().getName(), method)); + } + } + + Arrays.stream(method.getParameterTypes()) // + .filter(ReactiveWrappers::supports) // + .filter(parameterType -> !ClassUtils.isAssignable(Publisher.class, parameterType)) // + .filter(parameterType -> !ReactiveWrapperConverters.supports(parameterType)) // + .forEach(parameterType -> { + throw new InvalidDataAccessApiUsageException( + String.format("No reactive type converter found for type %s used in %s, method %s.", + parameterType.getName(), method.getDeclaringClass().getName(), method)); + }); + } + + 
@SuppressWarnings("unchecked") + private MongoEntityInformation getEntityInformation(Class domainClass, + RepositoryInformation information) { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(domainClass); + + if (entity == null) { + throw new MappingException( + String.format("Could not lookup mapping metadata for domain class %s!", domainClass.getName())); + } + + return new MappingMongoEntityInformation((MongoPersistentEntity) entity, + information != null ? (Class) information.getIdType() : null); + } + + /** + * {@link QueryLookupStrategy} to create {@link PartTreeMongoQuery} instances. + * + * @author Mark Paluch + */ + private static class MongoQueryLookupStrategy implements QueryLookupStrategy { + + private final ReactiveMongoOperations operations; + private final EvaluationContextProvider evaluationContextProvider; + MappingContext, MongoPersistentProperty> mappingContext; + final ConversionService conversionService; + + MongoQueryLookupStrategy(ReactiveMongoOperations operations, EvaluationContextProvider evaluationContextProvider, + MappingContext, MongoPersistentProperty> mappingContext, + ConversionService conversionService) { + + this.operations = operations; + this.evaluationContextProvider = evaluationContextProvider; + this.mappingContext = mappingContext; + this.conversionService = conversionService; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.QueryLookupStrategy#resolveQuery(java.lang.reflect.Method, org.springframework.data.repository.core.RepositoryMetadata, org.springframework.data.projection.ProjectionFactory, org.springframework.data.repository.core.NamedQueries) + */ + @Override + public RepositoryQuery resolveQuery(Method method, RepositoryMetadata metadata, ProjectionFactory factory, + NamedQueries namedQueries) { + + MongoQueryMethod queryMethod = new ReactiveMongoQueryMethod(method, metadata, factory, mappingContext); + String namedQueryName = queryMethod.getNamedQueryName(); + + 
if (namedQueries.hasQuery(namedQueryName)) { + String namedQuery = namedQueries.getQuery(namedQueryName); + return new ReactiveStringBasedMongoQuery(namedQuery, queryMethod, operations, EXPRESSION_PARSER, + evaluationContextProvider, conversionService); + } else if (queryMethod.hasAnnotatedQuery()) { + return new ReactiveStringBasedMongoQuery(queryMethod, operations, EXPRESSION_PARSER, evaluationContextProvider, + conversionService); + } else { + return new ReactivePartTreeMongoQuery(queryMethod, operations, conversionService); + } + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java new file mode 100644 index 000000000..bce55f3fe --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java @@ -0,0 +1,120 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.repository.support; + +import java.io.Serializable; + +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport; +import org.springframework.data.repository.core.support.RepositoryFactorySupport; +import org.springframework.util.Assert; + +/** + * {@link org.springframework.beans.factory.FactoryBean} to create + * {@link org.springframework.data.mongodb.repository.ReactiveMongoRepository} instances. + * + * @author Mark Paluch + * @since 2.0 + * @see org.springframework.data.repository.reactive.ReactivePagingAndSortingRepository + * @see org.springframework.data.repository.reactive.RxJavaPagingAndSortingRepository + */ +public class ReactiveMongoRepositoryFactoryBean, S, ID extends Serializable> + extends RepositoryFactoryBeanSupport { + + private ReactiveMongoOperations operations; + private boolean createIndexesForQueryMethods = false; + private boolean mappingContextConfigured = false; + + /** + * Configures the {@link ReactiveMongoOperations} to be used. + * + * @param operations the operations to set + */ + public void setReactiveMongoOperations(ReactiveMongoOperations operations) { + this.operations = operations; + } + + /** + * Configures whether to automatically create indexes for the properties referenced in a query method. 
+ * + * @param createIndexesForQueryMethods the createIndexesForQueryMethods to set + */ + public void setCreateIndexesForQueryMethods(boolean createIndexesForQueryMethods) { + this.createIndexesForQueryMethods = createIndexesForQueryMethods; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport#setMappingContext(org.springframework.data.mapping.context.MappingContext) + */ + @Override + protected void setMappingContext(MappingContext mappingContext) { + + super.setMappingContext(mappingContext); + this.mappingContextConfigured = true; + } + + /* + * (non-Javadoc) + * + * @see + * org.springframework.data.repository.support.RepositoryFactoryBeanSupport + * #createRepositoryFactory() + */ + @Override + protected final RepositoryFactorySupport createRepositoryFactory() { + + RepositoryFactorySupport factory = getFactoryInstance(operations); + + if (createIndexesForQueryMethods) { + factory.addQueryCreationListener( + new IndexEnsuringQueryCreationListener(collectionName -> operations.indexOps(collectionName))); + } + + return factory; + } + + /** + * Creates and initializes a {@link RepositoryFactorySupport} instance. 
+ * + * @param operations + * @return + */ + protected RepositoryFactorySupport getFactoryInstance(ReactiveMongoOperations operations) { + return new ReactiveMongoRepositoryFactory(operations); + } + + /* + * (non-Javadoc) + * + * @see + * org.springframework.data.repository.support.RepositoryFactoryBeanSupport + * #afterPropertiesSet() + */ + @Override + public void afterPropertiesSet() { + + super.afterPropertiesSet(); + Assert.notNull(operations, "ReactiveMongoOperations must not be null!"); + + if (!mappingContextConfigured) { + setMappingContext(operations.getConverter().getMappingContext()); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactivePageImpl.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactivePageImpl.java new file mode 100644 index 000000000..5ca3f5956 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactivePageImpl.java @@ -0,0 +1,169 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import java.util.List; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.core.publisher.MonoProcessor; + +/** + * Reactive {@code Page} implementation. + * + * @param the type of which the page consists. + * @author Mark Paluch + * @since 2.0 + */ +public class ReactivePageImpl extends ReactiveChunk implements Page { + + private static final long serialVersionUID = 867755909294344406L; + + private final MonoProcessor totalMono; + private volatile Long totalValueCache; + private final Pageable pageable; + + /** + * Constructor of {@code PageImpl}. + * + * @param content the content of this page, must not be {@literal null}. + * @param pageable the paging information, can be {@literal null}. + * @param totalMono the total amount of items available. The total might be adapted considering the length of the + * content given, if it is going to be the content of the last page. This is in place to mitigate + * inconsistencies + */ + public ReactivePageImpl(Flux content, Pageable pageable, Mono totalMono) { + + super(content, pageable); + + this.pageable = pageable; + this.totalMono = totalMono.subscribe(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Page#getTotalPages() + */ + @Override + public int getTotalPages() { + return getSize() == 0 ? 1 : (int) Math.ceil((double) getTotal0() / (double) getSize()); + } + + private long getTotal0() { + + if (totalValueCache == null) { + long total = totalMono.block(); + List content = getContent(); + this.totalValueCache = !content.isEmpty() && pageable != null + && pageable.getOffset() + pageable.getPageSize() > total ? 
pageable.getOffset() + content.size() : total; + + } + + return totalValueCache; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Page#getTotalElements() + */ + @Override + public long getTotalElements() { + return getTotal0(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Slice#hasNext() + */ + @Override + public boolean hasNext() { + return getNumber() + 1 < getTotalPages(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Slice#isLast() + */ + @Override + public boolean isLast() { + return !hasNext(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.domain.Slice#transform(org.springframework.core.convert.converter.Converter) + */ + @Override + public Page map(Converter converter) { + return new ReactivePageImpl(Flux.fromIterable(getConvertedContent(converter)), pageable, Mono.just(getTotal0())); + } + + /* + * (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + + String contentType = "UNKNOWN"; + List content = getContent(); + + if (content.size() > 0) { + contentType = content.get(0).getClass().getName(); + } + + return String.format("Page %s of %d containing %s instances", getNumber() + 1, getTotalPages(), contentType); + } + + /* + * (non-Javadoc) + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + + if (this == obj) { + return true; + } + + if (!(obj instanceof ReactivePageImpl)) { + return false; + } + + ReactivePageImpl that = (ReactivePageImpl) obj; + + return getTotal0() == that.getTotal0() && super.equals(obj); + } + + /* + * (non-Javadoc) + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + + int result = 17; + + result += 31 * (int) (getTotal0() ^ getTotal0() >>> 32); + result += 31 * super.hashCode(); + + return result; + } +} diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSliceImpl.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSliceImpl.java new file mode 100644 index 000000000..e2648e3e8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSliceImpl.java @@ -0,0 +1,66 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import java.util.List; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.SliceImpl; + +import reactor.core.publisher.Flux; + +/** + * Reactive {@code Slice} implementation. + * + * @param the type of which the slice consists. 
+ * @author Mark Paluch + * @since 2.0 + */ +public class ReactiveSliceImpl extends ReactiveChunk { + + private static final long serialVersionUID = 867755909294344406L; + + private final Pageable pageable; + + public ReactiveSliceImpl(Flux content, Pageable pageable) { + + super(content, pageable); + + this.pageable = pageable; + } + + public boolean hasNext() { + return containsMore(); + } + + public Slice map(Converter converter) { + return new SliceImpl<>(this.getConvertedContent(converter), pageable, this.hasNext()); + } + + public String toString() { + + String contentType = "UNKNOWN"; + List content = this.getContent(); + if (content.size() > 0) { + contentType = content.get(0).getClass().getName(); + } + + return String.format("Slice %d containing %s instances", + this.getNumber(), contentType); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepository.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepository.java new file mode 100644 index 000000000..5cbb5c12f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepository.java @@ -0,0 +1,352 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.reactivestreams.Publisher; +import org.springframework.data.domain.Example; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.ReactiveMongoRepository; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.util.Assert; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * Reactive repository base implementation for Mongo. + * + * @author Mark Paluch + * @since 2.0 + */ +public class SimpleReactiveMongoRepository implements ReactiveMongoRepository { + + private final ReactiveMongoOperations mongoOperations; + private final MongoEntityInformation entityInformation; + + /** + * Creates a new {@link SimpleReactiveMongoRepository} for the given {@link MongoEntityInformation} and + * {@link ReactiveMongoOperations}. + * + * @param metadata must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. 
+ */ + public SimpleReactiveMongoRepository(MongoEntityInformation metadata, + ReactiveMongoOperations mongoOperations) { + + Assert.notNull(mongoOperations); + Assert.notNull(metadata); + + this.entityInformation = metadata; + this.mongoOperations = mongoOperations; + } + + public Mono findOne(ID id) { + + Assert.notNull(id, "The given id must not be null!"); + + return mongoOperations.findById(id, entityInformation.getJavaType(), entityInformation.getCollectionName()); + } + + public Mono findOne(Mono mono) { + + Assert.notNull(mono, "The given id must not be null!"); + + return mono.then( + id -> mongoOperations.findById(id, entityInformation.getJavaType(), entityInformation.getCollectionName())); + } + + public Mono findOne(Example example) { + + Assert.notNull(example, "Sample must not be null!"); + + Query q = new Query(new Criteria().alike(example)); + return mongoOperations.findOne(q, example.getProbeType(), entityInformation.getCollectionName()); + } + + public Mono exists(ID id) { + + Assert.notNull(id, "The given id must not be null!"); + + return mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(), + entityInformation.getCollectionName()); + } + + public Mono exists(Mono mono) { + + Assert.notNull(mono, "The given id must not be null!"); + + return mono.then(id -> mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(), + entityInformation.getCollectionName())); + + } + + public Mono exists(Example example) { + + Assert.notNull(example, "Sample must not be null!"); + + Query q = new Query(new Criteria().alike(example)); + return mongoOperations.exists(q, example.getProbeType(), entityInformation.getCollectionName()); + } + + @Override + public Flux findAll() { + return findAll(new Query()); + } + + @Override + public Flux findAll(Iterable ids) { + + Assert.notNull(ids, "The given Iterable of Id's must not be null!"); + + Set parameters = new HashSet(tryDetermineRealSizeOrReturn(ids, 10)); + for (ID id : ids) { + 
parameters.add(id); + } + + return findAll(new Query(new Criteria(entityInformation.getIdAttribute()).in(parameters))); + } + + @Override + public Flux findAll(Publisher idStream) { + + Assert.notNull(idStream, "The given Publisher of Id's must not be null!"); + + return Flux.from(idStream).buffer().flatMap(this::findAll); + } + + @Override + public Mono> findAll(Pageable pageable) { + + Assert.notNull(pageable, "The given Pageable must not be null!"); + + Mono count = count(); + Flux content = findAll(new Query().with(pageable)); + + return Mono.fromCallable(() -> new ReactivePageImpl<>(content, pageable, count)); + } + + @Override + public Flux findAll(Sort sort) { + return findAll(new Query().with(sort)); + } + + @Override + public Flux findAll(Example example, Sort sort) { + + Assert.notNull(example, "Sample must not be null!"); + + Query q = new Query(new Criteria().alike(example)); + + if (sort != null) { + q.with(sort); + } + + return mongoOperations.find(q, example.getProbeType(), entityInformation.getCollectionName()); + } + + @Override + public Flux findAll(Example example) { + return findAll(example, null); + } + + public Mono count() { + return mongoOperations.count(new Query(), entityInformation.getCollectionName()); + } + + public Mono count(Example example) { + + Assert.notNull(example, "Sample must not be null!"); + + Query q = new Query(new Criteria().alike(example)); + return mongoOperations.count(q, example.getProbeType(), entityInformation.getCollectionName()); + } + + @Override + public Mono insert(S entity) { + + Assert.notNull(entity, "Entity must not be null!"); + + return mongoOperations.insert(entity, entityInformation.getCollectionName()); + } + + @Override + public Flux insert(Iterable entities) { + + Assert.notNull(entities, "The given Iterable of entities must not be null!"); + + List list = convertIterableToList(entities); + + if (list.isEmpty()) { + return Flux.empty(); + } + + return Flux.from(mongoOperations.insertAll(list)); + } + 
+ @Override + public Flux insert(Publisher entities) { + + Assert.notNull(entities, "The given Publisher of entities must not be null!"); + + return Flux.from(entities).flatMap(entity -> { + return mongoOperations.insert(entity, entityInformation.getCollectionName()); + }); + } + + public Mono save(S entity) { + + Assert.notNull(entity, "Entity must not be null!"); + + if (entityInformation.isNew(entity)) { + return mongoOperations.insert(entity, entityInformation.getCollectionName()); + } + + return mongoOperations.save(entity, entityInformation.getCollectionName()); + } + + public Flux save(Iterable entities) { + + Assert.notNull(entities, "The given Iterable of entities must not be null!"); + + List result = convertIterableToList(entities); + boolean allNew = true; + + for (S entity : entities) { + if (allNew && !entityInformation.isNew(entity)) { + allNew = false; + } + } + + if (allNew) { + return Flux.from(mongoOperations.insertAll(result)); + } + + List> monos = new ArrayList<>(); + for (S entity : result) { + monos.add(save(entity)); + } + + return Flux.merge(monos); + } + + @Override + public Flux save(Publisher entityStream) { + + Assert.notNull(entityStream, "The given Publisher of entities must not be null!"); + + return Flux.from(entityStream).flatMap(entity -> { + + if (entityInformation.isNew(entity)) { + return mongoOperations.insert(entity, entityInformation.getCollectionName()).then(aVoid -> Mono.just(entity)); + } + + return mongoOperations.save(entity, entityInformation.getCollectionName()).then(aVoid -> Mono.just(entity)); + }); + } + + public Mono delete(ID id) { + + Assert.notNull(id, "The given id must not be null!"); + + return mongoOperations + .remove(getIdQuery(id), entityInformation.getJavaType(), entityInformation.getCollectionName()) + .then(); + } + + public Mono delete(T entity) { + + Assert.notNull(entity, "The given entity must not be null!"); + + return delete(entityInformation.getId(entity)); + } + + public Mono delete(Iterable 
entities) { + + Assert.notNull(entities, "The given Iterable of entities must not be null!"); + + return Flux.fromIterable(entities).flatMap(entity -> delete(entityInformation.getId(entity))).then(); + } + + @Override + public Mono delete(Publisher entityStream) { + + Assert.notNull(entityStream, "The given Publisher of entities must not be null!"); + + return Flux.from(entityStream).flatMap(entity -> delete(entityInformation.getId(entity))).then(); + } + + public Mono deleteAll() { + return mongoOperations.remove(new Query(), entityInformation.getCollectionName()) + .then(deleteResult -> Mono.empty()); + } + + private Query getIdQuery(Object id) { + return new Query(getIdCriteria(id)); + } + + private Criteria getIdCriteria(Object id) { + return where(entityInformation.getIdAttribute()).is(id); + } + + private Flux findAll(Query query) { + + if (query == null) { + return Flux.empty(); + } + + return mongoOperations.find(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); + } + + private static List convertIterableToList(Iterable entities) { + + if (entities instanceof List) { + return (List) entities; + } + + int capacity = tryDetermineRealSizeOrReturn(entities, 10); + + if (capacity == 0 || entities == null) { + return Collections.emptyList(); + } + + List list = new ArrayList(capacity); + for (T entity : entities) { + list.add(entity); + } + + return list; + } + + private static int tryDetermineRealSizeOrReturn(Iterable iterable, int defaultSize) { + return iterable == null ? 0 : (iterable instanceof Collection) ? 
((Collection) iterable).size() : defaultSize; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractMongoConfigurationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractMongoConfigurationUnitTests.java index 2bb1bd0a0..d4a6ce7c5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractMongoConfigurationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractMongoConfigurationUnitTests.java @@ -18,11 +18,9 @@ package org.springframework.data.mongodb.config; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; -import example.first.First; -import example.second.Second; - import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.Set; import org.junit.Rule; @@ -45,11 +43,15 @@ import org.springframework.test.util.ReflectionTestUtils; import com.mongodb.Mongo; import com.mongodb.MongoClient; +import example.first.First; +import example.second.Second; + /** * Unit tests for {@link AbstractMongoConfiguration}. 
* * @author Oliver Gierke * @author Thomas Darimont + * @author Mark Paluch */ public class AbstractMongoConfigurationUnitTests { @@ -63,7 +65,7 @@ public class AbstractMongoConfigurationUnitTests { AbstractMongoConfiguration configuration = new SampleMongoConfiguration(); assertThat(configuration.getMappingBasePackage(), is(SampleMongoConfiguration.class.getPackage().getName())); - assertThat(configuration.getInitialEntitySet(), hasSize(1)); + assertThat(configuration.getInitialEntitySet(), hasSize(2)); assertThat(configuration.getInitialEntitySet(), hasItem(Entity.class)); } @@ -72,9 +74,7 @@ public class AbstractMongoConfigurationUnitTests { */ @Test public void doesNotScanPackageIfMappingPackageIsNull() throws ClassNotFoundException { - assertScanningDisabled(null); - } /** @@ -169,12 +169,12 @@ public class AbstractMongoConfigurationUnitTests { AbstractMongoConfiguration configuration = new SampleMongoConfiguration() { @Override - protected String getMappingBasePackage() { - return value; + protected Collection getMappingBasePackages() { + return Collections.singleton(value); } }; - assertThat(configuration.getMappingBasePackage(), is(value)); + assertThat(configuration.getMappingBasePackages(), hasItem(value)); assertThat(configuration.getInitialEntitySet(), hasSize(0)); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationIntegrationTests.java new file mode 100644 index 000000000..3dcb3c25d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationIntegrationTests.java @@ -0,0 +1,69 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import static org.assertj.core.api.AssertionsForInterfaceTypes.*; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoClients; + +/** + * Integration tests for {@link AbstractReactiveMongoConfiguration}. 
+ * + * @author Mark Paluch + */ +@RunWith(SpringRunner.class) +@ContextConfiguration(classes = AbstractReactiveMongoConfigurationIntegrationTests.ReactiveConfiguration.class) +public class AbstractReactiveMongoConfigurationIntegrationTests { + + @Autowired ApplicationContext context; + + /** + * @see DATAMONGO-1444 + */ + @Test + public void contextShouldContainTemplate() { + + assertThat(context.getBean(SimpleReactiveMongoDatabaseFactory.class)).isNotNull(); + assertThat(context.getBean(ReactiveMongoOperations.class)).isNotNull(); + assertThat(context.getBean(ReactiveMongoTemplate.class)).isNotNull(); + } + + @Configuration + static class ReactiveConfiguration extends AbstractReactiveMongoConfiguration { + + @Override + public MongoClient mongoClient() { + return MongoClients.create(); + } + + @Override + protected String getDatabaseName() { + return "database"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationUnitTests.java new file mode 100644 index 000000000..f73f4da6e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationUnitTests.java @@ -0,0 +1,226 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.config; + +import static org.hamcrest.Matchers.*; +import static org.junit.Assert.*; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Set; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.springframework.beans.factory.NoSuchBeanDefinitionException; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.support.AbstractApplicationContext; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoTypeMapper; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.expression.spel.support.StandardEvaluationContext; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.Mongo; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoClients; + +import example.first.First; +import example.second.Second; + +/** + * Unit tests for {@link AbstractReactiveMongoConfiguration}. 
+ * + * @author Mark Paluch + */ +public class AbstractReactiveMongoConfigurationUnitTests { + + @Rule public ExpectedException exception = ExpectedException.none(); + + /** + * @see DATAMONGO-1444 + */ + @Test + public void usesConfigClassPackageAsBaseMappingPackage() throws ClassNotFoundException { + + AbstractReactiveMongoConfiguration configuration = new SampleMongoConfiguration(); + assertThat(configuration.getMappingBasePackages(), hasItem(SampleMongoConfiguration.class.getPackage().getName())); + assertThat(configuration.getInitialEntitySet(), hasSize(2)); + assertThat(configuration.getInitialEntitySet(), hasItem(Entity.class)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void doesNotScanPackageIfMappingPackageIsNull() throws ClassNotFoundException { + + assertScanningDisabled(null); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void doesNotScanPackageIfMappingPackageIsEmpty() throws ClassNotFoundException { + + assertScanningDisabled(""); + assertScanningDisabled(" "); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void containsMongoDbFactoryButNoMongoBean() { + + AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class); + + assertThat(context.getBean(SimpleReactiveMongoDatabaseFactory.class), is(notNullValue())); + + exception.expect(NoSuchBeanDefinitionException.class); + context.getBean(Mongo.class); + context.close(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void returnsUninitializedMappingContext() throws Exception { + + SampleMongoConfiguration configuration = new SampleMongoConfiguration(); + MongoMappingContext context = configuration.mongoMappingContext(); + + assertThat(context.getPersistentEntities(), is(emptyIterable())); + context.initialize(); + assertThat(context.getPersistentEntities(), is(not(emptyIterable()))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void lifecycleCallbacksAreInvokedInAppropriateOrder() { + + 
AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class); + MongoMappingContext mappingContext = context.getBean(MongoMappingContext.class); + BasicMongoPersistentEntity entity = mappingContext.getPersistentEntity(Entity.class); + StandardEvaluationContext spElContext = (StandardEvaluationContext) ReflectionTestUtils.getField(entity, "context"); + + assertThat(spElContext.getBeanResolver(), is(notNullValue())); + context.close(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldBeAbleToConfigureCustomTypeMapperViaJavaConfig() { + + AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class); + MongoTypeMapper typeMapper = context.getBean(CustomMongoTypeMapper.class); + MappingMongoConverter mmc = context.getBean(MappingMongoConverter.class); + + assertThat(mmc, is(notNullValue())); + assertThat(mmc.getTypeMapper(), is(typeMapper)); + context.close(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + @SuppressWarnings("unchecked") + public void allowsMultipleEntityBasePackages() throws ClassNotFoundException { + + ConfigurationWithMultipleBasePackages config = new ConfigurationWithMultipleBasePackages(); + Set> entities = config.getInitialEntitySet(); + + assertThat(entities, hasSize(2)); + assertThat(entities, hasItems(First.class, Second.class)); + } + + private static void assertScanningDisabled(final String value) throws ClassNotFoundException { + + AbstractReactiveMongoConfiguration configuration = new SampleMongoConfiguration() { + @Override + protected Collection getMappingBasePackages() { + return Collections.singleton(value); + } + }; + + assertThat(configuration.getMappingBasePackages(), hasItem(value)); + assertThat(configuration.getInitialEntitySet(), hasSize(0)); + } + + @Configuration + static class SampleMongoConfiguration extends AbstractReactiveMongoConfiguration { + + @Override + protected String getDatabaseName() { + return 
"database"; + } + + @Override + public MongoClient mongoClient() { + return MongoClients.create(); + } + + @Bean + @Override + public MappingMongoConverter mappingMongoConverter() throws Exception { + + MappingMongoConverter converter = super.mappingMongoConverter(); + converter.setTypeMapper(typeMapper()); + + return converter; + } + + @Bean + public MongoTypeMapper typeMapper() { + return new CustomMongoTypeMapper(); + } + } + + static class ConfigurationWithMultipleBasePackages extends AbstractReactiveMongoConfiguration { + + @Override + protected String getDatabaseName() { + return "test"; + } + + @Override + public MongoClient mongoClient() { + return MongoClients.create(); + } + + @Override + protected Collection getMappingBasePackages() { + return Arrays.asList("example.first", "example.second"); + } + } + + @Document + static class Entity {} +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingViaJavaConfigRepositoriesTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingViaJavaConfigRepositoriesTests.java index 288e09cb9..c0c0f0a1a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingViaJavaConfigRepositoriesTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingViaJavaConfigRepositoriesTests.java @@ -39,7 +39,7 @@ import com.mongodb.MongoClient; /** * Integration tests for auditing via Java config. 
- * + * * @author Thomas Darimont * @author Oliver Gierke */ diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java index 8688ea6ec..b48e5c77d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java @@ -54,8 +54,7 @@ public class DefaultIndexOperationsIntegrationTests { this.collection = this.template.getDb().getCollection(collectionName, Document.class); this.collection.dropIndexes(); - - this.indexOps = new DefaultIndexOperations(template, collectionName); + this.indexOps = new DefaultIndexOperations(template.getMongoDbFactory(), collectionName); } /** diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index 84bca0031..53d6d81fb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -78,7 +78,7 @@ import com.mongodb.client.model.UpdateOptions; /** * Unit tests for {@link MongoTemplate}. 
- * + * * @author Oliver Gierke * @author Christoph Strobl */ @@ -291,7 +291,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests { GenericApplicationContext applicationContext = new GenericApplicationContext(); applicationContext.getBeanFactory().registerSingleton("foo", - new MongoPersistentEntityIndexCreator(new MongoMappingContext(), factory)); + new MongoPersistentEntityIndexCreator(new MongoMappingContext(), template)); applicationContext.refresh(); GenericApplicationContext spy = spy(applicationContext); @@ -601,7 +601,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests { /** * Mocks out the {@link MongoTemplate#getDb()} method to return the {@link DB} mock instead of executing the actual * behaviour. - * + * * @return */ private MongoTemplate mockOutGetDb() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/NoExplicitIdTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/NoExplicitIdTests.java index de8b22176..e97490111 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/NoExplicitIdTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/NoExplicitIdTests.java @@ -38,7 +38,7 @@ import com.mongodb.MongoClient; /** * Integration tests for DATAMONGO-1289. 
- * + * * @author Christoph Strobl */ @RunWith(SpringJUnit4ClassRunner.class) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java index e9ff22c9f..39008a2ab 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java @@ -40,7 +40,7 @@ import com.mongodb.client.FindIterable; /** * Unit tests for {@link QueryCursorPreparer}. - * + * * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch @@ -49,6 +49,7 @@ import com.mongodb.client.FindIterable; public class QueryCursorPreparerUnitTests { @Mock MongoDbFactory factory; + @Mock MongoExceptionTranslator exceptionTranslatorMock; @Mock FindIterable cursor; @Mock FindIterable cursorToUse; @@ -56,6 +57,7 @@ public class QueryCursorPreparerUnitTests { @Before public void setUp() { + when(factory.getExceptionTranslator()).thenReturn(exceptionTranslatorMock); when(cursor.batchSize(anyInt())).thenReturn(cursor); when(cursor.filter(any(Document.class))).thenReturn(cursor); when(cursor.limit(anyInt())).thenReturn(cursor); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateExecuteTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateExecuteTests.java new file mode 100644 index 000000000..8fb8e29d2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateExecuteTests.java @@ -0,0 +1,242 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.data.mongodb.core;
+
+// NOTE(review): removed accidental 'import static com.sun.prism.impl.Disposer.*' (JavaFX internal)
+import static org.hamcrest.Matchers.*;
+import static org.junit.Assert.*;
+import static org.junit.Assume.*;
+
+import java.util.List;
+
+import org.bson.Document;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.dao.InvalidDataAccessApiUsageException;
+import org.springframework.data.mongodb.UncategorizedMongoDbException;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+
+import com.mongodb.MongoException;
+import com.mongodb.ReadPreference;
+import com.mongodb.reactivestreams.client.MongoDatabase;
+
+import reactor.core.publisher.Flux;
+import reactor.test.TestSubscriber;
+
+/**
+ * Integration test for {@link ReactiveMongoTemplate} execute methods.
+ *
+ * @author Mark Paluch
+ */
+@RunWith(SpringJUnit4ClassRunner.class)
+@ContextConfiguration("classpath:reactive-infrastructure.xml")
+public class ReactiveMongoTemplateExecuteTests {
+
+	private static final org.springframework.data.util.Version THREE = org.springframework.data.util.Version.parse("3.0");
+
+	@Autowired SimpleReactiveMongoDatabaseFactory factory;
+	@Autowired ReactiveMongoOperations operations;
+
+	@Rule public ExpectedException thrown = ExpectedException.none();
+
+	org.springframework.data.util.Version mongoVersion;
+
+	@Before
+	public void setUp() {
+		tearDown(); // was cleanUp(): that resolved to JavaFX's Disposer.cleanUp() via the accidental static import
+
+		if (mongoVersion == null) {
+			org.bson.Document result = operations.executeCommand("{ buildInfo: 1 }").block();
+			mongoVersion = org.springframework.data.util.Version.parse(result.get("version").toString());
+		}
+	}
+
+	@After
+	public void tearDown() {
+
+		operations.dropCollection("person").block();
+		operations.dropCollection(Person.class).block();
+		operations.dropCollection("execute_test").block();
+		operations.dropCollection("execute_test1").block();
+		operations.dropCollection("execute_test2").block();
+		operations.dropCollection("execute_index_test").block();
+	}
+
+	/**
+	 * @see DATAMONGO-1444
+	 */
+	@Test
+	public void executeCommandJsonCommandShouldReturnSingleResponse() throws Exception {
+
+		Document document = operations.executeCommand("{ buildInfo: 1 }").block();
+
+		assertThat(document, hasKey("version"));
+	}
+
+	/**
+	 * @see DATAMONGO-1444
+	 */
+	@Test
+	public void executeCommandDocumentCommandShouldReturnSingleResponse() throws Exception {
+
+		Document document = operations.executeCommand(new Document("buildInfo", 1)).block();
+
+		assertThat(document, hasKey("version"));
+	}
+
+	/**
+	 * @see DATAMONGO-1444
+	 */
+	@Test
+	public void executeCommandJsonCommandShouldReturnMultipleResponses() throws Exception {
+
+		assumeTrue(mongoVersion.isGreaterThan(THREE));
+
+		operations.executeCommand("{ insert: 'execute_test', documents: [{},{},{}]}").block();
+
+
+		
TestSubscriber subscriber = TestSubscriber.create(); + operations.executeCommand("{ find: 'execute_test'}").subscribe(subscriber); + + subscriber.awaitAndAssertNextValueCount(1); + subscriber.assertValuesWith(document -> { + + assertThat(document, hasKey("waitedMS")); + assertThat(document, hasKey("cursor")); + }); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void executeCommandJsonCommandShouldTranslateExceptions() throws Exception { + + TestSubscriber testSubscriber = TestSubscriber.subscribe(operations.executeCommand("{ unknown: 1 }")); + + testSubscriber.await().assertError(InvalidDataAccessApiUsageException.class); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void executeCommandDocumentCommandShouldTranslateExceptions() throws Exception { + + TestSubscriber testSubscriber = TestSubscriber + .subscribe(operations.executeCommand(new Document("unknown", 1))); + + testSubscriber.await().assertError(InvalidDataAccessApiUsageException.class); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void executeCommandWithReadPreferenceCommandShouldTranslateExceptions() throws Exception { + + TestSubscriber testSubscriber = TestSubscriber + .subscribe(operations.executeCommand(new Document("unknown", 1), ReadPreference.nearest())); + + testSubscriber.await().assertError(InvalidDataAccessApiUsageException.class); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void executeOnDatabaseShouldExecuteCommand() throws Exception { + + operations.executeCommand("{ insert: 'execute_test', documents: [{},{},{}]}").block(); + operations.executeCommand("{ insert: 'execute_test1', documents: [{},{},{}]}").block(); + operations.executeCommand("{ insert: 'execute_test2', documents: [{},{},{}]}").block(); + + Flux execute = operations.execute(MongoDatabase::listCollections); + + List documents = execute.filter(document -> document.getString("name").startsWith("execute_test")) + .collectList().block(); + + assertThat(documents, hasSize(3)); + } + + 
/**
+	 * @see DATAMONGO-1444
+	 */
+	@Test
+	public void executeOnDatabaseShouldDeferExecution() throws Exception {
+
+		operations.execute(db -> {
+			throw new MongoException(50, "hi there");
+		});
+
+		// the assertion here is that the exception is not thrown
+	}
+
+	/**
+	 * @see DATAMONGO-1444
+	 */
+	@Test
+	public void executeOnDatabaseShouldTranslateExceptions() throws Exception {
+
+		TestSubscriber testSubscriber = TestSubscriber.create();
+
+		Flux execute = operations.execute(db -> {
+			throw new MongoException(50, "hi there");
+		});
+
+		execute.subscribe(testSubscriber);
+
+		testSubscriber.await().assertError(UncategorizedMongoDbException.class);
+	}
+
+	/**
+	 * @see DATAMONGO-1444
+	 */
+	@Test
+	public void executeOnCollectionWithTypeShouldReturnFindResults() throws Exception {
+
+		operations.executeCommand("{ insert: 'person', documents: [{},{},{}]}").block();
+
+		TestSubscriber testSubscriber = TestSubscriber.create();
+
+		Flux execute = operations.execute(Person.class, collection -> collection.find());
+		execute.subscribe(testSubscriber);
+
+		testSubscriber.awaitAndAssertNextValueCount(3).assertComplete();
+	}
+
+	/**
+	 * @see DATAMONGO-1444
+	 */
+	@Test
+	public void executeOnCollectionWithNameShouldReturnFindResults() throws Exception {
+
+		operations.executeCommand("{ insert: 'execute_test', documents: [{},{},{}]}").block();
+
+		TestSubscriber testSubscriber = TestSubscriber.create();
+
+		Flux execute = operations.execute("execute_test", collection -> collection.find());
+		execute.subscribe(testSubscriber);
+
+		testSubscriber.awaitAndAssertNextValueCount(3).assertComplete();
+	}
+}
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateIndexTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateIndexTests.java
new file mode 100644
index 000000000..9fb52c348
--- /dev/null
+++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateIndexTests.java @@ -0,0 +1,210 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.hamcrest.Matchers.*; +import static org.junit.Assert.*; + +import java.util.List; + +import org.bson.Document; +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.index.Index; +import org.springframework.data.mongodb.core.index.Index.Duplicates; +import org.springframework.data.mongodb.core.index.IndexField; +import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.util.Version; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +import com.mongodb.reactivestreams.client.ListIndexesPublisher; +import com.mongodb.reactivestreams.client.MongoCollection; + +import lombok.Data; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.TestSubscriber; + +/** + * 
Integration test for {@link ReactiveMongoTemplate} index operations.
+ *
+ * @author Mark Paluch
+ */
+@RunWith(SpringJUnit4ClassRunner.class)
+@ContextConfiguration("classpath:reactive-infrastructure.xml")
+public class ReactiveMongoTemplateIndexTests {
+
+	private static final org.springframework.data.util.Version TWO_DOT_EIGHT = org.springframework.data.util.Version
+			.parse("2.8");
+
+	@Autowired SimpleReactiveMongoDatabaseFactory factory;
+	@Autowired ReactiveMongoTemplate template;
+
+	@Rule public ExpectedException thrown = ExpectedException.none();
+
+	Version mongoVersion;
+
+	@Before
+	public void setUp() {
+		cleanDb();
+		queryMongoVersionIfNecessary();
+	}
+
+	@After
+	public void cleanUp() {}
+
+	private void queryMongoVersionIfNecessary() {
+
+		if (mongoVersion == null) {
+			org.bson.Document result = template.executeCommand("{ buildInfo: 1 }").block();
+			mongoVersion = Version.parse(result.get("version").toString());
+		}
+	}
+
+	private void cleanDb() {
+		template.dropCollection(Person.class).block();
+	}
+
+	/**
+	 * @see DATAMONGO-1444
+	 */
+	@Test
+	@SuppressWarnings("deprecation")
+	public void testEnsureIndexShouldCreateIndex() {
+
+		Person p1 = new Person("Oliver");
+		p1.setAge(25);
+		template.insert(p1).block(); // block() to subscribe; a bare insert(..) publisher never executes
+		Person p2 = new Person("Sven");
+		p2.setAge(40);
+		template.insert(p2).block();
+
+		template.reactiveIndexOps(Person.class).ensureIndex(new Index().on("age", Direction.DESC).unique())
+				.block();
+
+		MongoCollection coll = template.getCollection(template.getCollectionName(Person.class));
+		List indexInfo = Flux.from(coll.listIndexes()).collectList().block();
+
+		assertThat(indexInfo.size(), is(2));
+		Object indexKey = null;
+		boolean unique = false;
+		for (org.bson.Document ix : indexInfo) {
+
+			if ("age_-1".equals(ix.get("name"))) {
+				indexKey = ix.get("key");
+				unique = (Boolean) ix.get("unique");
+			}
+		}
+		assertThat(((org.bson.Document) indexKey), hasEntry("age", -1));
+		assertThat(unique, is(true));
+	}
+
+	/**
+	 * @see DATAMONGO-1444
+	 */
+	@Test
+	
@SuppressWarnings("deprecation") + public void getIndexInfoShouldReturnCorrectIndex() { + + Person p1 = new Person("Oliver"); + p1.setAge(25); + template.insert(p1).block(); + + template.reactiveIndexOps(Person.class).ensureIndex(new Index().on("age", Direction.DESC).unique()) + .block(); + + List indexInfoList = Flux.from(template.reactiveIndexOps(Person.class).getIndexInfo()).collectList() + .block(); + assertThat(indexInfoList.size(), is(2)); + + IndexInfo ii = indexInfoList.get(1); + assertThat(ii.isUnique(), is(true)); + assertThat(ii.isDropDuplicates(), is(false)); + assertThat(ii.isSparse(), is(false)); + + List indexFields = ii.getIndexFields(); + IndexField field = indexFields.get(0); + + assertThat(field, is(IndexField.create("age", Direction.DESC))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void testReadIndexInfoForIndicesCreatedViaMongoShellCommands() { + + String command = "db." + template.getCollectionName(Person.class) + + ".createIndex({'age':-1}, {'unique':true, 'sparse':true}), 1"; + template.reactiveIndexOps(Person.class).dropAllIndexes().block(); + + TestSubscriber subscriber = TestSubscriber + .subscribe(template.reactiveIndexOps(Person.class).getIndexInfo()); + subscriber.await().assertComplete().assertNoValues(); + + Mono.from(factory.getMongoDatabase().runCommand(new org.bson.Document("eval", command))).block(); + + ListIndexesPublisher listIndexesPublisher = template + .getCollection(template.getCollectionName(Person.class)).listIndexes(); + List indexInfo = Flux.from(listIndexesPublisher).collectList().block(); + org.bson.Document indexKey = null; + boolean unique = false; + + for (Document document : indexInfo) { + + if ("age_-1".equals(document.get("name"))) { + indexKey = (org.bson.Document) document.get("key"); + unique = (Boolean) document.get("unique"); + } + } + + assertThat(indexKey, hasEntry("age", -1D)); + assertThat(unique, is(true)); + + List indexInfos = 
template.reactiveIndexOps(Person.class).getIndexInfo().collectList().block(); + + IndexInfo info = indexInfos.get(1); + assertThat(info.isUnique(), is(true)); + assertThat(info.isSparse(), is(true)); + + List indexFields = info.getIndexFields(); + IndexField field = indexFields.get(0); + + assertThat(field, is(IndexField.create("age", Direction.DESC))); + } + + @Data + static class Sample { + + @Id String id; + String field; + + public Sample() {} + + public Sample(String id, String field) { + this.id = id; + this.field = field; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java new file mode 100644 index 000000000..48e926cee --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java @@ -0,0 +1,1023 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.core; + +import static org.hamcrest.Matchers.*; +import static org.junit.Assert.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.dao.DuplicateKeyException; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.domain.Sort.Order; +import org.springframework.data.geo.Metrics; +import org.springframework.data.mapping.model.MappingException; +import org.springframework.data.mongodb.core.MongoTemplateTests.PersonWithConvertedId; +import org.springframework.data.mongodb.core.MongoTemplateTests.VersionedPerson; +import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; +import org.springframework.data.mongodb.core.index.GeospatialIndex; +import org.springframework.data.mongodb.core.index.Index; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import 
org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.util.Version; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +import com.mongodb.WriteConcern; + +import lombok.Data; +import reactor.core.Cancellation; +import reactor.core.publisher.Flux; +import reactor.test.TestSubscriber; + +/** + * Integration test for {@link MongoTemplate}. + * + * @author Mark Paluch + */ +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration("classpath:reactive-infrastructure.xml") +public class ReactiveMongoTemplateTests { + + @Autowired SimpleReactiveMongoDatabaseFactory factory; + @Autowired ReactiveMongoTemplate template; + + @Rule public ExpectedException thrown = ExpectedException.none(); + + Version mongoVersion; + + @Before + public void setUp() { + cleanDb(); + queryMongoVersionIfNecessary(); + } + + @After + public void cleanUp() {} + + private void queryMongoVersionIfNecessary() { + + if (mongoVersion == null) { + org.bson.Document result = template.executeCommand("{ buildInfo: 1 }").block(); + mongoVersion = org.springframework.data.util.Version.parse(result.get("version").toString()); + } + } + + private void cleanDb() { + template.dropCollection("people") // + .and(template.dropCollection("collection")) // + .and(template.dropCollection(Person.class)) // + .and(template.dropCollection(Venue.class)) // + .and(template.dropCollection(PersonWithAList.class)) // + .and(template.dropCollection(PersonWithIdPropertyOfTypeObjectId.class)) // + .and(template.dropCollection(PersonWithVersionPropertyOfTypeInteger.class)) // + .and(template.dropCollection(Sample.class)).block(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void insertSetsId() throws Exception { + + PersonWithAList person = new PersonWithAList(); + assert person.getId() == null; + + template.insert(person).block(); + + assertThat(person.getId(), is(notNullValue())); + } + + /** 
+ * @see DATAMONGO-1444 + */ + @Test + public void insertAllSetsId() throws Exception { + + PersonWithAList person = new PersonWithAList(); + assert person.getId() == null; + + template.insertAll(Collections.singletonList(person)).next().block(); + + assertThat(person.getId(), is(notNullValue())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void insertCollectionSetsId() throws Exception { + + PersonWithAList person = new PersonWithAList(); + assert person.getId() == null; + + template.insert(Collections.singletonList(person), PersonWithAList.class).next().block(); + + assertThat(person.getId(), is(notNullValue())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void saveSetsId() throws Exception { + + PersonWithAList person = new PersonWithAList(); + assert person.getId() == null; + + template.save(person).block(); + + assertThat(person.getId(), is(notNullValue())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void insertsSimpleEntityCorrectly() throws Exception { + + Person person = new Person("Mark"); + person.setAge(35); + template.insert(person).block(); + + TestSubscriber testSubscriber = TestSubscriber.create(); + Flux flux = template.find(new Query(Criteria.where("_id").is(person.getId())), Person.class); + flux.subscribe(testSubscriber); + + testSubscriber.awaitAndAssertNextValueCount(1); + testSubscriber.assertValues(person); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void simpleInsertDoesNotAllowArrays() throws Exception { + + thrown.expect(IllegalArgumentException.class); + + Person person = new Person("Mark"); + person.setAge(35); + template.insert(new Person[] { person }); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void simpleInsertDoesNotAllowCollections() throws Exception { + + thrown.expect(IllegalArgumentException.class); + + Person person = new Person("Mark"); + person.setAge(35); + template.insert(Collections.singletonList(person)); + } + + /** + * @see DATAMONGO-1444 + */ + 
@Test + public void insertsSimpleEntityWithSuppliedCollectionNameCorrectly() throws Exception { + + Person person = new Person("Homer"); + person.setAge(35); + template.insert(person, "people").block(); + + TestSubscriber testSubscriber = TestSubscriber.create(); + Flux flux = template.find(new Query(Criteria.where("_id").is(person.getId())), Person.class, "people"); + flux.subscribe(testSubscriber); + + testSubscriber.awaitAndAssertNextValueCount(1); + testSubscriber.assertValues(person); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void insertBatchCorrectly() throws Exception { + + List persons = Arrays.asList(new Person("Dick", 22), new Person("Harry", 23), new Person("Tom", 21)); + + template.insertAll(persons).next().block(); + + TestSubscriber testSubscriber = TestSubscriber.create(); + Flux flux = template.find(new Query().with(new Sort(new Order("firstname"))), Person.class); + flux.subscribe(testSubscriber); + + testSubscriber.awaitAndAssertNextValueCount(3); + testSubscriber.assertValues(persons.toArray(new Person[persons.size()])); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void insertBatchWithSuppliedCollectionNameCorrectly() throws Exception { + + List persons = Arrays.asList(new Person("Dick", 22), new Person("Harry", 23), new Person("Tom", 21)); + + template.insert(persons, "people").then().block(); + + TestSubscriber testSubscriber = TestSubscriber.create(); + Flux flux = template.find(new Query().with(new Sort(new Order("firstname"))), Person.class, "people"); + flux.subscribe(testSubscriber); + + testSubscriber.awaitAndAssertNextValueCount(3); + testSubscriber.assertValues(persons.toArray(new Person[persons.size()])); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void insertBatchWithSuppliedEntityTypeCorrectly() throws Exception { + + List persons = Arrays.asList(new Person("Dick", 22), new Person("Harry", 23), new Person("Tom", 21)); + + template.insert(persons, Person.class).then().block(); + + 
TestSubscriber testSubscriber = TestSubscriber.create(); + Flux flux = template.find(new Query().with(new Sort(new Order("firstname"))), Person.class); + flux.subscribe(testSubscriber); + + testSubscriber.awaitAndAssertNextValueCount(3); + testSubscriber.assertValues(persons.toArray(new Person[persons.size()])); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void testAddingToList() { + + PersonWithAList p = new PersonWithAList(); + p.setFirstName("Sven"); + p.setAge(22); + template.insert(p).block(); + + Query q1 = new Query(Criteria.where("id").is(p.getId())); + PersonWithAList p2 = template.findOne(q1, PersonWithAList.class).block(); + assertThat(p2, notNullValue()); + assertThat(p2.getWishList().size(), is(0)); + + p2.addToWishList("please work!"); + + template.save(p2).block(); + + PersonWithAList p3 = template.findOne(q1, PersonWithAList.class).block(); + assertThat(p3, notNullValue()); + assertThat(p3.getWishList().size(), is(1)); + + Friend f = new Friend(); + p.setFirstName("Erik"); + p.setAge(21); + + p3.addFriend(f); + template.save(p3).block(); + + PersonWithAList p4 = template.findOne(q1, PersonWithAList.class).block(); + assertThat(p4, notNullValue()); + assertThat(p4.getWishList().size(), is(1)); + assertThat(p4.getFriends().size(), is(1)); + + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void testFindOneWithSort() { + PersonWithAList p = new PersonWithAList(); + p.setFirstName("Sven"); + p.setAge(22); + template.insert(p).block(); + + PersonWithAList p2 = new PersonWithAList(); + p2.setFirstName("Erik"); + p2.setAge(21); + template.insert(p2).block(); + + PersonWithAList p3 = new PersonWithAList(); + p3.setFirstName("Mark"); + p3.setAge(40); + template.insert(p3).block(); + + // test query with a sort + Query q2 = new Query(Criteria.where("age").gt(10)); + q2.with(new Sort(Direction.DESC, "age")); + PersonWithAList p5 = template.findOne(q2, PersonWithAList.class).block(); + assertThat(p5.getFirstName(), is("Mark")); + } + + /** 
+ * @see DATAMONGO-1444 + */ + @Test + public void bogusUpdateDoesNotTriggerException() throws Exception { + + ReactiveMongoTemplate mongoTemplate = new ReactiveMongoTemplate(factory); + mongoTemplate.setWriteResultChecking(WriteResultChecking.EXCEPTION); + + Person person = new Person("Oliver2"); + person.setAge(25); + mongoTemplate.insert(person).block(); + + Query q = new Query(Criteria.where("BOGUS").gt(22)); + Update u = new Update().set("firstName", "Sven"); + mongoTemplate.updateFirst(q, u, Person.class).block(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void updateFirstByEntityTypeShouldUpdateObject() throws Exception { + + Person person = new Person("Oliver2", 25); + template.insert(person) // + .then(template.updateFirst(new Query(Criteria.where("age").is(25)), new Update().set("firstName", "Sven"), + Person.class)) // + .flatMap(p -> template.find(new Query(Criteria.where("age").is(25)), Person.class)) + .subscribeWith(TestSubscriber.create()) // + .await() // + .assertValuesWith(result -> { + assertThat(result.getFirstName(), is(equalTo("Sven"))); + }); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void updateFirstByCollectionNameShouldUpdateObjects() throws Exception { + + Person person = new Person("Oliver2", 25); + template.insert(person, "people") // + .then(template.updateFirst(new Query(Criteria.where("age").is(25)), new Update().set("firstName", "Sven"), + "people")) // + .flatMap(p -> template.find(new Query(Criteria.where("age").is(25)), Person.class, "people")) + .subscribeWith(TestSubscriber.create()) // + .await() // + .assertValuesWith(result -> { + assertThat(result.getFirstName(), is(equalTo("Sven"))); + }); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void updateMultiByEntityTypeShouldUpdateObjects() throws Exception { + + Query query = new Query(new Criteria().orOperator(Criteria.where("firstName").is("Walter Jr"), + Criteria.where("firstName").is("Walter"))); + + template.insertAll(Flux.just(new 
Person("Walter", 50), new Person("Skyler", 43), new Person("Walter Jr", 16))) // + .collectList() // + .flatMap(a -> template.updateMulti(query, new Update().set("firstName", "Walt"), Person.class)) // + .flatMap(p -> template.find(new Query(Criteria.where("firstName").is("Walt")), Person.class)) // + .subscribeWith(TestSubscriber.create()) // + .awaitAndAssertNextValueCount(2); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void updateMultiByCollectionNameShouldUpdateObject() throws Exception { + + Query query = new Query(new Criteria().orOperator(Criteria.where("firstName").is("Walter Jr"), + Criteria.where("firstName").is("Walter"))); + + template + .insert(Flux.just(new Person("Walter", 50), new Person("Skyler", 43), new Person("Walter Jr", 16)), "people") // + .collectList() // + .flatMap(a -> template.updateMulti(query, new Update().set("firstName", "Walt"), Person.class, "people")) // + .flatMap(p -> template.find(new Query(Criteria.where("firstName").is("Walt")), Person.class, "people")) // + .subscribeWith(TestSubscriber.create()) // + .awaitAndAssertNextValueCount(2); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void throwsExceptionForDuplicateIds() { + + ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory); + template.setWriteResultChecking(WriteResultChecking.EXCEPTION); + + Person person = new Person(new ObjectId(), "Amol"); + person.setAge(28); + + template.insert(person).block(); + + try { + template.insert(person).block(); + fail("Expected DataIntegrityViolationException!"); + } catch (DataIntegrityViolationException e) { + assertThat(e.getMessage(), containsString("E11000 duplicate key error")); + } + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void throwsExceptionForUpdateWithInvalidPushOperator() { + + ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory); + template.setWriteResultChecking(WriteResultChecking.EXCEPTION); + + ObjectId id = new ObjectId(); + Person person = new 
Person(id, "Amol"); + person.setAge(28); + + template.insert(person).block(); + + thrown.expect(DataIntegrityViolationException.class); + thrown.expectMessage("array"); + thrown.expectMessage("age"); + // thrown.expectMessage("failed"); + + Query query = new Query(Criteria.where("firstName").is("Amol")); + Update upd = new Update().push("age", 29); + template.updateFirst(query, upd, Person.class).block(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void rejectsDuplicateIdInInsertAll() { + + thrown.expect(DataIntegrityViolationException.class); + thrown.expectMessage("E11000 duplicate key error"); + + ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory); + template.setWriteResultChecking(WriteResultChecking.EXCEPTION); + + ObjectId id = new ObjectId(); + Person person = new Person(id, "Amol"); + person.setAge(28); + + List records = new ArrayList<>(); + records.add(person); + records.add(person); + + template.insertAll(records).next().block(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void testFindAndUpdate() { + + template.insertAll(Arrays.asList(new Person("Tom", 21), new Person("Dick", 22), new Person("Harry", 23))).next() + .block(); + + Query query = new Query(Criteria.where("firstName").is("Harry")); + Update update = new Update().inc("age", 1); + + Person p = template.findAndModify(query, update, Person.class).block(); // return old + assertThat(p.getFirstName(), is("Harry")); + assertThat(p.getAge(), is(23)); + p = template.findOne(query, Person.class).block(); + assertThat(p.getAge(), is(24)); + + p = template.findAndModify(query, update, Person.class, "person").block(); + assertThat(p.getAge(), is(24)); + p = template.findOne(query, Person.class).block(); + assertThat(p.getAge(), is(25)); + + p = template.findAndModify(query, update, new FindAndModifyOptions().returnNew(true), Person.class).block(); + assertThat(p.getAge(), is(26)); + + p = template.findAndModify(query, update, null, Person.class, 
"person").block(); + assertThat(p.getAge(), is(26)); + p = template.findOne(query, Person.class).block(); + assertThat(p.getAge(), is(27)); + + Query query2 = new Query(Criteria.where("firstName").is("Mary")); + p = template.findAndModify(query2, update, new FindAndModifyOptions().returnNew(true).upsert(true), Person.class) + .block(); + assertThat(p.getFirstName(), is("Mary")); + assertThat(p.getAge(), is(1)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void testFindAllAndRemoveFullyReturnsAndRemovesDocuments() { + + Sample spring = new Sample("100", "spring"); + Sample data = new Sample("200", "data"); + Sample mongodb = new Sample("300", "mongodb"); + template.insert(Arrays.asList(spring, data, mongodb), Sample.class).then().block(); + + Query qry = query(where("field").in("spring", "mongodb")); + + TestSubscriber testSubscriber = TestSubscriber.create(); + template.findAllAndRemove(qry, Sample.class).subscribe(testSubscriber); + + testSubscriber.awaitAndAssertNextValueCount(2); + testSubscriber.assertValues(spring, mongodb); + + assertThat(template.findOne(new Query(), Sample.class).block(), is(equalTo(data))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test(expected = OptimisticLockingFailureException.class) + public void optimisticLockingHandling() { + + // Init version + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.age = 29; + person.firstName = "Patryk"; + template.save(person).block(); + + List result = Flux + .from(template.findAll(PersonWithVersionPropertyOfTypeInteger.class)).collectList().block(); + + assertThat(result, hasSize(1)); + assertThat(result.get(0).version, is(0)); + + // Version change + person = result.get(0); + person.firstName = "Patryk2"; + + template.save(person).block(); + + assertThat(person.version, is(1)); + + result = Flux.from(template.findAll(PersonWithVersionPropertyOfTypeInteger.class)).collectList().block(); + + assertThat(result, hasSize(1)); + 
assertThat(result.get(0).version, is(1)); + + // Optimistic lock exception + person.version = 0; + person.firstName = "Patryk3"; + + template.save(person).block(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void optimisticLockingHandlingWithExistingId() { + + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.id = new ObjectId().toString(); + person.age = 29; + person.firstName = "Patryk"; + template.save(person); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void doesNotFailOnVersionInitForUnversionedEntity() { + + org.bson.Document dbObject = new org.bson.Document(); + dbObject.put("firstName", "Oliver"); + + template.insert(dbObject, template.determineCollectionName(PersonWithVersionPropertyOfTypeInteger.class)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void removesObjectFromExplicitCollection() { + + String collectionName = "explicit"; + template.remove(new Query(), collectionName).block(); + + PersonWithConvertedId person = new PersonWithConvertedId(); + person.name = "Dave"; + template.save(person, collectionName).block(); + assertThat(template.findAll(PersonWithConvertedId.class, collectionName).next().block(), is(notNullValue())); + + template.remove(person, collectionName).block(); + assertThat(template.findAll(PersonWithConvertedId.class, collectionName).next().block(), is(nullValue())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void savesMapCorrectly() { + + Map map = new HashMap<>(); + map.put("key", "value"); + + template.save(map, "maps").block(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test(expected = MappingException.class) + public void savesMongoPrimitiveObjectCorrectly() { + template.save(new Object(), "collection").block(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test(expected = IllegalArgumentException.class) + public void rejectsNullObjectToBeSaved() { + template.save((Object) null); + } + + /** + * @see DATAMONGO-1444 + */ + 
@Test + public void savesPlainDbObjectCorrectly() { + + org.bson.Document dbObject = new org.bson.Document("foo", "bar"); + template.save(dbObject, "collection").block(); + + assertThat(dbObject.containsKey("_id"), is(true)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test(expected = InvalidDataAccessApiUsageException.class) + public void rejectsPlainObjectWithOutExplicitCollection() { + + org.bson.Document dbObject = new org.bson.Document("foo", "bar"); + template.save(dbObject, "collection").block(); + + template.findById(dbObject.get("_id"), org.bson.Document.class).block(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void readsPlainDbObjectById() { + + org.bson.Document dbObject = new org.bson.Document("foo", "bar"); + template.save(dbObject, "collection").block(); + + org.bson.Document result = template.findById(dbObject.get("_id"), org.bson.Document.class, "collection").block(); + assertThat(result.get("foo"), is(dbObject.get("foo"))); + assertThat(result.get("_id"), is(dbObject.get("_id"))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void geoNear() { + + List venues = Arrays.asList(new Venue("Penn Station", -73.99408, 40.75057), // + new Venue("10gen Office", -73.99171, 40.738868), // + new Venue("Flatiron Building", -73.988135, 40.741404), // + new Venue("Maplewood, NJ", -74.2713, 40.73137)); + + template.insertAll(venues).blockLast(); + template.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2D)); + + NearQuery geoFar = NearQuery.near(-73, 40, Metrics.KILOMETERS).num(10).maxDistance(150, Metrics.KILOMETERS); + + template.geoNear(geoFar, Venue.class) // + .subscribeWith(TestSubscriber.create()) // + .awaitAndAssertNextValueCount(4); + + NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).num(10).maxDistance(120, Metrics.KILOMETERS); + + template.geoNear(geoNear, Venue.class) // + .subscribeWith(TestSubscriber.create()) // + .await() // + .assertValueCount(3); + } + + 
/** + * @see DATAMONGO-1444 + */ + @Test + public void writesPlainString() { + template.save("{ 'foo' : 'bar' }", "collection").block(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test(expected = MappingException.class) + public void rejectsNonJsonStringForSave() { + template.save("Foobar!", "collection").block(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void initializesVersionOnInsert() { + + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.firstName = "Dave"; + + template.insert(person).block(); + + assertThat(person.version, is(0)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void initializesVersionOnBatchInsert() { + + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.firstName = "Dave"; + + template.insertAll(Collections.singletonList(person)).next().block(); + + assertThat(person.version, is(0)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void queryCantBeNull() { + + List result = Flux + .from(template.findAll(PersonWithIdPropertyOfTypeObjectId.class)).collectList().block(); + assertThat(template.find(null, PersonWithIdPropertyOfTypeObjectId.class).collectList().block(), is(result)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void versionsObjectIntoDedicatedCollection() { + + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.firstName = "Dave"; + + template.save(person, "personX").block(); + assertThat(person.version, is(0)); + + template.save(person, "personX").block(); + assertThat(person.version, is(1)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void correctlySetsLongVersionProperty() { + + PersonWithVersionPropertyOfTypeLong person = new PersonWithVersionPropertyOfTypeLong(); + person.firstName = "Dave"; + + template.save(person).block(); + assertThat(person.version, is(0L)); + } + + /** + * @see DATAMONGO-1444 + */ + 
@Test + public void throwsExceptionForIndexViolationIfConfigured() { + + ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory); + template.setWriteResultChecking(WriteResultChecking.EXCEPTION); + template.reactiveIndexOps(Person.class).ensureIndex(new Index().on("firstName", Direction.DESC).unique()).block(); + + Person person = new Person(new ObjectId(), "Amol"); + person.setAge(28); + + template.save(person).block(); + + person = new Person(new ObjectId(), "Amol"); + person.setAge(28); + + try { + template.save(person).block(); + fail("Expected DataIntegrityViolationException!"); + } catch (DataIntegrityViolationException e) { + assertThat(e.getMessage(), containsString("E11000 duplicate key error")); + } + } + + /** + * @see DATAMONGO-1444 + */ + @Test(expected = DuplicateKeyException.class) + public void preventsDuplicateInsert() { + + template.setWriteConcern(WriteConcern.MAJORITY); + + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.firstName = "Dave"; + + template.save(person).block(); + assertThat(person.version, is(0)); + + person.version = null; + template.save(person).block(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void countAndFindWithoutTypeInformation() { + + Person person = new Person(); + template.save(person).block(); + + Query query = query(where("_id").is(person.getId())); + String collectionName = template.getCollectionName(Person.class); + + assertThat(Flux.from(template.find(query, HashMap.class, collectionName)).collectList().block(), hasSize(1)); + assertThat(template.count(query, collectionName).block(), is(1L)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void nullsPropertiesForVersionObjectUpdates() { + + VersionedPerson person = new VersionedPerson(); + person.firstname = "Dave"; + person.lastname = "Matthews"; + + template.save(person).block(); + assertThat(person.id, is(notNullValue())); + + person.lastname = null; + 
template.save(person).block(); + + person = template.findOne(query(where("id").is(person.id)), VersionedPerson.class).block(); + assertThat(person.lastname, is(nullValue())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void nullsValuesForUpdatesOfUnversionedEntity() { + + Person person = new Person("Dave"); + template.save(person).block(); + + person.setFirstName(null); + template.save(person).block(); + + person = template.findOne(query(where("id").is(person.getId())), Person.class).block(); + assertThat(person.getFirstName(), is(nullValue())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void savesJsonStringCorrectly() { + + org.bson.Document dbObject = new org.bson.Document().append("first", "first").append("second", "second"); + + template.save(dbObject, "collection").block(); + + org.bson.Document result = template.findAll(org.bson.Document.class, "collection").next().block(); + assertThat(result.containsKey("first"), is(true)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void executesExistsCorrectly() { + + Sample sample = new Sample(); + template.save(sample).block(); + + Query query = query(where("id").is(sample.id)); + + assertThat(template.exists(query, Sample.class).block(), is(true)); + assertThat(template.exists(query(where("_id").is(sample.id)), template.getCollectionName(Sample.class)).block(), + is(true)); + assertThat(template.exists(query, Sample.class, template.getCollectionName(Sample.class)).block(), is(true)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void tailStreamsData() throws InterruptedException { + + template.dropCollection("capped").block(); + template.createCollection("capped", new CollectionOptions(1000, 10, true)).block(); + template.insert(new Document("random", Math.random()).append("key", "value"), "capped").block(); + + BlockingQueue documents = new LinkedBlockingQueue<>(1000); + + Flux capped = template.tail(null, Document.class, "capped"); + + Cancellation cancellation = 
capped.doOnNext(documents::add).subscribe(); + + assertThat(documents.poll(5, TimeUnit.SECONDS), is(notNullValue())); + assertThat(documents.isEmpty(), is(true)); + + cancellation.dispose(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void tailStreamsDataUntilCancellation() throws InterruptedException { + + template.dropCollection("capped").block(); + template.createCollection("capped", new CollectionOptions(1000, 10, true)).block(); + template.insert(new Document("random", Math.random()).append("key", "value"), "capped").block(); + + BlockingQueue documents = new LinkedBlockingQueue<>(1000); + + Flux capped = template.tail(null, Document.class, "capped"); + + Cancellation cancellation = capped.doOnNext(documents::add).subscribe(); + + assertThat(documents.poll(5, TimeUnit.SECONDS), is(notNullValue())); + assertThat(documents.isEmpty(), is(true)); + + template.insert(new Document("random", Math.random()).append("key", "value"), "capped").block(); + assertThat(documents.poll(5, TimeUnit.SECONDS), is(notNullValue())); + + cancellation.dispose(); + + template.insert(new Document("random", Math.random()).append("key", "value"), "capped").block(); + assertThat(documents.poll(1, TimeUnit.SECONDS), is(nullValue())); + } + + @Data + static class Sample { + + @Id String id; + String field; + + public Sample() {} + + public Sample(String id, String field) { + this.id = id; + this.field = field; + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java new file mode 100644 index 000000000..2cb9d3338 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java @@ -0,0 +1,86 @@ +/* + * Copyright 2016 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.core; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Unit tests for {@link ReactiveMongoTemplate}. 
+ * + * @author Mark Paluch + */ +@RunWith(MockitoJUnitRunner.class) +public class ReactiveMongoTemplateUnitTests { + + ReactiveMongoTemplate template; + + @Mock SimpleReactiveMongoDatabaseFactory factory; + @Mock MongoClient mongoClient; + + MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator(); + MappingMongoConverter converter; + MongoMappingContext mappingContext; + + @Before + public void setUp() { + + when(factory.getExceptionTranslator()).thenReturn(exceptionTranslator); + + this.mappingContext = new MongoMappingContext(); + this.converter = new MappingMongoConverter(new NoOpDbRefResolver(), mappingContext); + this.template = new ReactiveMongoTemplate(factory, converter); + } + + /** + * @see DATAMONGO-1444 + */ + @Test(expected = IllegalArgumentException.class) + public void rejectsNullDatabaseName() throws Exception { + new ReactiveMongoTemplate(mongoClient, null); + } + + /** + * @see DATAMONGO-1444 + */ + @Test(expected = IllegalArgumentException.class) + public void rejectsNullMongo() throws Exception { + new ReactiveMongoTemplate(null, "database"); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void defaultsConverterToMappingMongoConverter() throws Exception { + ReactiveMongoTemplate template = new ReactiveMongoTemplate(mongoClient, "database"); + assertTrue(ReflectionTestUtils.getField(template, "mongoConverter") instanceof MappingMongoConverter); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UnwrapAndReadDocumentCallbackUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UnwrapAndReadDocumentCallbackUnitTests.java index c0cf0dcd2..5145b4c51 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UnwrapAndReadDocumentCallbackUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UnwrapAndReadDocumentCallbackUnitTests.java @@ -17,6 +17,7 @@ package 
org.springframework.data.mongodb.core; import static org.hamcrest.CoreMatchers.*; import static org.junit.Assert.*; +import static org.mockito.Mockito.when; import org.bson.Document; import org.junit.Before; @@ -32,7 +33,7 @@ import org.springframework.data.mongodb.core.mapping.MongoMappingContext; /** * Unit tests for {@link UnwrapAndReadDocumentCallback}. - * + * * @author Oliver Gierke * @author Mark Paluch */ @@ -40,12 +41,15 @@ import org.springframework.data.mongodb.core.mapping.MongoMappingContext; public class UnwrapAndReadDocumentCallbackUnitTests { @Mock MongoDbFactory factory; + @Mock MongoExceptionTranslator exceptionTranslatorMock; UnwrapAndReadDocumentCallback callback; @Before public void setUp() { + when(factory.getExceptionTranslator()).thenReturn(exceptionTranslatorMock); + MongoTemplate template = new MongoTemplate(factory); MappingMongoConverter converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), new MongoMappingContext()); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index 566a446c3..75b65ff86 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -259,7 +259,7 @@ public class MappingMongoConverterUnitTests { List enums = (List) result.get("enums"); assertThat(enums.size(), is(1)); - assertThat((String) enums.get(0), is("FIRST")); + assertThat(enums.get(0), is("FIRST")); } /** @@ -387,7 +387,7 @@ public class MappingMongoConverterUnitTests { Object localeField = document.get("locale"); assertThat(localeField, is(instanceOf(String.class))); - assertThat((String) localeField, is("en_US")); + assertThat(localeField, is("en_US")); 
LocaleWrapper read = converter.read(LocaleWrapper.class, document); assertThat(read.locale, is(Locale.US)); @@ -584,7 +584,7 @@ public class MappingMongoConverterUnitTests { public void convertsObjectsIfNecessary() { ObjectId id = new ObjectId(); - assertThat(converter.convertToMongoType(id), is((Object) id)); + assertThat(converter.convertToMongoType(id), is(id)); } /** @@ -608,7 +608,7 @@ public class MappingMongoConverterUnitTests { List value = (List) foo; assertThat(value.size(), is(1)); - assertThat((String) value.get(0), is("Bar")); + assertThat(value.get(0), is("Bar")); } /** @@ -647,7 +647,7 @@ public class MappingMongoConverterUnitTests { BasicDBList value = (BasicDBList) foo; assertThat(value.size(), is(1)); - assertThat((String) value.get(0), is("Bar")); + assertThat(value.get(0), is("Bar")); } /** @@ -677,7 +677,7 @@ public class MappingMongoConverterUnitTests { ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, source); Object firstObjectInFoo = ((List) result.mapOfObjects.get("Foo")).get(0); assertThat(firstObjectInFoo, is(instanceOf(Map.class))); - assertThat((String) ((Map) firstObjectInFoo).get("Hello"), is(equalTo("World"))); + assertThat(((Map) firstObjectInFoo).get("Hello"), is(equalTo("World"))); } /** @@ -697,7 +697,7 @@ public class MappingMongoConverterUnitTests { assertThat(foo, is(instanceOf(Map.class))); Object doublyNestedObject = ((Map) foo).get("nested"); assertThat(doublyNestedObject, is(instanceOf(Map.class))); - assertThat((String) ((Map) doublyNestedObject).get("Hello"), is(equalTo("World"))); + assertThat(((Map) doublyNestedObject).get("Hello"), is(equalTo("World"))); } /** @@ -719,7 +719,7 @@ public class MappingMongoConverterUnitTests { assertThat(firstObjectInFoo, is(instanceOf(Map.class))); Object doublyNestedObject = ((Map) firstObjectInFoo).get("nested"); assertThat(doublyNestedObject, is(instanceOf(Map.class))); - assertThat((String) ((Map) doublyNestedObject).get("Hello"), is(equalTo("World"))); + 
assertThat(((Map) doublyNestedObject).get("Hello"), is(equalTo("World"))); } /** @@ -784,7 +784,7 @@ public class MappingMongoConverterUnitTests { BasicDBList list = (BasicDBList) result.get("Foo"); assertThat(list.size(), is(1)); - assertThat(list.get(0), is((Object) Locale.US.toString())); + assertThat(list.get(0), is(Locale.US.toString())); } /** @@ -886,17 +886,17 @@ public class MappingMongoConverterUnitTests { converter.write(a, result); - assertThat((String) result.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(A.class.getName())); - assertThat((String) result.get("valueType"), is(HashMap.class.getName())); + assertThat(result.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(A.class.getName())); + assertThat(result.get("valueType"), is(HashMap.class.getName())); org.bson.Document object = (org.bson.Document) result.get("value"); assertThat(object, is(notNullValue())); org.bson.Document inner = (org.bson.Document) object.get("test"); assertThat(inner, is(notNullValue())); - assertThat((String) inner.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(A.class.getName())); - assertThat((String) inner.get("valueType"), is(String.class.getName())); - assertThat((String) inner.get("value"), is("testValue")); + assertThat(inner.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(A.class.getName())); + assertThat(inner.get("valueType"), is(String.class.getName())); + assertThat(inner.get("value"), is("testValue")); } @Test @@ -908,7 +908,7 @@ public class MappingMongoConverterUnitTests { org.bson.Document result = new org.bson.Document(); converter.write(value, result); - assertThat(result.get("_id"), is((Object) 5)); + assertThat(result.get("_id"), is(5)); } /** @@ -919,7 +919,7 @@ public class MappingMongoConverterUnitTests { public void writesNullValuesForCollection() { CollectionWrapper wrapper = new CollectionWrapper(); - wrapper.contacts = Arrays. 
asList(new Person(), null); + wrapper.contacts = Arrays.asList(new Person(), null); org.bson.Document result = new org.bson.Document(); converter.write(wrapper, result); @@ -1161,7 +1161,7 @@ public class MappingMongoConverterUnitTests { converter.write(wrapper, sink); - assertThat(sink.get("url"), is((Object) "http://springsource.org")); + assertThat(sink.get("url"), is("http://springsource.org")); } /** @@ -1192,7 +1192,7 @@ public class MappingMongoConverterUnitTests { Object idField = document.get("_id"); assertThat(idField, is(notNullValue())); assertThat(idField, is(instanceOf(org.bson.Document.class))); - assertThat(((org.bson.Document) idField).get("innerId"), is((Object) 4711L)); + assertThat(((org.bson.Document) idField).get("innerId"), is(4711L)); } /** @@ -1544,7 +1544,7 @@ public class MappingMongoConverterUnitTests { org.bson.Document map = getAsDocument(result, "treeMapOfPersons"); org.bson.Document entry = getAsDocument(map, "key"); - assertThat(entry.get("foo"), is((Object) "Dave")); + assertThat(entry.get("foo"), is("Dave")); } /** @@ -1771,7 +1771,7 @@ public class MappingMongoConverterUnitTests { ClassWithGeoShape result = converter.read(ClassWithGeoShape.class, document); assertThat(result, is(notNullValue())); - assertThat(result.shape, is((Shape) sphere)); + assertThat(result.shape, is(sphere)); } /** @@ -1883,8 +1883,8 @@ public class MappingMongoConverterUnitTests { org.bson.Document sink = new org.bson.Document(); converter.write(source, sink); - assertThat((String) sink.get("_id"), is("rootId")); - assertThat((org.bson.Document) sink.get("nested"), is(new org.bson.Document().append("id", "nestedId"))); + assertThat(sink.get("_id"), is("rootId")); + assertThat(sink.get("nested"), is(new org.bson.Document().append("id", "nestedId"))); } /** @@ -1973,7 +1973,7 @@ public class MappingMongoConverterUnitTests { converter.write(type, result); - assertThat(getAsDocument(result, "string"), is((org.bson.Document) new org.bson.Document())); + 
assertThat(getAsDocument(result, "string"), is(new org.bson.Document())); org.bson.Document localDateTime = getAsDocument(result, "localDateTime"); assertThat(localDateTime.get("value"), is(instanceOf(Date.class))); @@ -2103,7 +2103,7 @@ public class MappingMongoConverterUnitTests { EnumMap enumMap; } - static enum SampleEnum { + enum SampleEnum { FIRST { @Override void method() {} @@ -2118,7 +2118,7 @@ public class MappingMongoConverterUnitTests { abstract void method(); } - static interface InterfaceType { + interface InterfaceType { } @@ -2396,8 +2396,8 @@ public class MappingMongoConverterUnitTests { static class ClassWithMapUsingEnumAsKey { - static enum FooBarEnum { - FOO, BAR; + enum FooBarEnum { + FOO, BAR } Map map; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests.java index 529828fe4..46a976d71 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests.java @@ -36,6 +36,7 @@ import org.springframework.dao.DataIntegrityViolationException; import org.springframework.data.annotation.Id; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.SimpleMongoDbFactory; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder; import org.springframework.data.mongodb.core.mapping.Document; @@ -51,7 +52,7 @@ import com.mongodb.MongoCommandException; /** * Integration tests for {@link 
MongoPersistentEntityIndexCreator}. - * + * * @author Oliver Gierke * @author Christoph Strobl * @author Thomas Darimont @@ -107,8 +108,10 @@ public class MongoPersistentEntityIndexCreatorIntegrationTests { expectedException.expectMessage("lastname"); expectedException.expectCause(IsInstanceOf. instanceOf(MongoCommandException.class)); + MongoTemplate mongoTemplate = new MongoTemplate(new MongoClient(), "issue"); + MongoPersistentEntityIndexCreator indexCreator = new MongoPersistentEntityIndexCreator(new MongoMappingContext(), - new SimpleMongoDbFactory(new MongoClient(), "issue")); + mongoTemplate); indexCreator.createIndex(new IndexDefinitionHolder("dalinar.kohlin", new Index().named("stormlight") .on("lastname", Direction.ASC).unique(), "datamongo-1125")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorUnitTests.java index e79d1fbc7..3167dfc1e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorUnitTests.java @@ -38,7 +38,9 @@ import org.springframework.dao.DataAccessException; import org.springframework.data.geo.Point; import org.springframework.data.mapping.context.MappingContextEvent; import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.core.DefaultIndexOperations; import org.springframework.data.mongodb.core.MongoExceptionTranslator; +import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; @@ -52,12 +54,13 @@ import 
com.mongodb.client.model.IndexOptions; /** * Unit tests for {@link MongoPersistentEntityIndexCreator}. - * + * * @author Oliver Gierke * @author Philipp Schneider * @author Johno Crawford * @author Christoph Strobl * @author Thomas Darimont + * @author Mark Paluch */ @RunWith(MockitoJUnitRunner.class) public class MongoPersistentEntityIndexCreatorUnitTests { @@ -65,7 +68,8 @@ public class MongoPersistentEntityIndexCreatorUnitTests { private @Mock MongoDbFactory factory; private @Mock ApplicationContext context; private @Mock MongoDatabase db; - private @Mock MongoCollection collection; + private @Mock MongoCollection collection; + private MongoTemplate mongoTemplate; ArgumentCaptor keysCaptor; ArgumentCaptor optionsCaptor; @@ -79,7 +83,10 @@ public class MongoPersistentEntityIndexCreatorUnitTests { collectionCaptor = ArgumentCaptor.forClass(String.class); when(factory.getDb()).thenReturn(db); - when(db.getCollection(collectionCaptor.capture(), eq(Document.class))).thenReturn(collection); + when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + when(db.getCollection(collectionCaptor.capture())).thenReturn(collection); + + mongoTemplate = new MongoTemplate(factory); when(collection.createIndex(keysCaptor.capture(), optionsCaptor.capture())).thenReturn("OK"); } @@ -89,7 +96,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests { MongoMappingContext mappingContext = prepareMappingContext(Person.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); assertThat(keysCaptor.getValue(), is(notNullValue())); assertThat(keysCaptor.getValue().keySet(), hasItem("fieldname")); @@ -104,7 +111,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests { MongoMappingContext mappingContext = new MongoMappingContext(); MongoMappingContext personMappingContext = prepareMappingContext(Person.class); - MongoPersistentEntityIndexCreator creator = new 
MongoPersistentEntityIndexCreator(mappingContext, factory); + MongoPersistentEntityIndexCreator creator = new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); MongoPersistentEntity entity = personMappingContext.getPersistentEntity(Person.class); MappingContextEvent, MongoPersistentProperty> event = new MappingContextEvent, MongoPersistentProperty>( @@ -124,7 +131,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests { MongoMappingContext mappingContext = new MongoMappingContext(); mappingContext.initialize(); - MongoPersistentEntityIndexCreator creator = new MongoPersistentEntityIndexCreator(mappingContext, factory); + MongoPersistentEntityIndexCreator creator = new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); assertThat(creator.isIndexCreatorFor(mappingContext), is(true)); assertThat(creator.isIndexCreatorFor(new MongoMappingContext()), is(false)); } @@ -136,7 +143,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests { public void triggersBackgroundIndexingIfConfigured() { MongoMappingContext mappingContext = prepareMappingContext(AnotherPerson.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); assertThat(keysCaptor.getValue(), is(notNullValue())); assertThat(keysCaptor.getValue().keySet(), hasItem("lastname")); @@ -152,7 +159,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests { public void expireAfterSecondsIfConfigured() { MongoMappingContext mappingContext = prepareMappingContext(Milk.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); assertThat(keysCaptor.getValue(), is(notNullValue())); assertThat(keysCaptor.getValue().keySet(), hasItem("expiry")); @@ -166,7 +173,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests { public void createsNotNestedGeoSpatialIndexCorrectly() { MongoMappingContext 
mappingContext = prepareMappingContext(Wrapper.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); assertThat(keysCaptor.getValue(), equalTo(new org.bson.Document().append("company.address.location", "2d"))); @@ -184,7 +191,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests { public void autoGeneratedIndexNameShouldGenerateNoName() { MongoMappingContext mappingContext = prepareMappingContext(EntityWithGeneratedIndexName.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); assertThat(keysCaptor.getValue().containsKey("name"), is(false)); assertThat(keysCaptor.getValue().keySet(), hasItem("lastname")); @@ -199,11 +206,11 @@ public class MongoPersistentEntityIndexCreatorUnitTests { public void indexCreationShouldNotCreateNewCollectionForNestedGeoSpatialIndexStructures() { MongoMappingContext mappingContext = prepareMappingContext(Wrapper.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); ArgumentCaptor collectionNameCapturer = ArgumentCaptor.forClass(String.class); - verify(db, times(1)).getCollection(collectionNameCapturer.capture(), eq(Document.class)); + verify(db, times(1)).getCollection(collectionNameCapturer.capture()); assertThat(collectionNameCapturer.getValue(), equalTo("wrapper")); } @@ -214,11 +221,11 @@ public class MongoPersistentEntityIndexCreatorUnitTests { public void indexCreationShouldNotCreateNewCollectionForNestedIndexStructures() { MongoMappingContext mappingContext = prepareMappingContext(IndexedDocumentWrapper.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); ArgumentCaptor collectionNameCapturer = ArgumentCaptor.forClass(String.class); - verify(db, 
times(1)).getCollection(collectionNameCapturer.capture(), eq(Document.class)); + verify(db, times(1)).getCollection(collectionNameCapturer.capture()); assertThat(collectionNameCapturer.getValue(), equalTo("indexedDocumentWrapper")); } @@ -234,7 +241,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests { MongoMappingContext mappingContext = prepareMappingContext(Person.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); } /** @@ -249,7 +256,7 @@ public class MongoPersistentEntityIndexCreatorUnitTests { MongoMappingContext mappingContext = prepareMappingContext(Person.class); - new MongoPersistentEntityIndexCreator(mappingContext, factory); + new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); } private static MongoMappingContext prepareMappingContext(Class type) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java new file mode 100644 index 000000000..aeee6fd4f --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java @@ -0,0 +1,995 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.performance; + +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.util.Assert.*; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.text.DecimalFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.Before; +import org.junit.Test; +import org.springframework.core.Constants; +import org.springframework.core.convert.support.GenericConversionService; +import org.springframework.data.annotation.PersistenceConstructor; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.convert.DbRefProxyHandler; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.DbRefResolverCallback; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.ReactiveMongoRepository; +import 
org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory; +import org.springframework.util.StopWatch; +import org.springframework.util.StringUtils; + +import com.mongodb.BasicDBList; +import com.mongodb.BasicDBObject; +import com.mongodb.DBRef; +import com.mongodb.WriteConcern; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoClients; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + + +/** + * Test class to execute performance tests for plain Reactive Streams MongoDB driver usage, {@link ReactiveMongoOperations} and the repositories + * abstraction. + * + * @author Mark Paluch + */ +public class ReactivePerformanceTests { + + private static final String DATABASE_NAME = "performance"; + private static final int NUMBER_OF_PERSONS = 300; + private static final int ITERATIONS = 50; + private static final StopWatch watch = new StopWatch(); + private static final Collection IGNORED_WRITE_CONCERNS = Arrays.asList("MAJORITY", "REPLICAS_SAFE", + "FSYNC_SAFE", "FSYNCED", "JOURNAL_SAFE", "JOURNALED", "REPLICA_ACKNOWLEDGED"); + private static final int COLLECTION_SIZE = 1024 * 1024 * 256; // 256 MB + private static final Collection COLLECTION_NAMES = Arrays.asList("template", "driver", "person"); + + MongoClient mongo; + ReactiveMongoTemplate operations; + ReactivePersonRepository repository; + MongoConverter converter; + + @Before + public void setUp() throws Exception { + + this.mongo = MongoClients.create(); + + SimpleReactiveMongoDatabaseFactory mongoDbFactory = new SimpleReactiveMongoDatabaseFactory(this.mongo, DATABASE_NAME); + + MongoMappingContext context = new MongoMappingContext(); + context.setInitialEntitySet(Collections.singleton(Person.class)); + context.afterPropertiesSet(); + + this.converter = new MappingMongoConverter(new DbRefResolver() { + @Override + 
public Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback, DbRefProxyHandler proxyHandler) { + return null; + } + + @Override + public DBRef createDbRef(org.springframework.data.mongodb.core.mapping.DBRef annotation, MongoPersistentEntity entity, Object id) { + return null; + } + + @Override + public Document fetch(DBRef dbRef) { + return null; + } + + @Override + public List bulkFetch(List dbRefs) { + return null; + } + }, context); + this.operations = new ReactiveMongoTemplate(mongoDbFactory, converter); + + ReactiveMongoRepositoryFactory factory = new ReactiveMongoRepositoryFactory(operations); + factory.setConversionService(new GenericConversionService()); + + this.repository = factory.getRepository(ReactivePersonRepository.class); + + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void writeWithWriteConcerns() { + executeWithWriteConcerns(new WriteConcernCallback() { + public void doWithWriteConcern(String constantName, WriteConcern concern) { + writeHeadline("WriteConcern: " + constantName); + System.out.println(String.format("Writing %s objects using plain driver took %sms", NUMBER_OF_PERSONS, + writingObjectsUsingPlainDriver(NUMBER_OF_PERSONS, concern))); + System.out.println(String.format("Writing %s objects using template took %sms", NUMBER_OF_PERSONS, + writingObjectsUsingMongoTemplate(NUMBER_OF_PERSONS, concern))); + System.out.println(String.format("Writing %s objects using repository took %sms", NUMBER_OF_PERSONS, + writingObjectsUsingRepositories(NUMBER_OF_PERSONS, concern))); + + System.out.println(String.format("Writing %s objects async using plain driver took %sms", NUMBER_OF_PERSONS, + writingAsyncObjectsUsingPlainDriver(NUMBER_OF_PERSONS, concern))); + System.out.println(String.format("Writing %s objects async using template took %sms", NUMBER_OF_PERSONS, + writingAsyncObjectsUsingMongoTemplate(NUMBER_OF_PERSONS, concern))); + System.out.println(String.format("Writing %s objects async using 
repository took %sms", NUMBER_OF_PERSONS, + writingAsyncObjectsUsingRepositories(NUMBER_OF_PERSONS, concern))); + writeFooter(); + } + }); + } + + @Test + public void plainConversion() throws InterruptedException { + + Statistics statistics = new Statistics( + "Plain conversion of " + NUMBER_OF_PERSONS * 100 + " persons - After %s iterations"); + + List dbObjects = getPersonDocuments(NUMBER_OF_PERSONS * 100); + + for (int i = 0; i < ITERATIONS; i++) { + statistics.registerTime(Api.DIRECT, Mode.READ, convertDirectly(dbObjects)); + statistics.registerTime(Api.CONVERTER, Mode.READ, convertUsingConverter(dbObjects)); + } + + statistics.printResults(ITERATIONS); + } + + private long convertDirectly(final List dbObjects) { + + executeWatched(new WatchCallback>() { + + @Override + public List doInWatch() { + + List persons = new ArrayList(); + + for (Document dbObject : dbObjects) { + persons.add(Person.from(new Document(dbObject))); + } + + return persons; + } + }); + + return watch.getLastTaskTimeMillis(); + } + + private long convertUsingConverter(final List dbObjects) { + + executeWatched(new WatchCallback>() { + + @Override + public List doInWatch() { + + List persons = new ArrayList(); + + for (Document dbObject : dbObjects) { + persons.add(converter.read(Person.class, dbObject)); + } + + return persons; + } + }); + + return watch.getLastTaskTimeMillis(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void writeAndRead() throws Exception { + + readsAndWrites(NUMBER_OF_PERSONS, ITERATIONS, WriteConcern.SAFE); + } + + private void readsAndWrites(int numberOfPersons, int iterations, WriteConcern concern) { + + Statistics statistics = new Statistics("Reading " + numberOfPersons + " - After %s iterations"); + + for (int i = 0; i < iterations; i++) { + + setupCollections(); + + statistics.registerTime(Api.DRIVER, Mode.WRITE, writingObjectsUsingPlainDriver(numberOfPersons, concern)); + statistics.registerTime(Api.TEMPLATE, Mode.WRITE, 
writingObjectsUsingMongoTemplate(numberOfPersons, concern)); + statistics.registerTime(Api.REPOSITORY, Mode.WRITE, writingObjectsUsingRepositories(numberOfPersons, concern)); + + statistics.registerTime(Api.DRIVER, Mode.WRITE_ASYNC, writingAsyncObjectsUsingPlainDriver(numberOfPersons, concern)); + statistics.registerTime(Api.TEMPLATE, Mode.WRITE_ASYNC, writingAsyncObjectsUsingMongoTemplate(numberOfPersons, concern)); + statistics.registerTime(Api.REPOSITORY, Mode.WRITE_ASYNC, writingAsyncObjectsUsingRepositories(numberOfPersons, concern)); + + statistics.registerTime(Api.DRIVER, Mode.READ, readingUsingPlainDriver()); + statistics.registerTime(Api.TEMPLATE, Mode.READ, readingUsingTemplate()); + statistics.registerTime(Api.REPOSITORY, Mode.READ, readingUsingRepository()); + + statistics.registerTime(Api.DRIVER, Mode.QUERY, queryUsingPlainDriver()); + statistics.registerTime(Api.TEMPLATE, Mode.QUERY, queryUsingTemplate()); + statistics.registerTime(Api.REPOSITORY, Mode.QUERY, queryUsingRepository()); + + if (i > 0 && i % (iterations / 10) == 0) { + statistics.printResults(i); + } + } + + statistics.printResults(iterations); + } + + private void writeHeadline(String headline) { + System.out.println(headline); + System.out.println(createUnderline(headline)); + } + + private void writeFooter() { + System.out.println(); + } + + private long queryUsingTemplate() { + executeWatched(new WatchCallback>() { + public List doInWatch() { + Query query = query(where("addresses.zipCode").regex(".*1.*")); + return operations.find(query, Person.class, "template").collectList().block(); + } + }); + + return watch.getLastTaskTimeMillis(); + } + + private long queryUsingRepository() { + executeWatched(new WatchCallback>() { + public List doInWatch() { + return repository.findByAddressesZipCodeContaining("1").collectList().block(); + } + }); + + return watch.getLastTaskTimeMillis(); + } + + private void executeWithWriteConcerns(WriteConcernCallback callback) { + + Constants constants = 
new Constants(WriteConcern.class); + + for (String constantName : constants.getNames(null)) { + + if (IGNORED_WRITE_CONCERNS.contains(constantName)) { + continue; + } + + WriteConcern writeConcern = (WriteConcern) constants.asObject(constantName); + + setupCollections(); + + callback.doWithWriteConcern(constantName, writeConcern); + } + } + + private void setupCollections() { + + MongoDatabase db = this.mongo.getDatabase(DATABASE_NAME); + + for (String collectionName : COLLECTION_NAMES) { + MongoCollection collection = db.getCollection(collectionName); + Mono.from(collection.drop()).block(); + Mono.from(db.createCollection(collectionName, getCreateCollectionOptions())).block(); + collection.createIndex(new BasicDBObject("firstname", -1)); + collection.createIndex(new BasicDBObject("lastname", -1)); + } + } + + private CreateCollectionOptions getCreateCollectionOptions() { + CreateCollectionOptions options = new CreateCollectionOptions(); + return options.sizeInBytes(COLLECTION_SIZE).capped(false); + } + + private long writingObjectsUsingPlainDriver(int numberOfPersons, WriteConcern concern) { + + final MongoCollection collection = mongo.getDatabase(DATABASE_NAME).getCollection("driver").withWriteConcern(concern); + final List persons = getPersonObjects(numberOfPersons); + + executeWatched(new WatchCallback() { + public Void doInWatch() { + for (Person person : persons) { + Mono.from(collection.insertOne(new Document(person.toDocument()))).block(); + } + return null; + } + }); + + return watch.getLastTaskTimeMillis(); + } + + private long writingObjectsUsingRepositories(int numberOfPersons, WriteConcern concern) { + + final List persons = getPersonObjects(numberOfPersons); + operations.setWriteConcern(concern); + executeWatched(new WatchCallback() { + public Void doInWatch() { + for (Person person : persons) { + repository.save(person).block(); + } + return null; + } + }); + + return watch.getLastTaskTimeMillis(); + } + + private long 
writingObjectsUsingMongoTemplate(int numberOfPersons, WriteConcern concern) { + + final List persons = getPersonObjects(numberOfPersons); + + executeWatched(new WatchCallback() { + public Void doInWatch() { + operations.setWriteConcern(concern); + for (Person person : persons) { + Mono.from(operations.save(person, "template")).block(); + } + return null; + } + }); + + return watch.getLastTaskTimeMillis(); + } + + + private long writingAsyncObjectsUsingPlainDriver(int numberOfPersons, WriteConcern concern) { + + final MongoCollection collection = mongo.getDatabase(DATABASE_NAME).getCollection("driver").withWriteConcern(concern); + final List persons = getPersonObjects(numberOfPersons); + + executeWatched(new WatchCallback() { + public Void doInWatch() { + + Flux.from(collection.insertMany(persons.stream().map(person -> new Document(person.toDocument())).collect(Collectors.toList()))).then().block(); + return null; + } + }); + + return watch.getLastTaskTimeMillis(); + } + + private long writingAsyncObjectsUsingRepositories(int numberOfPersons, WriteConcern concern) { + + final List persons = getPersonObjects(numberOfPersons); + operations.setWriteConcern(concern); + executeWatched(new WatchCallback() { + public Void doInWatch() { + repository.save(persons).then().block(); + return null; + } + }); + + return watch.getLastTaskTimeMillis(); + } + + private long writingAsyncObjectsUsingMongoTemplate(int numberOfPersons, WriteConcern concern) { + + final List persons = getPersonObjects(numberOfPersons); + + + executeWatched(new WatchCallback() { + public Void doInWatch() { + operations.setWriteConcern(concern); + Flux.from(operations.insertAll(persons)).then().block(); + return null; + } + }); + + + + return watch.getLastTaskTimeMillis(); + } + + private long readingUsingPlainDriver() { + + executeWatched(new WatchCallback>() { + public List doInWatch() { + return 
Flux.from(mongo.getDatabase(DATABASE_NAME).getCollection("driver").find()).map(Person::from).collectList().block(); + } + }); + + return watch.getLastTaskTimeMillis(); + } + + private long readingUsingTemplate() { + executeWatched(new WatchCallback>() { + public List doInWatch() { + return operations.findAll(Person.class, "template").collectList().block(); + } + }); + + return watch.getLastTaskTimeMillis(); + } + + private long readingUsingRepository() { + executeWatched(new WatchCallback>() { + public List doInWatch() { + return repository.findAll().collectList().block(); + } + }); + + return watch.getLastTaskTimeMillis(); + } + + private long queryUsingPlainDriver() { + + executeWatched(new WatchCallback>() { + public List doInWatch() { + + MongoCollection collection = mongo.getDatabase(DATABASE_NAME).getCollection("driver"); + + Document regex = new Document("$regex", Pattern.compile(".*1.*")); + Document query = new Document("addresses.zipCode", regex); + return Flux.from(collection.find(query)).map(Person::from).collectList().block(); + } + }); + + return watch.getLastTaskTimeMillis(); + } + + private List getPersonObjects(int numberOfPersons) { + + List result = new ArrayList(); + + for (int i = 0; i < numberOfPersons; i++) { + + List
          addresses = new ArrayList
          (); + + for (int a = 0; a < 5; a++) { + addresses.add(new Address("zip" + a, "city" + a)); + } + + Person person = new Person("Firstname" + i, "Lastname" + i, addresses); + + for (int o = 0; o < 10; o++) { + person.orders.add(new Order(LineItem.generate())); + } + + result.add(person); + } + + return result; + } + + private List getPersonDocuments(int numberOfPersons) { + + List dbObjects = new ArrayList(numberOfPersons); + + for (Person person : getPersonObjects(numberOfPersons)) { + dbObjects.add(person.toDocument()); + } + + return dbObjects; + } + + private T executeWatched(WatchCallback callback) { + + watch.start(); + + try { + return callback.doInWatch(); + } finally { + watch.stop(); + } + } + + static class Person { + + ObjectId id; + String firstname, lastname; + List
          addresses; + Set orders; + + public Person(String firstname, String lastname, List
          addresses) { + this.firstname = firstname; + this.lastname = lastname; + this.addresses = addresses; + this.orders = new HashSet(); + } + + public static Person from(Document source) { + + List addressesSource = (List) source.get("addresses"); + List
          addresses = new ArrayList
          (addressesSource.size()); + for (Object addressSource : addressesSource) { + addresses.add(Address.from((Document) addressSource)); + } + + List ordersSource = (List) source.get("orders"); + Set orders = new HashSet(ordersSource.size()); + for (Object orderSource : ordersSource) { + orders.add(Order.from((Document) orderSource)); + } + + Person person = new Person((String) source.get("firstname"), (String) source.get("lastname"), addresses); + person.orders.addAll(orders); + return person; + } + + public Document toDocument() { + + Document dbObject = new Document(); + dbObject.put("firstname", firstname); + dbObject.put("lastname", lastname); + dbObject.put("addresses", writeAll(addresses)); + dbObject.put("orders", writeAll(orders)); + return dbObject; + } + } + + static class Address implements Convertible { + + final String zipCode; + final String city; + final Set types; + + public Address(String zipCode, String city) { + this(zipCode, city, new HashSet(pickRandomNumerOfItemsFrom(Arrays.asList(AddressType.values())))); + } + + @PersistenceConstructor + public Address(String zipCode, String city, Set types) { + this.zipCode = zipCode; + this.city = city; + this.types = types; + } + + public static Address from(Document source) { + String zipCode = (String) source.get("zipCode"); + String city = (String) source.get("city"); + List types = (List) source.get("types"); + + return new Address(zipCode, city, new HashSet(fromList(types, AddressType.class))); + } + + public Document toDocument() { + Document dbObject = new Document(); + dbObject.put("zipCode", zipCode); + dbObject.put("city", city); + dbObject.put("types", toList(types)); + return dbObject; + } + } + + private static > List fromList(List source, Class type) { + + List result = new ArrayList(source.size()); + for (Object object : source) { + result.add(Enum.valueOf(type, object.toString())); + } + return result; + } + + private static > List toList(Collection enums) { + List result = new 
ArrayList<>(); + for (T element : enums) { + result.add(element.toString()); + } + + return result; + } + + static class Order implements Convertible { + + enum Status { + ORDERED, PAYED, SHIPPED + } + + Date createdAt; + List lineItems; + Status status; + + public Order(List lineItems, Date createdAt) { + this.lineItems = lineItems; + this.createdAt = createdAt; + this.status = Status.ORDERED; + } + + @PersistenceConstructor + public Order(List lineItems, Date createdAt, Status status) { + this.lineItems = lineItems; + this.createdAt = createdAt; + this.status = status; + } + + public static Order from(Document source) { + + List lineItemsSource = (List) source.get("lineItems"); + List lineItems = new ArrayList(lineItemsSource.size()); + for (Object lineItemSource : lineItemsSource) { + lineItems.add(LineItem.from((Document) lineItemSource)); + } + + Date date = (Date) source.get("createdAt"); + Status status = Status.valueOf((String) source.get("status")); + return new Order(lineItems, date, status); + } + + public Order(List lineItems) { + this(lineItems, new Date()); + } + + public Document toDocument() { + Document result = new Document(); + result.put("createdAt", createdAt); + result.put("lineItems", writeAll(lineItems)); + result.put("status", status.toString()); + return result; + } + } + + static class LineItem implements Convertible { + + String description; + double price; + int amount; + + public LineItem(String description, int amount, double price) { + this.description = description; + this.amount = amount; + this.price = price; + } + + public static List generate() { + + LineItem iPad = new LineItem("iPad", 1, 649); + LineItem iPhone = new LineItem("iPhone", 1, 499); + LineItem macBook = new LineItem("MacBook", 2, 1299); + + return pickRandomNumerOfItemsFrom(Arrays.asList(iPad, iPhone, macBook)); + } + + public static LineItem from(Document source) { + + String description = (String) source.get("description"); + double price = (Double) 
source.get("price"); + int amount = (Integer) source.get("amount"); + + return new LineItem(description, amount, price); + } + + public Document toDocument() { + + Document dbObject = new Document(); + dbObject.put("description", description); + dbObject.put("price", price); + dbObject.put("amount", amount); + return dbObject; + } + } + + private static List pickRandomNumerOfItemsFrom(List source) { + + isTrue(!source.isEmpty()); + + Random random = new Random(); + int numberOfItems = random.nextInt(source.size()); + numberOfItems = numberOfItems == 0 ? 1 : numberOfItems; + + List result = new ArrayList(numberOfItems); + while (result.size() < numberOfItems) { + int index = random.nextInt(source.size()); + T candidate = source.get(index); + if (!result.contains(candidate)) { + result.add(candidate); + } + } + + return result; + } + + enum AddressType { + SHIPPING, BILLING + } + + private interface WriteConcernCallback { + void doWithWriteConcern(String constantName, WriteConcern concern); + } + + private interface WatchCallback { + T doInWatch(); + } + + private interface ReactivePersonRepository extends ReactiveMongoRepository { + + Flux findByAddressesZipCodeContaining(String parameter); + } + + private interface Convertible { + + Document toDocument(); + } + + private static BasicDBList writeAll(Collection convertibles) { + BasicDBList result = new BasicDBList(); + for (Convertible convertible : convertibles) { + result.add(convertible.toDocument()); + } + return result; + } + + enum Api { + DRIVER, TEMPLATE, REPOSITORY, DIRECT, CONVERTER + } + + enum Mode { + WRITE, READ, QUERY, + WRITE_ASYNC + } + + private static class Statistics { + + private final String headline; + private final Map times; + + public Statistics(String headline) { + + this.headline = headline; + this.times = new HashMap(); + + for (Mode mode : Mode.values()) { + times.put(mode, new ModeTimes(mode)); + } + } + + public void registerTime(Api api, Mode mode, double time) { + 
times.get(mode).add(api, time); + } + + public void printResults(int iterations) { + + String title = String.format(headline, iterations); + + System.out.println(title); + System.out.println(createUnderline(title)); + + StringBuilder builder = new StringBuilder(); + for (Mode mode : Mode.values()) { + String print = times.get(mode).print(); + if (!print.isEmpty()) { + builder.append(print).append('\n'); + } + } + + System.out.println(builder.toString()); + } + + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(times.size()); + + for (ModeTimes times : this.times.values()) { + builder.append(times.toString()); + } + + return builder.toString(); + } + } + + private static String createUnderline(String input) { + + StringBuilder builder = new StringBuilder(input.length()); + + for (int i = 0; i < input.length(); i++) { + builder.append("-"); + } + + return builder.toString(); + } + + static class ApiTimes { + + private static final String TIME_TEMPLATE = "%s %s time -\tAverage: %sms%s,%sMedian: %sms%s"; + + private static final DecimalFormat TIME_FORMAT; + private static final DecimalFormat DEVIATION_FORMAT; + + static { + + TIME_FORMAT = new DecimalFormat("0.00"); + + DEVIATION_FORMAT = new DecimalFormat("0.00"); + DEVIATION_FORMAT.setPositivePrefix("+"); + } + + private final Api api; + private final Mode mode; + private final List times; + + public ApiTimes(Api api, Mode mode) { + this.api = api; + this.mode = mode; + this.times = new ArrayList(); + } + + public void add(double time) { + this.times.add(time); + } + + public boolean hasTimes() { + return !times.isEmpty(); + } + + public double getAverage() { + + double result = 0; + + for (Double time : times) { + result += time; + } + + return result == 0.0 ? 
0.0 : result / times.size(); + } + + public double getMedian() { + + if (times.isEmpty()) { + return 0.0; + } + + ArrayList list = new ArrayList(times); + Collections.sort(list); + + int size = list.size(); + + if (size % 2 == 0) { + return (list.get(size / 2 - 1) + list.get(size / 2)) / 2; + } else { + return list.get(size / 2); + } + } + + private double getDeviationFrom(double otherAverage) { + + double average = getAverage(); + return average * 100 / otherAverage - 100; + } + + private double getMediaDeviationFrom(double otherMedian) { + double median = getMedian(); + return median * 100 / otherMedian - 100; + } + + public String print() { + + if (times.isEmpty()) { + return ""; + } + + return basicPrint("", "\t\t", "") + '\n'; + } + + private String basicPrint(String extension, String middle, String foo) { + return String.format(TIME_TEMPLATE, api, mode, TIME_FORMAT.format(getAverage()), extension, middle, + TIME_FORMAT.format(getMedian()), foo); + } + + public String print(double referenceAverage, double referenceMedian) { + + if (times.isEmpty()) { + return ""; + } + + return basicPrint(String.format(" %s%%", DEVIATION_FORMAT.format(getDeviationFrom(referenceAverage))), "\t", + String.format(" %s%%", DEVIATION_FORMAT.format(getMediaDeviationFrom(referenceMedian)))) + '\n'; + } + + /* + * (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return times.isEmpty() ? 
"" + : String.format("%s, %s: %s", api, mode, StringUtils.collectionToCommaDelimitedString(times)) + '\n'; + } + } + + static class ModeTimes { + + private final Map times; + + public ModeTimes(Mode mode) { + + this.times = new HashMap(); + + for (Api api : Api.values()) { + this.times.put(api, new ApiTimes(api, mode)); + } + } + + public void add(Api api, double time) { + times.get(api).add(time); + } + + @SuppressWarnings("null") + public String print() { + + if (times.isEmpty()) { + return ""; + } + + Double previousTime = null; + Double previousMedian = null; + StringBuilder builder = new StringBuilder(); + + for (Api api : Api.values()) { + + ApiTimes apiTimes = times.get(api); + + if (!apiTimes.hasTimes()) { + continue; + } + + if (previousTime == null) { + builder.append(apiTimes.print()); + previousTime = apiTimes.getAverage(); + previousMedian = apiTimes.getMedian(); + } else { + builder.append(apiTimes.print(previousTime, previousMedian)); + } + } + + return builder.toString(); + } + + /* + * (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + + StringBuilder builder = new StringBuilder(times.size()); + + for (ApiTimes times : this.times.values()) { + builder.append(times.toString()); + } + + return builder.toString(); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ConvertingReactiveMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ConvertingReactiveMongoRepositoryTests.java new file mode 100644 index 000000000..e00c9fbfa --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ConvertingReactiveMongoRepositoryTests.java @@ -0,0 +1,293 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.repository; + +import static org.hamcrest.Matchers.*; +import static org.junit.Assert.*; + +import java.util.Arrays; +import java.util.List; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.reactivestreams.Publisher; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.ComponentScan.Filter; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.ImportResource; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.repository.config.EnableReactiveMongoRepositories; +import org.springframework.data.repository.RepositoryDefinition; +import org.springframework.data.repository.reactive.ReactivePagingAndSortingRepository; +import org.springframework.data.repository.reactive.RxJavaPagingAndSortingRepository; +import org.springframework.stereotype.Repository; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +import lombok.Data; +import lombok.NoArgsConstructor; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.TestSubscriber; +import rx.Observable; +import rx.Single; + +/** + * Test for {@link 
ReactiveMongoRepository} using reactive wrapper type conversion. + * + * @author Mark Paluch + */ +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration(classes = ConvertingReactiveMongoRepositoryTests.Config.class) +public class ConvertingReactiveMongoRepositoryTests { + + @EnableReactiveMongoRepositories(includeFilters = @Filter(value = Repository.class), considerNestedRepositories = true) + @ImportResource("classpath:reactive-infrastructure.xml") + static class Config {} + + @Autowired ReactiveMongoTemplate template; + @Autowired MixedReactivePersonRepostitory reactiveRepository; + @Autowired ReactivePersonRepostitory reactivePersonRepostitory; + @Autowired RxJavaPersonRepostitory rxJavaPersonRepostitory; + + ReactivePerson dave, oliver, carter, boyd, stefan, leroi, alicia; + + @Before + public void setUp() throws Exception { + + reactiveRepository.deleteAll().block(); + + dave = new ReactivePerson("Dave", "Matthews", 42); + oliver = new ReactivePerson("Oliver August", "Matthews", 4); + carter = new ReactivePerson("Carter", "Beauford", 49); + boyd = new ReactivePerson("Boyd", "Tinsley", 45); + stefan = new ReactivePerson("Stefan", "Lessard", 34); + leroi = new ReactivePerson("Leroi", "Moore", 41); + alicia = new ReactivePerson("Alicia", "Keys", 30); + + TestSubscriber subscriber = TestSubscriber.create(); + reactiveRepository.save(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)).subscribe(subscriber); + + subscriber.await().assertComplete().assertNoError(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void reactiveStreamsMethodsShouldWork() throws Exception { + + TestSubscriber subscriber = TestSubscriber.subscribe(reactivePersonRepostitory.exists(dave.getId())); + + subscriber.awaitAndAssertNextValueCount(1).assertValues(true); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void reactiveStreamsQueryMethodsShouldWork() throws Exception { + + TestSubscriber subscriber = TestSubscriber + 
.subscribe(reactivePersonRepostitory.findByLastname(boyd.getLastname())); + + subscriber.awaitAndAssertNextValueCount(1).assertValues(boyd); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void simpleRxJavaMethodsShouldWork() throws Exception { + + rx.observers.TestSubscriber subscriber = new rx.observers.TestSubscriber<>(); + rxJavaPersonRepostitory.exists(dave.getId()).subscribe(subscriber); + + subscriber.awaitTerminalEvent(); + subscriber.assertCompleted(); + subscriber.assertNoErrors(); + subscriber.assertValue(true); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void existsWithSingleRxJavaIdMethodsShouldWork() throws Exception { + + rx.observers.TestSubscriber subscriber = new rx.observers.TestSubscriber<>(); + rxJavaPersonRepostitory.exists(Single.just(dave.getId())).subscribe(subscriber); + + subscriber.awaitTerminalEvent(); + subscriber.assertCompleted(); + subscriber.assertNoErrors(); + subscriber.assertValue(true); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void singleRxJavaQueryMethodShouldWork() throws Exception { + + rx.observers.TestSubscriber subscriber = new rx.observers.TestSubscriber<>(); + rxJavaPersonRepostitory.findByFirstnameAndLastname(dave.getFirstname(), dave.getLastname()).subscribe(subscriber); + + subscriber.awaitTerminalEvent(); + subscriber.assertCompleted(); + subscriber.assertNoErrors(); + subscriber.assertValue(dave); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void singleProjectedRxJavaQueryMethodShouldWork() throws Exception { + + rx.observers.TestSubscriber subscriber = new rx.observers.TestSubscriber<>(); + rxJavaPersonRepostitory.findProjectedByLastname(carter.getLastname()).subscribe(subscriber); + + subscriber.awaitTerminalEvent(); + subscriber.assertCompleted(); + subscriber.assertNoErrors(); + + ProjectedPerson projectedPerson = subscriber.getOnNextEvents().get(0); + assertThat(projectedPerson.getFirstname(), is(equalTo(carter.getFirstname()))); + } + + /** + * @see 
DATAMONGO-1444 + */ + @Test + public void observableRxJavaQueryMethodShouldWork() throws Exception { + + rx.observers.TestSubscriber subscriber = new rx.observers.TestSubscriber<>(); + rxJavaPersonRepostitory.findByLastname(boyd.getLastname()).subscribe(subscriber); + + subscriber.awaitTerminalEvent(); + subscriber.assertCompleted(); + subscriber.assertNoErrors(); + subscriber.assertValue(boyd); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void mixedRepositoryShouldWork() throws Exception { + + ReactivePerson value = reactiveRepository.findByLastname(boyd.getLastname()).toBlocking().value(); + + assertThat(value, is(equalTo(boyd))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldFindOneBySingleOfLastName() throws Exception { + + ReactivePerson carter = reactiveRepository.findByLastname(Single.just("Beauford")).block(); + + assertThat(carter.getFirstname(), is(equalTo("Carter"))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldFindByObservableOfLastNameIn() throws Exception { + + List persons = reactiveRepository.findByLastnameIn(Observable.just("Beauford", "Matthews")) + .collectList().block(); + + assertThat(persons, hasItems(carter, dave, oliver)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldFindByPublisherOfLastNameInAndAgeGreater() throws Exception { + + List persons = reactiveRepository + .findByLastnameInAndAgeGreaterThan(Flux.just("Beauford", "Matthews"), 41).toList().toBlocking().single(); + + assertThat(persons, hasItems(carter, dave)); + } + + @Repository + interface ReactivePersonRepostitory extends ReactivePagingAndSortingRepository { + + Publisher findByLastname(String lastname); + } + + @Repository + interface RxJavaPersonRepostitory extends RxJavaPagingAndSortingRepository { + + Observable findByFirstnameAndLastname(String firstname, String lastname); + + Single findByLastname(String lastname); + + Single findProjectedByLastname(String lastname); + } + + @Repository + 
interface MixedReactivePersonRepostitory extends ReactiveMongoRepository { + + Single findByLastname(String lastname); + + Mono findByLastname(Single lastname); + + Flux findByLastnameIn(Observable lastname); + + Flux findByLastname(String lastname, Sort sort); + + Observable findByLastnameInAndAgeGreaterThan(Flux lastname, int age); + } + + @Document + @Data + @NoArgsConstructor + static class ReactivePerson { + + @Id String id; + + String firstname; + String lastname; + int age; + + public ReactivePerson(String firstname, String lastname, int age) { + + this.firstname = firstname; + this.lastname = lastname; + this.age = age; + } + } + + interface ProjectedPerson { + + String getId(); + + String getFirstname(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactiveMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactiveMongoRepositoryTests.java new file mode 100644 index 000000000..d54bc669c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactiveMongoRepositoryTests.java @@ -0,0 +1,442 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository; + +import static org.hamcrest.Matchers.*; +import static org.junit.Assert.*; +import static org.springframework.data.domain.Sort.Direction.*; + +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.reactivestreams.Publisher; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanClassLoaderAware; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.BeanFactoryAware; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Order; +import org.springframework.data.geo.Circle; +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.Metrics; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.CollectionOptions; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.repository.Person.Sex; +import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory; +import org.springframework.data.mongodb.repository.support.SimpleReactiveMongoRepository; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.query.DefaultEvaluationContextProvider; +import org.springframework.test.context.ContextConfiguration; +import 
org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +import lombok.NoArgsConstructor; +import reactor.core.Cancellation; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.TestSubscriber; + +/** + * Test for {@link ReactiveMongoRepository} query methods. + * + * @author Mark Paluch + */ +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration("classpath:reactive-infrastructure.xml") +public class ReactiveMongoRepositoryTests implements BeanClassLoaderAware, BeanFactoryAware { + + @Autowired ReactiveMongoTemplate template; + + ReactiveMongoRepositoryFactory factory; + private ClassLoader classLoader; + private BeanFactory beanFactory; + private ReactivePersonRepository repository; + private ReactiveCappedCollectionRepository cappedRepository; + + Person dave, oliver, carter, boyd, stefan, leroi, alicia; + + @Override + public void setBeanClassLoader(ClassLoader classLoader) { + this.classLoader = classLoader == null ? org.springframework.util.ClassUtils.getDefaultClassLoader() : classLoader; + } + + @Override + public void setBeanFactory(BeanFactory beanFactory) throws BeansException { + this.beanFactory = beanFactory; + } + + @Before + public void setUp() throws Exception { + + factory = new ReactiveMongoRepositoryFactory(template); + factory.setRepositoryBaseClass(SimpleReactiveMongoRepository.class); + factory.setBeanClassLoader(classLoader); + factory.setBeanFactory(beanFactory); + factory.setEvaluationContextProvider(DefaultEvaluationContextProvider.INSTANCE); + + repository = factory.getRepository(ReactivePersonRepository.class); + cappedRepository = factory.getRepository(ReactiveCappedCollectionRepository.class); + + repository.deleteAll().block(); + + dave = new Person("Dave", "Matthews", 42); + oliver = new Person("Oliver August", "Matthews", 4); + carter = new Person("Carter", "Beauford", 49); + carter.setSkills(Arrays.asList("Drums", "percussion", "vocals")); + Thread.sleep(10); + boyd = 
new Person("Boyd", "Tinsley", 45); + boyd.setSkills(Arrays.asList("Violin", "Electric Violin", "Viola", "Mandolin", "Vocals", "Guitar")); + stefan = new Person("Stefan", "Lessard", 34); + leroi = new Person("Leroi", "Moore", 41); + + alicia = new Person("Alicia", "Keys", 30, Sex.FEMALE); + + TestSubscriber subscriber = TestSubscriber.create(); + repository.save(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)).subscribe(subscriber); + + subscriber.await().assertComplete().assertNoError(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldFindByLastName() { + + List list = repository.findByLastname("Matthews").collectList().block(); + + assertThat(list, hasSize(2)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldFindMonoOfPage() { + + Mono> pageMono = repository.findMonoPageByLastname("Matthews", new PageRequest(0, 1)); + + Page persons = pageMono.block(); + + assertThat(persons.getContent(), hasSize(1)); + assertThat(persons.getTotalPages(), is(2)); + + pageMono = repository.findMonoPageByLastname("Matthews", new PageRequest(0, 100)); + + persons = pageMono.block(); + + assertThat(persons.getContent(), hasSize(2)); + assertThat(persons.getTotalPages(), is(1)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldFindMonoOfSlice() { + + Mono> pageMono = repository.findMonoSliceByLastname("Matthews", new PageRequest(0, 1)); + + Slice persons = pageMono.block(); + + assertThat(persons.getContent(), hasSize(1)); + assertThat(persons.hasNext(), is(true)); + + pageMono = repository.findMonoSliceByLastname("Matthews", new PageRequest(0, 100)); + + persons = pageMono.block(); + + assertThat(persons.getContent(), hasSize(2)); + assertThat(persons.hasNext(), is(false)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldFindOneByLastName() { + + Person carter = repository.findOneByLastname("Beauford").block(); + + assertThat(carter.getFirstname(), is(equalTo("Carter"))); + } + + /** + 
* @see DATAMONGO-1444 + */ + @Test + public void shouldFindOneByPublisherOfLastName() { + + Person carter = repository.findByLastname(Mono.just("Beauford")).block(); + + assertThat(carter.getFirstname(), is(equalTo("Carter"))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldFindByPublisherOfLastNameIn() { + + List persons = repository.findByLastnameIn(Flux.just("Beauford", "Matthews")).collectList().block(); + + assertThat(persons, hasItems(carter, dave, oliver)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldFindByPublisherOfLastNameInAndAgeGreater() { + + List persons = repository.findByLastnameInAndAgeGreaterThan(Flux.just("Beauford", "Matthews"), 41) + .collectList().block(); + + assertThat(persons, hasItems(carter, dave)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldFindUsingPublishersInStringQuery() { + + List persons = repository.findStringQuery(Flux.just("Beauford", "Matthews"), Mono.just(41)).collectList() + .block(); + + assertThat(persons, hasItems(carter, dave)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldFindByLastNameAndSort() { + + List persons = repository.findByLastname("Matthews", new Sort(new Order(ASC, "age"))).collectList().block(); + assertThat(persons, contains(oliver, dave)); + + persons = repository.findByLastname("Matthews", new Sort(new Order(DESC, "age"))).collectList().block(); + assertThat(persons, contains(dave, oliver)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldUseInfiniteStream() throws Exception { + + template.dropCollection(Capped.class).block(); + template.createCollection(Capped.class, new CollectionOptions(1000, 100, true)).block(); + template.insert(new Capped("value", Math.random())).block(); + + BlockingQueue documents = new LinkedBlockingDeque<>(100); + + Cancellation cancellation = cappedRepository.findByKey("value").doOnNext(documents::add).subscribe(); + + assertThat(documents.poll(5, 
TimeUnit.SECONDS), is(notNullValue())); + + template.insert(new Capped("value", Math.random())).block(); + assertThat(documents.poll(5, TimeUnit.SECONDS), is(notNullValue())); + assertThat(documents.isEmpty(), is(true)); + + cancellation.dispose(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldUseInfiniteStreamWithProjection() throws Exception { + + template.dropCollection(Capped.class).block(); + template.createCollection(Capped.class, new CollectionOptions(1000, 100, true)).block(); + template.insert(new Capped("value", Math.random())).block(); + + BlockingQueue documents = new LinkedBlockingDeque<>(100); + + Cancellation cancellation = cappedRepository.findProjectionByKey("value").doOnNext(documents::add).subscribe(); + + CappedProjection projection1 = documents.poll(5, TimeUnit.SECONDS); + assertThat(projection1, is(notNullValue())); + assertThat(projection1.getRandom(), is(not(0))); + + template.insert(new Capped("value", Math.random())).block(); + + CappedProjection projection2 = documents.poll(5, TimeUnit.SECONDS); + assertThat(projection2, is(notNullValue())); + assertThat(projection2.getRandom(), is(not(0))); + + assertThat(documents.isEmpty(), is(true)); + + cancellation.dispose(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findsPeopleByLocationWithinCircle() { + + Point point = new Point(-73.99171, 40.738868); + dave.setLocation(point); + repository.save(dave).block(); + + repository.findByLocationWithin(new Circle(-78.99171, 45.738868, 170)) // + .subscribeWith(TestSubscriber.create()) // + .awaitAndAssertNextValues(dave); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findsPeopleByPageableLocationWithinCircle() { + + Point point = new Point(-73.99171, 40.738868); + dave.setLocation(point); + repository.save(dave).block(); + + repository.findByLocationWithin(new Circle(-78.99171, 45.738868, 170), new PageRequest(0, 10)) // + .subscribeWith(TestSubscriber.create()) // + 
.awaitAndAssertNextValues(dave); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findsPeopleGeoresultByLocationWithinBox() { + + Point point = new Point(-73.99171, 40.738868); + dave.setLocation(point); + repository.save(dave).block(); + + repository.findByLocationNear(new Point(-73.99, 40.73), new Distance(2000, Metrics.KILOMETERS)) // + .subscribeWith(TestSubscriber.create()) // + .awaitAndAssertNextValuesWith(personGeoResult -> { + + assertThat(personGeoResult.getDistance().getValue(), is(closeTo(1, 1))); + assertThat(personGeoResult.getContent(), is(equalTo(dave))); + }); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findsPeoplePageableGeoresultByLocationWithinBox() { + + Point point = new Point(-73.99171, 40.738868); + dave.setLocation(point); + repository.save(dave).block(); + + repository.findByLocationNear(new Point(-73.99, 40.73), new Distance(2000, Metrics.KILOMETERS), new PageRequest(0, 10)) // + .subscribeWith(TestSubscriber.create()) // + .awaitAndAssertNextValuesWith(personGeoResult -> { + + assertThat(personGeoResult.getDistance().getValue(), is(closeTo(1, 1))); + assertThat(personGeoResult.getContent(), is(equalTo(dave))); + }); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findsPeopleByLocationWithinBox() { + + Point point = new Point(-73.99171, 40.738868); + dave.setLocation(point); + repository.save(dave).block(); + + repository.findPersonByLocationNear(new Point(-73.99, 40.73), new Distance(2000, Metrics.KILOMETERS)) // + .subscribeWith(TestSubscriber.create()) // + .awaitAndAssertNextValues(dave); + } + + interface ReactivePersonRepository extends ReactiveMongoRepository { + + Flux findByLastname(String lastname); + + Mono findOneByLastname(String lastname); + + Mono> findMonoPageByLastname(String lastname, Pageable pageRequest); + + Mono> findMonoSliceByLastname(String lastname, Pageable pageRequest); + + Mono findByLastname(Publisher lastname); + + Flux findByLastnameIn(Publisher lastname); + + 
Flux findByLastname(String lastname, Sort sort); + + Flux findByLastnameInAndAgeGreaterThan(Flux lastname, int age); + + @Query("{ lastname: { $in: ?0 }, age: { $gt : ?1 } }") + Flux findStringQuery(Flux lastname, Mono age); + + Flux findByLocationWithin(Circle circle); + + Flux findByLocationWithin(Circle circle, Pageable pageable); + + Flux> findByLocationNear(Point point, Distance maxDistance); + + Flux> findByLocationNear(Point point, Distance maxDistance, Pageable pageable); + + Flux findPersonByLocationNear(Point point, Distance maxDistance); + } + + interface ReactiveCappedCollectionRepository extends Repository { + + @InfiniteStream + Flux findByKey(String key); + + @InfiniteStream + Flux findProjectionByKey(String key); + } + + @Document + @NoArgsConstructor + static class Capped { + + String id; + String key; + double random; + + public Capped(String key, double random) { + this.key = key; + this.random = random; + } + } + + interface CappedProjection { + double getRandom(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactivePersonRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactivePersonRepository.java new file mode 100644 index 000000000..6d67f46b2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactivePersonRepository.java @@ -0,0 +1,35 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.repository; + +import reactor.core.publisher.Flux; + +/** + * Sample reactive repository managing {@link Person} entities. + * + * @author Mark Paluch + */ +public interface ReactivePersonRepository extends ReactiveMongoRepository { + + /** + * Returns all {@link Person}s with the given lastname. + * + * @param lastname + * @return + */ + Flux findByLastname(String lastname); +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java new file mode 100644 index 000000000..de74174cd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java @@ -0,0 +1,580 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.repository; + +import static org.hamcrest.Matchers.*; +import static org.junit.Assert.*; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanClassLoaderAware; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.BeanFactoryAware; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.domain.Sort.Order; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory; +import org.springframework.data.mongodb.repository.support.SimpleReactiveMongoRepository; +import org.springframework.data.repository.query.DefaultEvaluationContextProvider; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +import lombok.Data; +import lombok.NoArgsConstructor; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.TestSubscriber; + +/** + * Test for {@link ReactiveMongoRepository}. 
+ * + * @author Mark Paluch + */ +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration("classpath:reactive-infrastructure.xml") +public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware, BeanFactoryAware { + + @Autowired private ReactiveMongoTemplate template; + + private ReactiveMongoRepositoryFactory factory; + private ClassLoader classLoader; + private BeanFactory beanFactory; + private ReactivePersonRepostitory repository; + + private ReactivePerson dave, oliver, carter, boyd, stefan, leroi, alicia; + + @Override + public void setBeanClassLoader(ClassLoader classLoader) { + this.classLoader = classLoader == null ? org.springframework.util.ClassUtils.getDefaultClassLoader() : classLoader; + } + + @Override + public void setBeanFactory(BeanFactory beanFactory) throws BeansException { + this.beanFactory = beanFactory; + } + + @Before + public void setUp() { + + factory = new ReactiveMongoRepositoryFactory(template); + factory.setRepositoryBaseClass(SimpleReactiveMongoRepository.class); + factory.setBeanClassLoader(classLoader); + factory.setBeanFactory(beanFactory); + factory.setEvaluationContextProvider(DefaultEvaluationContextProvider.INSTANCE); + + repository = factory.getRepository(ReactivePersonRepostitory.class); + + repository.deleteAll().block(); + + dave = new ReactivePerson("Dave", "Matthews", 42); + oliver = new ReactivePerson("Oliver August", "Matthews", 4); + carter = new ReactivePerson("Carter", "Beauford", 49); + boyd = new ReactivePerson("Boyd", "Tinsley", 45); + stefan = new ReactivePerson("Stefan", "Lessard", 34); + leroi = new ReactivePerson("Leroi", "Moore", 41); + alicia = new ReactivePerson("Alicia", "Keys", 30); + + TestSubscriber subscriber = TestSubscriber.create(); + repository.save(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)).subscribe(subscriber); + + subscriber.await().assertComplete().assertNoError(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void 
existsByIdShouldReturnTrueForExistingObject() { + + Boolean exists = repository.exists(dave.id).block(); + + assertThat(exists, is(true)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void existsByIdShouldReturnFalseForAbsentObject() { + + TestSubscriber testSubscriber = TestSubscriber.subscribe(repository.exists("unknown")); + + testSubscriber.await().assertComplete().assertValues(false).assertNoError(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void existsByMonoOfIdShouldReturnTrueForExistingObject() { + + Boolean exists = repository.exists(Mono.just(dave.id)).block(); + assertThat(exists, is(true)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void existsByEmptyMonoOfIdShouldReturnEmptyMono() { + + TestSubscriber testSubscriber = TestSubscriber.subscribe(repository.exists(Mono.empty())); + + testSubscriber.await().assertComplete().assertNoValues().assertNoError(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findOneShouldReturnObject() { + + ReactivePerson person = repository.findOne(dave.id).block(); + + assertThat(person.getFirstname(), is(equalTo("Dave"))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findOneShouldCompleteWithoutValueForAbsentObject() { + + TestSubscriber testSubscriber = TestSubscriber.subscribe(repository.findOne("unknown")); + + testSubscriber.await().assertComplete().assertNoValues().assertNoError(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findOneByMonoOfIdShouldReturnTrueForExistingObject() { + + ReactivePerson person = repository.findOne(Mono.just(dave.id)).block(); + + assertThat(person.id, is(equalTo(dave.id))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findOneByEmptyMonoOfIdShouldReturnEmptyMono() { + + TestSubscriber testSubscriber = TestSubscriber.subscribe(repository.findOne(Mono.empty())); + + testSubscriber.await().assertComplete().assertNoValues().assertNoError(); + } + + /** + * @see DATAMONGO-1444 + */ + 
@Test + public void findAllShouldReturnAllResults() { + + List persons = repository.findAll().collectList().block(); + + assertThat(persons, hasSize(7)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findAllByIterableOfIdShouldReturnResults() { + + List persons = repository.findAll(Arrays.asList(dave.id, boyd.id)).collectList().block(); + + assertThat(persons, hasSize(2)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findAllByPublisherOfIdShouldReturnResults() { + + List persons = repository.findAll(Flux.just(dave.id, boyd.id)).collectList().block(); + + assertThat(persons, hasSize(2)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findAllByEmptyPublisherOfIdShouldReturnResults() { + + TestSubscriber testSubscriber = TestSubscriber.subscribe(repository.findAll(Flux.empty())); + + testSubscriber.await().assertComplete().assertNoValues().assertNoError(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findAllWithSortShouldReturnResults() { + + List persons = repository.findAll(new Sort(new Order(Direction.ASC, "age"))).collectList().block(); + + assertThat(persons, hasSize(7)); + assertThat(persons.get(0).getId(), is(equalTo(oliver.getId()))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findAllWithPageRequestShouldReturnPage() { + + Page people = repository.findAll(new PageRequest(0, 10)).block(); + + assertThat(people.getTotalPages(), is(1)); + + List ids = people.getContent().stream().map(ReactivePerson::getId).collect(Collectors.toList()); + + assertThat(ids, hasSize(7)); + assertThat(ids, hasItems(dave.id, carter.id)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void findAllWithPageRequestOfPageSize1ShouldReturnPage() { + + Page people = repository.findAll(new PageRequest(1, 1)).block(); + + List ids = people.getContent().stream().map(ReactivePerson::getId).collect(Collectors.toList()); + + assertThat(people.getTotalPages(), is(7)); + assertThat(ids, hasSize(1)); + 
} + + /** + * @see DATAMONGO-1444 + */ + @Test + public void countShouldReturnNumberOfRecords() { + + TestSubscriber testSubscriber = TestSubscriber.subscribe(repository.count()); + + testSubscriber.await().assertComplete().assertValueCount(1).assertValues(7L).assertNoError(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void insertEntityShouldInsertEntity() { + + repository.deleteAll().block(); + + ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36); + + TestSubscriber testSubscriber = TestSubscriber.subscribe(repository.insert(person)); + + testSubscriber.await().assertComplete().assertValueCount(1).assertValues(person); + + assertThat(person.getId(), is(notNullValue())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void insertShouldDeferredWrite() { + + ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36); + + repository.insert(person); + + assertThat(person.getId(), is(nullValue())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void insertIterableOfEntitiesShouldInsertEntity() { + + repository.deleteAll().block(); + + dave.setId(null); + oliver.setId(null); + boyd.setId(null); + + TestSubscriber testSubscriber = TestSubscriber + .subscribe(repository.insert(Arrays.asList(dave, oliver, boyd))); + + testSubscriber.await().assertComplete().assertValueCount(3).assertValues(dave, oliver, boyd); + + assertThat(dave.getId(), is(notNullValue())); + assertThat(oliver.getId(), is(notNullValue())); + assertThat(boyd.getId(), is(notNullValue())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void insertPublisherOfEntitiesShouldInsertEntity() { + + repository.deleteAll().block(); + + dave.setId(null); + oliver.setId(null); + boyd.setId(null); + + TestSubscriber testSubscriber = TestSubscriber + .subscribe(repository.insert(Flux.just(dave, oliver, boyd))); + + testSubscriber.await().assertComplete().assertValueCount(3); + + assertThat(dave.getId(), is(notNullValue())); + 
assertThat(oliver.getId(), is(notNullValue())); + assertThat(boyd.getId(), is(notNullValue())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void saveEntityShouldUpdateExistingEntity() { + + dave.setFirstname("Hello, Dave"); + dave.setLastname("Bowman"); + + TestSubscriber testSubscriber = TestSubscriber.subscribe(repository.save(dave)); + + testSubscriber.await().assertComplete().assertValueCount(1).assertValues(dave); + + List matthews = repository.findByLastname("Matthews").collectList().block(); + assertThat(matthews, hasSize(1)); + assertThat(matthews, contains(oliver)); + assertThat(matthews, not(contains(dave))); + + ReactivePerson reactivePerson = repository.findOne(dave.id).block(); + + assertThat(reactivePerson.getFirstname(), is(equalTo(dave.getFirstname()))); + assertThat(reactivePerson.getLastname(), is(equalTo(dave.getLastname()))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void saveEntityShouldInsertNewEntity() { + + ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36); + + TestSubscriber testSubscriber = TestSubscriber.subscribe(repository.save(person)); + + testSubscriber.await().assertComplete().assertValueCount(1).assertValues(person); + + ReactivePerson reactivePerson = repository.findOne(person.id).block(); + + assertThat(reactivePerson.getFirstname(), is(equalTo(person.getFirstname()))); + assertThat(reactivePerson.getLastname(), is(equalTo(person.getLastname()))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void saveIterableOfNewEntitiesShouldInsertEntity() { + + repository.deleteAll().block(); + + dave.setId(null); + oliver.setId(null); + boyd.setId(null); + + TestSubscriber testSubscriber = TestSubscriber + .subscribe(repository.save(Arrays.asList(dave, oliver, boyd))); + + testSubscriber.await().assertComplete().assertValueCount(3).assertValues(dave, oliver, boyd); + + assertThat(dave.getId(), is(notNullValue())); + assertThat(oliver.getId(), is(notNullValue())); + 
assertThat(boyd.getId(), is(notNullValue())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void saveIterableOfMixedEntitiesShouldInsertEntity() { + + ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36); + + dave.setFirstname("Hello, Dave"); + dave.setLastname("Bowman"); + + TestSubscriber testSubscriber = TestSubscriber + .subscribe(repository.save(Arrays.asList(person, dave))); + + testSubscriber.await().assertComplete().assertValueCount(2); + + ReactivePerson persistentDave = repository.findOne(dave.id).block(); + assertThat(persistentDave, is(equalTo(dave))); + + assertThat(person.id, is(notNullValue())); + ReactivePerson persistentHomer = repository.findOne(person.id).block(); + assertThat(persistentHomer, is(equalTo(person))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void savePublisherOfEntitiesShouldInsertEntity() { + + repository.deleteAll().block(); + + dave.setId(null); + oliver.setId(null); + boyd.setId(null); + + TestSubscriber testSubscriber = TestSubscriber + .subscribe(repository.save(Flux.just(dave, oliver, boyd))); + + testSubscriber.await().assertComplete().assertValueCount(3); + + assertThat(dave.getId(), is(notNullValue())); + assertThat(oliver.getId(), is(notNullValue())); + assertThat(boyd.getId(), is(notNullValue())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void deleteAllShouldRemoveEntities() { + + repository.deleteAll().block(); + + TestSubscriber testSubscriber = TestSubscriber.subscribe(repository.findAll()); + + testSubscriber.await().assertComplete().assertValueCount(0); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void deleteByIdShouldRemoveEntity() { + + TestSubscriber testSubscriber = TestSubscriber.subscribe(repository.delete(dave.id)); + + testSubscriber.await().assertComplete().assertNoValues(); + + TestSubscriber verificationSubscriber = TestSubscriber.subscribe(repository.findOne(dave.id)); + + 
verificationSubscriber.await().assertComplete().assertNoValues(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void deleteShouldRemoveEntity() { + + TestSubscriber testSubscriber = TestSubscriber.subscribe(repository.delete(dave)); + + testSubscriber.await().assertComplete().assertNoValues(); + + TestSubscriber verificationSubscriber = TestSubscriber.subscribe(repository.findOne(dave.id)); + + verificationSubscriber.await().assertComplete().assertNoValues(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void deleteIterableOfEntitiesShouldRemoveEntities() { + + TestSubscriber testSubscriber = TestSubscriber.subscribe(repository.delete(Arrays.asList(dave, boyd))); + + testSubscriber.await().assertComplete().assertNoValues(); + + TestSubscriber verificationSubscriber = TestSubscriber.subscribe(repository.findOne(boyd.id)); + verificationSubscriber.await().assertComplete().assertNoValues(); + + List matthews = repository.findByLastname("Matthews").collectList().block(); + assertThat(matthews, hasSize(1)); + assertThat(matthews, contains(oliver)); + + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void deletePublisherOfEntitiesShouldRemoveEntities() { + + TestSubscriber testSubscriber = TestSubscriber.subscribe(repository.delete(Flux.just(dave, boyd))); + + testSubscriber.await().assertComplete().assertNoValues(); + + TestSubscriber verificationSubscriber = TestSubscriber.subscribe(repository.findOne(boyd.id)); + verificationSubscriber.await().assertComplete().assertNoValues(); + + List matthews = repository.findByLastname("Matthews").collectList().block(); + assertThat(matthews, hasSize(1)); + assertThat(matthews, contains(oliver)); + + } + + interface ReactivePersonRepostitory extends ReactiveMongoRepository { + + Flux findByLastname(String lastname); + + } + + @Data + @NoArgsConstructor + static class ReactivePerson { + + @Id String id; + + String firstname; + String lastname; + int age; + + public ReactivePerson(String firstname, 
String lastname, int age) { + + this.firstname = firstname; + this.lastname = lastname; + this.age = age; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarIntegrationTests.java index 32d49c7b8..118766c89 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarIntegrationTests.java @@ -37,7 +37,7 @@ import com.mongodb.MongoClient; /** * Integration tests for {@link MongoRepositoriesRegistrar}. - * + * * @author Oliver Gierke */ @RunWith(SpringJUnit4ClassRunner.class) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtensionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtensionUnitTests.java index eb6fd4f40..b2c951713 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtensionUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtensionUnitTests.java @@ -34,7 +34,7 @@ import org.springframework.data.repository.config.RepositoryConfigurationSource; /** * Unit tests for {@link MongoRepositoryConfigurationExtension}. 
- * + * * @author Oliver Gierke * @since 1.6 */ diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarIntegrationTests.java new file mode 100644 index 000000000..a683591d4 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarIntegrationTests.java @@ -0,0 +1,76 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.repository.config; + +import static org.hamcrest.CoreMatchers.*; +import static org.junit.Assert.*; + +import java.util.Arrays; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.repository.ReactivePersonRepository; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +import com.mongodb.reactivestreams.client.MongoClients; + +/** + * Integration tests for {@link ReactiveMongoRepositoriesRegistrar}. + * + * @author Mark Paluch + */ +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration +public class ReactiveMongoRepositoriesRegistrarIntegrationTests { + + @Configuration + @EnableReactiveMongoRepositories(basePackages = "org.springframework.data.mongodb.repository") + static class Config { + + @Bean + public ReactiveMongoTemplate reactiveMongoTemplate() throws Exception { + return new ReactiveMongoTemplate(new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database")); + } + } + + @Autowired ReactivePersonRepository personRepository; + @Autowired ApplicationContext context; + + /** + * @see DATAMONGO-1444 + */ + @Test + public void testConfiguration() {} + + /** + * @see DATAMONGO-1444 + */ + @Test + public void registersTypePredictingPostProcessor() { + + Iterable beanNames = Arrays.asList(context.getBeanDefinitionNames()); + + assertThat(beanNames, hasItem(containsString("RepositoryFactoryBeanSupport_Predictor"))); + } +} diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtensionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtensionUnitTests.java new file mode 100644 index 000000000..e9f0d4f92 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtensionUnitTests.java @@ -0,0 +1,121 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.repository.config; + +import static org.junit.Assert.*; + +import java.util.Collection; + +import org.junit.Test; +import org.springframework.core.env.Environment; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.core.io.ResourceLoader; +import org.springframework.core.io.support.PathMatchingResourcePatternResolver; +import org.springframework.core.type.StandardAnnotationMetadata; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.repository.MongoRepository; +import org.springframework.data.mongodb.repository.ReactiveMongoRepository; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource; +import org.springframework.data.repository.config.RepositoryConfiguration; +import org.springframework.data.repository.config.RepositoryConfigurationSource; +import org.springframework.data.repository.reactive.ReactiveCrudRepository; +import org.springframework.data.repository.reactive.RxJavaCrudRepository; + +/** + * Unit tests for {@link ReactiveMongoRepositoryConfigurationExtension}. 
+ * + * @author Mark Paluch + */ +public class ReactiveMongoRepositoryConfigurationExtensionUnitTests { + + StandardAnnotationMetadata metadata = new StandardAnnotationMetadata(Config.class, true); + ResourceLoader loader = new PathMatchingResourcePatternResolver(); + Environment environment = new StandardEnvironment(); + RepositoryConfigurationSource configurationSource = new AnnotationRepositoryConfigurationSource(metadata, + EnableReactiveMongoRepositories.class, loader, environment); + + /** + * @see DATAMONGO-1444 + */ + @Test + public void isStrictMatchIfDomainTypeIsAnnotatedWithDocument() { + + ReactiveMongoRepositoryConfigurationExtension extension = new ReactiveMongoRepositoryConfigurationExtension(); + assertHasRepo(SampleRepository.class, extension.getRepositoryConfigurations(configurationSource, loader, true)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void isStrictMatchIfRepositoryExtendsStoreSpecificBase() { + + ReactiveMongoRepositoryConfigurationExtension extension = new ReactiveMongoRepositoryConfigurationExtension(); + assertHasRepo(StoreRepository.class, extension.getRepositoryConfigurations(configurationSource, loader, true)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void isNotStrictMatchIfDomainTypeIsNotAnnotatedWithDocument() { + + ReactiveMongoRepositoryConfigurationExtension extension = new ReactiveMongoRepositoryConfigurationExtension(); + assertDoesNotHaveRepo(UnannotatedRepository.class, + extension.getRepositoryConfigurations(configurationSource, loader, true)); + } + + private static void assertHasRepo(Class repositoryInterface, + Collection> configs) { + + for (RepositoryConfiguration config : configs) { + if (config.getRepositoryInterface().equals(repositoryInterface.getName())) { + return; + } + } + + fail("Expected to find config for repository interface ".concat(repositoryInterface.getName()).concat(" but got ") + .concat(configs.toString())); + } + + private static void assertDoesNotHaveRepo(Class 
repositoryInterface, + Collection> configs) { + + for (RepositoryConfiguration config : configs) { + if (config.getRepositoryInterface().equals(repositoryInterface.getName())) { + fail("Expected not to find config for repository interface ".concat(repositoryInterface.getName())); + } + } + } + + @EnableReactiveMongoRepositories(considerNestedRepositories = true) + static class Config { + + } + + @Document + static class Sample {} + + static class Store {} + + interface SampleRepository extends ReactiveCrudRepository {} + + interface UnannotatedRepository extends RxJavaCrudRepository {} + + interface StoreRepository extends ReactiveMongoRepository {} +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/NestedMongoRepositoriesJavaConfigTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/NestedMongoRepositoriesJavaConfigTests.java index e9713a383..3d96b30ae 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/NestedMongoRepositoriesJavaConfigTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/NestedMongoRepositoriesJavaConfigTests.java @@ -31,7 +31,7 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; /** * Integration test for the combination of JavaConfig and an {@link Repositories} wrapper. 
- * + * * @author Thomas Darimont */ @RunWith(SpringJUnit4ClassRunner.class) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepository.java new file mode 100644 index 000000000..969999f6d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepository.java @@ -0,0 +1,28 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.repository.custom; + +import org.springframework.data.mongodb.repository.User; +import org.springframework.data.repository.reactive.RxJavaCrudRepository; + +/** + * @author Mark Paluch + */ +public interface CustomReactiveMongoRepository + extends RxJavaCrudRepository, CustomReactiveMongoRepositoryCustom { + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryCustom.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryCustom.java new file mode 100644 index 000000000..9e2888c53 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryCustom.java @@ -0,0 +1,30 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.repository.custom; + +import java.util.List; + +import org.springframework.data.mongodb.repository.User; + +/** + * @author Mark Paluch + */ +public interface CustomReactiveMongoRepositoryCustom { + + List findByUsernameCustom(String username); + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryImpl.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryImpl.java new file mode 100644 index 000000000..c99d96f1d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryImpl.java @@ -0,0 +1,37 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.repository.custom; + +import java.util.Collections; +import java.util.List; + +import org.springframework.data.mongodb.repository.User; + +/** + * @author Mark Paluch + */ +public class CustomReactiveMongoRepositoryImpl implements CustomReactiveMongoRepositoryCustom { + + @Override + public List findByUsernameCustom(String username) { + + User user = new User(); + user.setUsername(username); + + return Collections.singletonList(user); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveRepositoryImplementationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveRepositoryImplementationTests.java new file mode 100644 index 000000000..d77f8f882 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveRepositoryImplementationTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.custom; + +import static org.hamcrest.CoreMatchers.*; +import static org.junit.Assert.*; + +import java.util.List; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.ImportResource; +import org.springframework.data.mongodb.repository.User; +import org.springframework.data.mongodb.repository.config.EnableReactiveMongoRepositories; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +/** + * Integration tests for custom reactive Repository implementations. + * + * @author Mark Paluch + */ +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration +public class CustomReactiveRepositoryImplementationTests { + + @Configuration + @EnableReactiveMongoRepositories + @ImportResource("classpath:reactive-infrastructure.xml") + static class Config {} + + @Autowired CustomReactiveMongoRepository customMongoRepository; + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldExecuteMethodOnCustomRepositoryImplementation() { + + String username = "bubu"; + List users = customMongoRepository.findByUsernameCustom(username); + + assertThat(users.size(), is(1)); + assertThat(users.get(0), is(notNullValue())); + assertThat(users.get(0).getUsername(), is(username)); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomRepositoryImplementationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomRepositoryImplementationTests.java index d375bd0e1..66d07cd64 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomRepositoryImplementationTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomRepositoryImplementationTests.java @@ -32,7 +32,7 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; /** * Integration tests for custom Repository implementations. - * + * * @author Thomas Darimont */ @RunWith(SpringJUnit4ClassRunner.class) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecutionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecutionUnitTests.java new file mode 100644 index 000000000..018439fb5 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecutionUnitTests.java @@ -0,0 +1,142 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.query; + +import static org.hamcrest.Matchers.*; +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.Method; +import java.util.Arrays; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Range; +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.GeoNearExecution; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.PagedExecution; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.SlicedExecution; +import org.springframework.data.util.ClassTypeInformation; +import org.springframework.util.ClassUtils; + +import reactor.core.publisher.Flux; + +/** + * Unit tests for {@link ReactiveMongoQueryExecution}. 
+ * + * @author Mark Paluch + */ +@RunWith(MockitoJUnitRunner.class) +public class ReactiveMongoQueryExecutionUnitTests { + + @Mock private ReactiveMongoOperations operations; + @Mock private MongoParameterAccessor parameterAccessor; + + /** + * @see DATAMONGO-1444 + */ + @Test + public void slicedExecutionShouldApplyQuerySettings() throws Exception { + + Query query = new Query(); + + new SlicedExecution(operations, new PageRequest(1, 10)).execute(query, Person.class, "person"); + + assertThat(query.getLimit(), is(equalTo(11))); + assertThat(query.getSkip(), is(equalTo(10))); + verify(operations).find(query, Person.class, "person"); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void pagedExecutionShouldApplyQuerySettings() throws Exception { + + Query query = new Query(); + + new PagedExecution(operations, new PageRequest(1, 10)).execute(query, Person.class, "person"); + + assertThat(query.getLimit(), is(equalTo(10))); + assertThat(query.getSkip(), is(equalTo(10))); + + verify(operations).find(query, Person.class, "person"); + verify(operations).count(query, Person.class, "person"); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void geoNearExecutionShouldApplyQuerySettings() throws Exception { + + Method geoNear = ClassUtils.getMethod(GeoRepo.class, "geoNear"); + Query query = new Query(); + when(parameterAccessor.getGeoNearLocation()).thenReturn(new Point(1, 2)); + when(parameterAccessor.getDistanceRange()).thenReturn(new Range<>(new Distance(10), new Distance(15))); + when(parameterAccessor.getPageable()).thenReturn(new PageRequest(1, 10)); + + new GeoNearExecution(operations, parameterAccessor, ClassTypeInformation.fromReturnTypeOf(geoNear)).execute(query, + Person.class, "person"); + + ArgumentCaptor queryArgumentCaptor = ArgumentCaptor.forClass(NearQuery.class); + verify(operations).geoNear(queryArgumentCaptor.capture(), eq(Person.class), eq("person")); + + NearQuery nearQuery = queryArgumentCaptor.getValue(); + 
assertThat(nearQuery.toDocument().get("near"), is(equalTo(Arrays.asList(1d, 2d)))); + assertThat(nearQuery.getSkip(), is(10)); + assertThat(nearQuery.getMinDistance(), is(equalTo(new Distance(10)))); + assertThat(nearQuery.getMaxDistance(), is(equalTo(new Distance(15)))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void geoNearExecutionShouldApplyMinimalSettings() throws Exception { + + Method geoNear = ClassUtils.getMethod(GeoRepo.class, "geoNear"); + Query query = new Query(); + when(parameterAccessor.getGeoNearLocation()).thenReturn(new Point(1, 2)); + when(parameterAccessor.getDistanceRange()).thenReturn(new Range<>(null, null)); + + new GeoNearExecution(operations, parameterAccessor, ClassTypeInformation.fromReturnTypeOf(geoNear)).execute(query, + Person.class, "person"); + + ArgumentCaptor queryArgumentCaptor = ArgumentCaptor.forClass(NearQuery.class); + verify(operations).geoNear(queryArgumentCaptor.capture(), eq(Person.class), eq("person")); + + NearQuery nearQuery = queryArgumentCaptor.getValue(); + assertThat(nearQuery.toDocument().get("near"), is(equalTo(Arrays.asList(1d, 2d)))); + assertThat(nearQuery.getSkip(), is(0)); + assertThat(nearQuery.getMinDistance(), is(nullValue())); + assertThat(nearQuery.getMaxDistance(), is(nullValue())); + } + + interface GeoRepo { + Flux> geoNear(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodUnitTests.java new file mode 100644 index 000000000..bea1b1337 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodUnitTests.java @@ -0,0 +1,248 @@ +/* + * Copyright 2016 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import static org.hamcrest.Matchers.*; +import static org.junit.Assert.*; + +import java.lang.reflect.Method; +import java.util.List; + +import org.junit.Before; +import org.junit.Test; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.User; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.repository.Address; +import org.springframework.data.mongodb.repository.Contact; +import org.springframework.data.mongodb.repository.Meta; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * Unit test for {@link ReactiveMongoQueryMethod}. 
+ * + * @author Mark Paluch + */ +public class ReactiveMongoQueryMethodUnitTests { + + MongoMappingContext context; + + @Before + public void setUp() { + context = new MongoMappingContext(); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void detectsCollectionFromRepoTypeIfReturnTypeNotAssignable() throws Exception { + + ReactiveMongoQueryMethod queryMethod = queryMethod(SampleRepository.class, "method"); + MongoEntityMetadata metadata = queryMethod.getEntityInformation(); + + assertThat(metadata.getJavaType(), is(typeCompatibleWith(Address.class))); + assertThat(metadata.getCollectionName(), is("contact")); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void detectsCollectionFromReturnTypeIfReturnTypeAssignable() throws Exception { + + MongoQueryMethod queryMethod = queryMethod(SampleRepository2.class, "method"); + MongoEntityMetadata entityInformation = queryMethod.getEntityInformation(); + + assertThat(entityInformation.getJavaType(), is(typeCompatibleWith(Person.class))); + assertThat(entityInformation.getCollectionName(), is("person")); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void discoversUserAsDomainTypeForGeoPagingQueryMethod() throws Exception { + + MongoQueryMethod queryMethod = queryMethod(PersonRepository.class, "findByLocationNear", Point.class, + Distance.class, Pageable.class); + assertThat(queryMethod.isGeoNearQuery(), is(false)); + assertThat(queryMethod.isPageQuery(), is(false)); + + queryMethod = queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class); + assertThat(queryMethod.isGeoNearQuery(), is(false)); + assertThat(queryMethod.isPageQuery(), is(false)); + assertThat(queryMethod.getEntityInformation().getJavaType(), is(typeCompatibleWith(User.class))); + + assertThat(queryMethod(PersonRepository.class, "findByEmailAddress", String.class, Point.class).isGeoNearQuery(), + is(true)); + assertThat(queryMethod(PersonRepository.class, "findByFirstname", String.class, 
Point.class).isGeoNearQuery(), + is(false)); + assertThat(queryMethod(PersonRepository.class, "findByLastname", String.class, Point.class).isGeoNearQuery(), + is(true)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test(expected = IllegalArgumentException.class) + public void rejectsNullMappingContext() throws Exception { + + Method method = PersonRepository.class.getMethod("findByFirstname", String.class, Point.class); + + new MongoQueryMethod(method, new DefaultRepositoryMetadata(PersonRepository.class), + new SpelAwareProxyProjectionFactory(), null); + } + + /** + * @see DATAMONGO-1444 + */ + @Test(expected = IllegalStateException.class) + public void rejectsMonoPageableResult() throws Exception { + queryMethod(PersonRepository.class, "findMonoByLastname", String.class, Pageable.class); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void createsMongoQueryMethodObjectForMethodReturningAnInterface() throws Exception { + queryMethod(SampleRepository2.class, "methodReturningAnInterface"); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void createsMongoQueryMethodWithEmptyMetaCorrectly() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "emptyMetaAnnotation"); + + assertThat(method.hasQueryMetaAttributes(), is(true)); + assertThat(method.getQueryMetaAttributes().hasValues(), is(false)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void createsMongoQueryMethodWithMaxExecutionTimeCorrectly() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithMaxExecutionTime"); + + assertThat(method.hasQueryMetaAttributes(), is(true)); + assertThat(method.getQueryMetaAttributes().getMaxTimeMsec(), is(100L)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void acceptsPageableMethodsUsingWrappedPage() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findMonoPageByLastname", String.class, + Pageable.class); + + 
assertThat(method.isPageQuery(), is(true)); + assertThat(method.isSliceQuery(), is(false)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void acceptsPageableMethodsUsingWrappedSlice() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findMonoSliceByLastname", String.class, + Pageable.class); + + assertThat(method.isPageQuery(), is(false)); + assertThat(method.isSliceQuery(), is(true)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void fallsBackToRepositoryDomainTypeIfMethodDoesNotReturnADomainType() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "deleteByUserName", String.class); + + assertThat(method.getEntityInformation().getJavaType(), is(typeCompatibleWith(User.class))); + } + + private ReactiveMongoQueryMethod queryMethod(Class repository, String name, Class... parameters) + throws Exception { + + Method method = repository.getMethod(name, parameters); + ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + return new ReactiveMongoQueryMethod(method, new DefaultRepositoryMetadata(repository), factory, context); + } + + interface PersonRepository extends Repository { + + Mono findMonoByLastname(String lastname, Pageable pageRequest); + + Mono> findMonoPageByLastname(String lastname, Pageable pageRequest); + + Mono> findMonoSliceByLastname(String lastname, Pageable pageRequest); + + // Misses Pageable + Flux findByLocationNear(Point point, Distance distance); + + Flux findByLocationNear(Point point, Distance distance, Pageable pageable); + + Mono> findByEmailAddress(String lastname, Point location); + + Flux findByFirstname(String firstname, Point location); + + Flux> findByLastname(String lastname, Point location); + + @Meta + Flux emptyMetaAnnotation(); + + @Meta(maxExecutionTimeMs = 100) + Flux metaWithMaxExecutionTime(); + + void deleteByUserName(String userName); + } + + interface SampleRepository extends Repository { + + List
          method(); + } + + interface SampleRepository2 extends Repository { + + List method(); + + Customer methodReturningAnInterface(); + } + + interface Customer {} +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQueryUnitTests.java new file mode 100644 index 000000000..2ef286585 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQueryUnitTests.java @@ -0,0 +1,297 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.query; + +import static org.hamcrest.Matchers.*; +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.Method; +import java.util.Collections; +import java.util.Map; + +import javax.xml.bind.DatatypeConverter; + +import org.bson.BSON; +import org.bson.Document; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.repository.Address; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.Query; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import org.springframework.data.repository.query.DefaultEvaluationContextProvider; +import org.springframework.data.repository.util.QueryExecutionConverters; +import org.springframework.expression.spel.standard.SpelExpressionParser; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * Unit tests for {@link ReactiveStringBasedMongoQuery}. 
+ * + * @author Mark Paluch + */ +@RunWith(MockitoJUnitRunner.class) +public class ReactiveStringBasedMongoQueryUnitTests { + + SpelExpressionParser PARSER = new SpelExpressionParser(); + + @Mock ReactiveMongoOperations operations; + @Mock DbRefResolver factory; + + MongoConverter converter; + + @Before + public void setUp() { + + when(operations.getConverter()).thenReturn(converter); + + this.converter = new MappingMongoConverter(factory, new MongoMappingContext()); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void bindsSimplePropertyCorrectly() throws Exception { + + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastname", String.class); + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "Matthews"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}"); + + assertThat(query.getQueryObject(), is(reference.getQueryObject())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void bindsComplexPropertyCorrectly() throws Exception { + + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByAddress", Address.class); + + Address address = new Address("Foo", "0123", "Bar"); + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, address); + + Document dbObject = new Document(); + converter.write(address, dbObject); + dbObject.remove(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + Document queryObject = new Document("address", dbObject); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery(queryObject); + + assertThat(query.getQueryObject().toJson(), is(reference.getQueryObject().toJson())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void constructsDeleteQueryCorrectly() 
throws Exception { + + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("removeByLastname", String.class); + assertThat(mongoQuery.isDeleteQuery(), is(true)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test(expected = IllegalArgumentException.class) + public void preventsDeleteAndCountFlagAtTheSameTime() throws Exception { + createQueryForMethod("invalidMethod", String.class); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldSupportFindByParameterizedCriteriaAndFields() throws Exception { + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, new Document("firstname", "first").append("lastname", "last"), Collections.singletonMap("lastname", 1)); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByParameterizedCriteriaAndFields", + Document.class, Map.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + + assertThat(query.getQueryObject(), + is(new BasicQuery("{ \"firstname\": \"first\", \"lastname\": \"last\"}").getQueryObject())); + assertThat(query.getFieldsObject(), is(new BasicQuery(null, "{ \"lastname\": 1}").getFieldsObject())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldParseQueryWithParametersInExpression() throws Exception { + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, 1, 2, 3, 4); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithParametersInExpression", int.class, + int.class, int.class, int.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + + assertThat(query.getQueryObject(), + is(new BasicQuery("{$where: 'return this.date.getUTCMonth() == 3 && this.date.getUTCDay() == 4;'}") + .getQueryObject())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldParseJsonKeyReplacementCorrectly() throws Exception { + + ReactiveStringBasedMongoQuery 
mongoQuery = createQueryForMethod("methodWithPlaceholderInKeyOfJsonStructure", + String.class, String.class); + ConvertingParameterAccessor parameterAccessor = StubParameterAccessor.getAccessor(converter, "key", "value"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(parameterAccessor); + + assertThat(query.getQueryObject(), is(new Document().append("key", "value"))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldSupportExpressionsInCustomQueries() throws Exception { + + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "Matthews"); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpression", String.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}"); + + assertThat(query.getQueryObject(), is(reference.getQueryObject())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldSupportExpressionsInCustomQueriesWithNestedObject() throws Exception { + + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, true, "param1", "param2"); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpressionAndNestedObject", + boolean.class, String.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{ \"id\" : { \"$exists\" : true}}"); + + assertThat(query.getQueryObject(), is(reference.getQueryObject())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldSupportExpressionsInCustomQueriesWithMultipleNestedObjects() throws Exception { + + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, true, "param1", "param2"); + ReactiveStringBasedMongoQuery 
mongoQuery = createQueryForMethod("findByQueryWithExpressionAndMultipleNestedObjects", + boolean.class, String.class, String.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( + "{ \"id\" : { \"$exists\" : true} , \"foo\" : 42 , \"bar\" : { \"$exists\" : false}}"); + + assertThat(query.getQueryObject(), is(reference.getQueryObject())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void shouldSupportNonQuotedBinaryDataReplacement() throws Exception { + + byte[] binaryData = "Matthews".getBytes("UTF-8"); + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, binaryData); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAsBinary", byte[].class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : { '$binary' : '" + + DatatypeConverter.printBase64Binary(binaryData) + "', '$type' : " + BSON.B_GENERAL + "}}"); + + assertThat(query.getQueryObject().toJson(), is(reference.getQueryObject().toJson())); + } + + private ReactiveStringBasedMongoQuery createQueryForMethod(String name, Class... 
parameters) throws Exception { + + DefaultConversionService conversionService = new DefaultConversionService(); + QueryExecutionConverters.registerConvertersIn(conversionService); + + Method method = SampleRepository.class.getMethod(name, parameters); + ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + ReactiveMongoQueryMethod queryMethod = new ReactiveMongoQueryMethod(method, + new DefaultRepositoryMetadata(SampleRepository.class), factory, converter.getMappingContext()); + return new ReactiveStringBasedMongoQuery(queryMethod, operations, PARSER, DefaultEvaluationContextProvider.INSTANCE, + conversionService); + } + + private interface SampleRepository extends Repository { + + @Query("{ 'lastname' : ?0 }") + Mono findByLastname(String lastname); + + @Query("{ 'lastname' : ?0 }") + Mono findByLastnameAsBinary(byte[] lastname); + + @Query("{ 'address' : ?0 }") + Mono findByAddress(Address address); + + @Query(value = "{ 'lastname' : ?0 }", delete = true) + Mono removeByLastname(String lastname); + + @Query(value = "{ 'lastname' : ?0 }", delete = true, count = true) + Mono invalidMethod(String lastname); + + @Query(value = "?0", fields = "?1") + Mono findByParameterizedCriteriaAndFields(Document criteria, Map fields); + + @Query("{$where: 'return this.date.getUTCMonth() == ?2 && this.date.getUTCDay() == ?3;'}") + Flux findByQueryWithParametersInExpression(int param1, int param2, int param3, int param4); + + @Query("{ ?0 : ?1}") + Mono methodWithPlaceholderInKeyOfJsonStructure(String keyReplacement, String valueReplacement); + + @Query("{'lastname': ?#{[0]} }") + Flux findByQueryWithExpression(String param0); + + @Query("{'id':?#{ [0] ? { $exists :true} : [1] }}") + Flux findByQueryWithExpressionAndNestedObject(boolean param0, String param1); + + @Query("{'id':?#{ [0] ? { $exists :true} : [1] }, 'foo':42, 'bar': ?#{ [0] ? 
{ $exists :false} : [1] }}") + Flux findByQueryWithExpressionAndMultipleNestedObjects(boolean param0, String param1, String param2); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactivePageImplUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactivePageImplUnitTests.java new file mode 100644 index 000000000..67e867a92 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactivePageImplUnitTests.java @@ -0,0 +1,150 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import static org.hamcrest.Matchers.*; +import static org.junit.Assert.*; + +import org.junit.Test; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.mongodb.repository.support.ReactivePageImpl; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * Unit tests for {@link ReactivePageImpl}. 
+ * + * @author Mark Paluch + */ +public class ReactivePageImplUnitTests { + + /** + * @see DATAMONGO-1444 + */ + @Test(expected = IllegalArgumentException.class) + public void preventsNullContentForAdvancedSetup() throws Exception { + new ReactivePageImpl(null, null, Mono.just(0L)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void returnsNextPageable() { + + Page page = new ReactivePageImpl<>(Flux.just(new Object()), new PageRequest(0, 1), Mono.just(10L)); + + assertThat(page.isFirst(), is(true)); + assertThat(page.hasPrevious(), is(false)); + assertThat(page.previousPageable(), is(nullValue())); + + assertThat(page.isLast(), is(false)); + assertThat(page.hasNext(), is(true)); + assertThat(page.nextPageable(), is(new PageRequest(1, 1))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void returnsContentBoundedByPageSize() { + + Page page = new ReactivePageImpl<>(Flux.just(new Object(), new Object()), new PageRequest(0, 1), Mono.just(10L)); + + assertThat(page.getContent(), hasSize(1)); + assertThat(page.hasNext(), is(true)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void returnsPreviousPageable() { + + Page page = new ReactivePageImpl<>(Flux.just(new Object()), new PageRequest(1, 1), Mono.just(2L)); + + assertThat(page.isFirst(), is(false)); + assertThat(page.hasPrevious(), is(true)); + assertThat(page.previousPageable(), is(new PageRequest(0, 1))); + + assertThat(page.isLast(), is(true)); + assertThat(page.hasNext(), is(false)); + assertThat(page.nextPageable(), is(nullValue())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void transformsPageCorrectly() { + + Page transformed = new ReactivePageImpl<>(Flux.just("foo", "bar"), new PageRequest(0, 2), Mono.just(10L)) + .map(String::length); + + assertThat(transformed.getContent(), hasSize(2)); + assertThat(transformed.getContent(), contains(3, 3)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void adaptsTotalForLastPageOnIntermediateDeletion() { + 
assertThat(new ReactivePageImpl<>(Flux.just("foo", "bar"), new PageRequest(0, 5), Mono.just(3L)).getTotalElements(), + is(2L)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void adaptsTotalForLastPageOnIntermediateInsertion() { + assertThat(new ReactivePageImpl<>(Flux.just("foo", "bar"), new PageRequest(0, 5), Mono.just(1L)).getTotalElements(), + is(2L)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void adaptsTotalForLastPageOnIntermediateDeletionOnLastPate() { + assertThat( + new ReactivePageImpl<>(Flux.just("foo", "bar"), new PageRequest(1, 10), Mono.just(13L)).getTotalElements(), + is(12L)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void adaptsTotalForLastPageOnIntermediateInsertionOnLastPate() { + assertThat( + new ReactivePageImpl<>(Flux.just("foo", "bar"), new PageRequest(1, 10), Mono.just(11L)).getTotalElements(), + is(12L)); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void doesNotAdapttotalIfPageIsEmpty() { + + assertThat(new ReactivePageImpl(Flux.empty(), new PageRequest(1, 10), Mono.just(0L)).getTotalElements(), + is(0L)); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveSliceImplUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveSliceImplUnitTests.java new file mode 100644 index 000000000..c3c019ecc --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveSliceImplUnitTests.java @@ -0,0 +1,88 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import static org.hamcrest.Matchers.*; +import static org.junit.Assert.*; + +import org.junit.Test; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Slice; + +import reactor.core.publisher.Flux; + +/** + * Unit tests for {@link ReactiveSliceImpl}. + * + * @author Mark Paluch + */ +public class ReactiveSliceImplUnitTests { + + /** + * @see DATAMONGO-1444 + */ + @Test(expected = IllegalArgumentException.class) + public void preventsNullContentForAdvancedSetup() throws Exception { + new ReactiveSliceImpl(null, null); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void returnsNextPageable() { + + Slice page = new ReactiveSliceImpl<>(Flux.just(new Object(), new Object()), new PageRequest(0, 1)); + + assertThat(page.isFirst(), is(true)); + assertThat(page.hasPrevious(), is(false)); + assertThat(page.previousPageable(), is(nullValue())); + + assertThat(page.isLast(), is(false)); + assertThat(page.hasNext(), is(true)); + assertThat(page.nextPageable(), is(new PageRequest(1, 1))); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void returnsPreviousPageable() { + + Slice page = new ReactiveSliceImpl<>(Flux.just(new Object()), new PageRequest(1, 1)); + + assertThat(page.isFirst(), is(false)); + assertThat(page.hasPrevious(), is(true)); + assertThat(page.previousPageable(), is(new PageRequest(0, 1))); + + assertThat(page.isLast(), is(true)); + assertThat(page.hasNext(), is(false)); + assertThat(page.nextPageable(), 
is(nullValue())); + } + + /** + * @see DATAMONGO-1444 + */ + @Test + public void transformsPageCorrectly() { + + Slice transformed = new ReactiveSliceImpl<>(Flux.just("foo", "bar"), new PageRequest(0, 2)) + .map(String::length); + + assertThat(transformed.getContent(), hasSize(2)); + assertThat(transformed.getContent(), contains(3, 3)); + } +} diff --git a/spring-data-mongodb/src/test/java/reactor/test/TestSubscriber.java b/spring-data-mongodb/src/test/java/reactor/test/TestSubscriber.java new file mode 100644 index 000000000..b6ca1cde6 --- /dev/null +++ b/spring-data-mongodb/src/test/java/reactor/test/TestSubscriber.java @@ -0,0 +1,1180 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package reactor.test; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLongFieldUpdater; +import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; +import java.util.function.BooleanSupplier; +import java.util.function.Consumer; +import java.util.function.Supplier; + +import org.reactivestreams.Publisher; +import org.reactivestreams.Subscriber; +import org.reactivestreams.Subscription; +import reactor.core.Fuseable; +import reactor.core.Receiver; +import reactor.core.Trackable; +import reactor.core.publisher.Operators; + + + +/** + *
          + *  ###############################################################
          + *  ###############################################################
          + *  ###############################################################
          + *
          + *  	THIS CODE IS IMPORTED FROM REACTOR-CORE BECAUSE OF
          + *  	https://github.com/reactor/reactor-core/issues/135
          + *
          + *  ###############################################################
          + *  ###############################################################
          + *  ###############################################################
          + * 
          + * + * + * A Subscriber implementation that hosts assertion tests for its state and allows + * asynchronous cancellation and requesting. + * + *

          To create a new instance of {@link TestSubscriber}, you have the choice between + * these static methods: + *

            + *
          • {@link TestSubscriber#subscribe(Publisher)}: create a new {@link TestSubscriber}, + * subscribe to it with the specified {@link Publisher} and requests an unbounded + * number of elements.
          • + *
          • {@link TestSubscriber#subscribe(Publisher, long)}: create a new {@link TestSubscriber}, + * subscribe to it with the specified {@link Publisher} and requests {@code n} elements + * (can be 0 if you want no initial demand). + *
          • {@link TestSubscriber#create()}: create a new {@link TestSubscriber} and requests + * an unbounded number of elements.
          • + *
          • {@link TestSubscriber#create(long)}: create a new {@link TestSubscriber} and + * requests {@code n} elements (can be 0 if you want no initial demand). + *
          + * + *

          If you are testing asynchronous publishers, don't forget to use one of the + * {@code await*()} methods to wait for the data to assert. + * + *

          You can extend this class but only the onNext, onError and onComplete methods can be overridden. + * You can call {@link #request(long)} and {@link #cancel()} from any thread or from within + * the overridable methods but you should avoid calling the assertXXX methods asynchronously. + * + *

          Usage: + *

          + * {@code
          + * TestSubscriber
          + *   .subscribe(publisher)
          + *   .await()
          + *   .assertValues("ABC", "DEF");
          + * }
          + * 
          + * + * @param the value type. + * + * @author Sebastien Deleuze + * @author David Karnok + * @author Anatoly Kadyshev + * @author Stephane Maldini + * @author Brian Clozel + */ +public class TestSubscriber + implements Subscriber, Subscription, Trackable, Receiver { + + /** + * Default timeout for waiting next values to be received + */ + public static final Duration DEFAULT_VALUES_TIMEOUT = Duration.ofSeconds(3); + + @SuppressWarnings("rawtypes") + private static final AtomicLongFieldUpdater REQUESTED = + AtomicLongFieldUpdater.newUpdater(TestSubscriber.class, "requested"); + + @SuppressWarnings("rawtypes") + private static final AtomicReferenceFieldUpdater NEXT_VALUES = + AtomicReferenceFieldUpdater.newUpdater(TestSubscriber.class, List.class, + "values"); + + @SuppressWarnings("rawtypes") + private static final AtomicReferenceFieldUpdater S = + AtomicReferenceFieldUpdater.newUpdater(TestSubscriber.class, Subscription.class, "s"); + + + private final List errors = new LinkedList<>(); + + private final CountDownLatch cdl = new CountDownLatch(1); + + volatile Subscription s; + + volatile long requested; + + volatile List values = new LinkedList<>(); + + /** + * The fusion mode to request. + */ + private int requestedFusionMode = -1; + + /** + * The established fusion mode. + */ + private volatile int establishedFusionMode = -1; + + /** + * The fuseable QueueSubscription in case a fusion mode was specified. 
+ */ + private Fuseable.QueueSubscription qs; + + private int subscriptionCount = 0; + + private int completionCount = 0; + + private volatile long valueCount = 0L; + + private volatile long nextValueAssertedCount = 0L; + + private Duration valuesTimeout = DEFAULT_VALUES_TIMEOUT; + + private boolean valuesStorage = true; + +// ============================================================================================================== +// Static methods +// ============================================================================================================== + + /** + * Blocking method that waits until {@code conditionSupplier} returns true, or if it + * does not before the specified timeout, throws an {@link AssertionError} with the + * specified error message supplier. + * + * @param timeout the timeout duration + * @param errorMessageSupplier the error message supplier + * @param conditionSupplier condition to break out of the wait loop + * + * @throws AssertionError + */ + public static void await(Duration timeout, Supplier errorMessageSupplier, + BooleanSupplier conditionSupplier) { + + Objects.requireNonNull(errorMessageSupplier); + Objects.requireNonNull(conditionSupplier); + Objects.requireNonNull(timeout); + + long timeoutNs = timeout.toNanos(); + long startTime = System.nanoTime(); + do { + if (conditionSupplier.getAsBoolean()) { + return; + } + try { + Thread.sleep(100); + } + catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new RuntimeException(e); + } + } + while (System.nanoTime() - startTime < timeoutNs); + throw new AssertionError(errorMessageSupplier.get()); + } + + /** + * Blocking method that waits until {@code conditionSupplier} returns true, or if it + * does not before the specified timeout, throw an {@link AssertionError} with the + * specified error message. 
+ * + * @param timeout the timeout duration + * @param errorMessage the error message + * @param conditionSupplier condition to break out of the wait loop + * + * @throws AssertionError + */ + public static void await(Duration timeout, + final String errorMessage, + BooleanSupplier conditionSupplier) { + await(timeout, new Supplier() { + @Override + public String get() { + return errorMessage; + } + }, conditionSupplier); + } + + /** + * Create a new {@link TestSubscriber} that requests an unbounded number of elements. + *

          Be sure at least a publisher has subscribed to it via {@link Publisher#subscribe(Subscriber)} + * before using assert methods. + * @see #subscribe(Publisher) + * @param <T> the observed value type + * @return a fresh TestSubscriber instance + */ + public static TestSubscriber create() { + return new TestSubscriber<>(); + } + + /** + * Create a new {@link TestSubscriber} that requests initially {@code n} elements. You + * can then manage the demand with {@link Subscription#request(long)}. + *

          Be sure at least a publisher has subscribed to it via {@link Publisher#subscribe(Subscriber)} + * before using assert methods. + * @param n Number of elements to request (can be 0 if you want no initial demand). + * @see #subscribe(Publisher, long) + * @param <T> the observed value type + * @return a fresh TestSubscriber instance + */ + public static TestSubscriber create(long n) { + return new TestSubscriber<>(n); + } + + /** + * Create a new {@link TestSubscriber} that requests an unbounded number of elements, + * and make the specified {@code publisher} subscribe to it. + * @param publisher The publisher to subscribe with + * @param <T> the observed value type + * @return a fresh TestSubscriber instance + */ + public static TestSubscriber subscribe(Publisher publisher) { + TestSubscriber subscriber = new TestSubscriber<>(); + publisher.subscribe(subscriber); + return subscriber; + } + + /** + * Create a new {@link TestSubscriber} that requests initially {@code n} elements, + * and make the specified {@code publisher} subscribe to it. You can then manage the + * demand with {@link Subscription#request(long)}. + * @param publisher The publisher to subscribe with + * @param n Number of elements to request (can be 0 if you want no initial demand).
+ * @param the observed value type + * @return a fresh TestSubscriber instance + */ + public static TestSubscriber subscribe(Publisher publisher, long n) { + TestSubscriber subscriber = new TestSubscriber<>(n); + publisher.subscribe(subscriber); + return subscriber; + } + +// ============================================================================================================== +// Private constructors +// ============================================================================================================== + + private TestSubscriber() { + this(Long.MAX_VALUE); + } + + private TestSubscriber(long n) { + if (n < 0) { + throw new IllegalArgumentException("initialRequest >= required but it was " + n); + } + REQUESTED.lazySet(this, n); + } + +// ============================================================================================================== +// Configuration +// ============================================================================================================== + + + /** + * Enable or disabled the values storage. It is enabled by default, and can be disable + * in order to be able to perform performance benchmarks or tests with a huge amount + * values. + * @param enabled enable value storage? + * @return this + */ + public final TestSubscriber configureValuesStorage(boolean enabled) { + this.valuesStorage = enabled; + return this; + } + + /** + * Configure the timeout in seconds for waiting next values to be received (3 seconds + * by default). 
+ * @param timeout the new default value timeout duration + * @return this + */ + public final TestSubscriber configureValuesTimeout(Duration timeout) { + this.valuesTimeout = timeout; + return this; + } + + /** + * Returns the established fusion mode or -1 if it was not enabled + * + * @return the fusion mode, see Fuseable constants + */ + public final int establishedFusionMode() { + return establishedFusionMode; + } + +// ============================================================================================================== +// Assertions +// ============================================================================================================== + + /** + * Assert a complete successfully signal has been received. + * @return this + */ + public final TestSubscriber assertComplete() { + assertNoError(); + int c = completionCount; + if (c == 0) { + throw new AssertionError("Not completed", null); + } + if (c > 1) { + throw new AssertionError("Multiple completions: " + c, null); + } + return this; + } + + /** + * Assert the specified values have been received. Values storage should be enabled to + * use this method. + * @param expectedValues the values to assert + * @see #configureValuesStorage(boolean) + * @return this + */ + public final TestSubscriber assertContainValues(Set expectedValues) { + if (!valuesStorage) { + throw new IllegalStateException( + "Using assertNoValues() requires enabling values storage"); + } + if (expectedValues.size() > values.size()) { + throw new AssertionError("Actual contains fewer elements" + values, null); + } + + Iterator expected = expectedValues.iterator(); + + for (; ; ) { + boolean n2 = expected.hasNext(); + if (n2) { + T t2 = expected.next(); + if (!values.contains(t2)) { + throw new AssertionError("The element is not contained in the " + + "received resuls" + + " = " + valueAndClass(t2), null); + } + } + else{ + break; + } + } + return this; + } + + /** + * Assert an error signal has been received. 
+ * @return this + */ + public final TestSubscriber assertError() { + assertNotComplete(); + int s = errors.size(); + if (s == 0) { + throw new AssertionError("No error", null); + } + if (s > 1) { + throw new AssertionError("Multiple errors: " + s, null); + } + return this; + } + + /** + * Assert an error signal has been received. + * @param clazz The class of the exception contained in the error signal + * @return this + */ + public final TestSubscriber assertError(Class clazz) { + assertNotComplete(); + int s = errors.size(); + if (s == 0) { + throw new AssertionError("No error", null); + } + if (s == 1) { + Throwable e = errors.get(0); + if (!clazz.isInstance(e)) { + throw new AssertionError("Error class incompatible: expected = " + + clazz + ", actual = " + e, null); + } + } + if (s > 1) { + throw new AssertionError("Multiple errors: " + s, null); + } + return this; + } + + public final TestSubscriber assertErrorMessage(String message) { + assertNotComplete(); + int s = errors.size(); + if (s == 0) { + assertionError("No error", null); + } + if (s == 1) { + if (!Objects.equals(message, + errors.get(0) + .getMessage())) { + assertionError("Error class incompatible: expected = \"" + message + + "\", actual = \"" + errors.get(0).getMessage() + "\"", null); + } + } + if (s > 1) { + assertionError("Multiple errors: " + s, null); + } + + return this; + } + + /** + * Assert an error signal has been received. + * @param expectation A method that can verify the exception contained in the error signal + * and throw an exception (like an {@link AssertionError}) if the exception is not valid. 
+ * @return this + */ + public final TestSubscriber assertErrorWith(Consumer expectation) { + assertNotComplete(); + int s = errors.size(); + if (s == 0) { + throw new AssertionError("No error", null); + } + if (s == 1) { + expectation.accept(errors.get(0)); + } + if (s > 1) { + throw new AssertionError("Multiple errors: " + s, null); + } + return this; + } + + /** + * Assert that the upstream was a Fuseable source. + * + * @return this + */ + public final TestSubscriber assertFuseableSource() { + if (qs == null) { + throw new AssertionError("Upstream was not Fuseable"); + } + return this; + } + + /** + * Assert that the fusion mode was granted. + * + * @return this + */ + public final TestSubscriber assertFusionEnabled() { + if (establishedFusionMode != Fuseable.SYNC && establishedFusionMode != Fuseable.ASYNC) { + throw new AssertionError("Fusion was not enabled"); + } + return this; + } + + public final TestSubscriber assertFusionMode(int expectedMode) { + if (establishedFusionMode != expectedMode) { + throw new AssertionError("Wrong fusion mode: expected: " + fusionModeName( + expectedMode) + ", actual: " + fusionModeName(establishedFusionMode)); + } + return this; + } + + /** + * Assert that the fusion mode was granted. + * + * @return this + */ + public final TestSubscriber assertFusionRejected() { + if (establishedFusionMode != Fuseable.NONE) { + throw new AssertionError("Fusion was granted"); + } + return this; + } + + /** + * Assert no error signal has been received. + * @return this + */ + public final TestSubscriber assertNoError() { + int s = errors.size(); + if (s == 1) { + Throwable e = errors.get(0); + String valueAndClass = e == null ? null : e + " (" + e.getClass().getSimpleName() + ")"; + throw new AssertionError("Error present: " + valueAndClass, null); + } + if (s > 1) { + throw new AssertionError("Multiple errors: " + s, null); + } + return this; + } + + /** + * Assert no values have been received. 
+ * + * @return this + */ + public final TestSubscriber assertNoValues() { + if (valueCount != 0) { + throw new AssertionError("No values expected but received: [length = " + values.size() + "] " + values, + null); + } + return this; + } + + /** + * Assert that the upstream was not a Fuseable source. + * @return this + */ + public final TestSubscriber assertNonFuseableSource() { + if (qs != null) { + throw new AssertionError("Upstream was Fuseable"); + } + return this; + } + + /** + * Assert no complete successfully signal has been received. + * @return this + */ + public final TestSubscriber assertNotComplete() { + int c = completionCount; + if (c == 1) { + throw new AssertionError("Completed", null); + } + if (c > 1) { + throw new AssertionError("Multiple completions: " + c, null); + } + return this; + } + + /** + * Assert no subscription occurred. + * + * @return this + */ + public final TestSubscriber assertNotSubscribed() { + int s = subscriptionCount; + + if (s == 1) { + throw new AssertionError("OnSubscribe called once", null); + } + if (s > 1) { + throw new AssertionError("OnSubscribe called multiple times: " + s, null); + } + + return this; + } + + /** + * Assert no complete successfully or error signal has been received. + * @return this + */ + public final TestSubscriber assertNotTerminated() { + if (cdl.getCount() == 0) { + throw new AssertionError("Terminated", null); + } + return this; + } + + /** + * Assert subscription occurred (once). + * @return this + */ + public final TestSubscriber assertSubscribed() { + int s = subscriptionCount; + + if (s == 0) { + throw new AssertionError("OnSubscribe not called", null); + } + if (s > 1) { + throw new AssertionError("OnSubscribe called multiple times: " + s, null); + } + + return this; + } + + /** + * Assert either complete successfully or error signal has been received. 
+ * @return this + */ + public final TestSubscriber assertTerminated() { + if (cdl.getCount() != 0) { + throw new AssertionError("Not terminated", null); + } + return this; + } + + /** + * Assert {@code n} values has been received. + * + * @param n the expected value count + * + * @return this + */ + public final TestSubscriber assertValueCount(long n) { + if (valueCount != n) { + throw new AssertionError("Different value count: expected = " + n + ", actual = " + valueCount, + null); + } + return this; + } + + /** + * Assert the specified values have been received in the same order read by the + * passed {@link Iterable}. Values storage + * should be enabled to + * use this method. + * @param expectedSequence the values to assert + * @see #configureValuesStorage(boolean) + * @return this + */ + public final TestSubscriber assertValueSequence(Iterable expectedSequence) { + if (!valuesStorage) { + throw new IllegalStateException("Using assertNoValues() requires enabling values storage"); + } + Iterator actual = values.iterator(); + Iterator expected = expectedSequence.iterator(); + int i = 0; + for (; ; ) { + boolean n1 = actual.hasNext(); + boolean n2 = expected.hasNext(); + if (n1 && n2) { + T t1 = actual.next(); + T t2 = expected.next(); + if (!Objects.equals(t1, t2)) { + throw new AssertionError("The element with index " + i + " does not match: expected = " + valueAndClass(t2) + ", actual = " + + valueAndClass( + t1), null); + } + i++; + } else if (n1 && !n2) { + throw new AssertionError("Actual contains more elements" + values, null); + } else if (!n1 && n2) { + throw new AssertionError("Actual contains fewer elements: " + values, null); + } else { + break; + } + } + return this; + } + + /** + * Assert the specified values have been received in the declared order. Values + * storage should be enabled to use this method. 
+ * + * @param expectedValues the values to assert + * + * @return this + * + * @see #configureValuesStorage(boolean) + */ + @SafeVarargs + public final TestSubscriber assertValues(T... expectedValues) { + return assertValueSequence(Arrays.asList(expectedValues)); + } + + /** + * Assert the specified values have been received in the declared order. Values + * storage should be enabled to use this method. + * + * @param expectations One or more methods that can verify the values and throw a + * exception (like an {@link AssertionError}) if the value is not valid. + * + * @return this + * + * @see #configureValuesStorage(boolean) + */ + @SafeVarargs + public final TestSubscriber assertValuesWith(Consumer... expectations) { + if (!valuesStorage) { + throw new IllegalStateException( + "Using assertNoValues() requires enabling values storage"); + } + final int expectedValueCount = expectations.length; + if (expectedValueCount != values.size()) { + throw new AssertionError("Different value count: expected = " + expectedValueCount + ", actual = " + valueCount, null); + } + for (int i = 0; i < expectedValueCount; i++) { + Consumer consumer = expectations[i]; + T actualValue = values.get(i); + consumer.accept(actualValue); + } + return this; + } + +// ============================================================================================================== +// Await methods +// ============================================================================================================== + + /** + * Blocking method that waits until a complete successfully or error signal is received. + * @return this + */ + public final TestSubscriber await() { + if (cdl.getCount() == 0) { + return this; + } + try { + cdl.await(); + } catch (InterruptedException ex) { + throw new AssertionError("Wait interrupted", ex); + } + return this; + } + + /** + * Blocking method that waits until a complete successfully or error signal is received + * or until a timeout occurs. 
+ * @param timeout The timeout value + * @return this + */ + public final TestSubscriber await(Duration timeout) { + if (cdl.getCount() == 0) { + return this; + } + try { + if (!cdl.await(timeout.toMillis(), TimeUnit.MILLISECONDS)) { + throw new AssertionError("No complete or error signal before timeout"); + } + return this; + } + catch (InterruptedException ex) { + throw new AssertionError("Wait interrupted", ex); + } + } + + /** + * Blocking method that waits until {@code n} next values have been received. + * + * @param n the value count to assert + * + * @return this + */ + public final TestSubscriber awaitAndAssertNextValueCount(final long n) { + await(valuesTimeout, () -> { + if(valuesStorage){ + return String.format("%d out of %d next values received within %d, " + + "values : %s", + valueCount - nextValueAssertedCount, + n, + valuesTimeout.toMillis(), + values.toString() + ); + } + return String.format("%d out of %d next values received within %d", + valueCount - nextValueAssertedCount, + n, + valuesTimeout.toMillis()); + }, () -> valueCount >= (nextValueAssertedCount + n)); + nextValueAssertedCount += n; + return this; + } + + /** + * Blocking method that waits until {@code n} next values have been received (n is the + * number of values provided) to assert them. + * + * @param values the values to assert + * + * @return this + */ + @SafeVarargs + @SuppressWarnings("unchecked") + public final TestSubscriber awaitAndAssertNextValues(T... 
values) { + final int expectedNum = values.length; + final List> expectations = new ArrayList<>(); + for (int i = 0; i < expectedNum; i++) { + final T expectedValue = values[i]; + expectations.add(actualValue -> { + if (!actualValue.equals(expectedValue)) { + throw new AssertionError(String.format( + "Expected Next signal: %s, but got: %s", + expectedValue, + actualValue)); + } + }); + } + awaitAndAssertNextValuesWith(expectations.toArray((Consumer[]) new Consumer[0])); + return this; + } + + /** + * Blocking method that waits until {@code n} next values have been received + * (n is the number of expectations provided) to assert them. + * @param expectations One or more methods that can verify the values and throw a + * exception (like an {@link AssertionError}) if the value is not valid. + * @return this + */ + @SafeVarargs + public final TestSubscriber awaitAndAssertNextValuesWith(Consumer... expectations) { + valuesStorage = true; + final int expectedValueCount = expectations.length; + await(valuesTimeout, () -> { + if(valuesStorage){ + return String.format("%d out of %d next values received within %d, " + + "values : %s", + valueCount - nextValueAssertedCount, + expectedValueCount, + valuesTimeout.toMillis(), + values.toString() + ); + } + return String.format("%d out of %d next values received within %d ms", + valueCount - nextValueAssertedCount, + expectedValueCount, + valuesTimeout.toMillis()); + }, () -> valueCount >= (nextValueAssertedCount + expectedValueCount)); + List nextValuesSnapshot; + List empty = new ArrayList<>(); + for(;;){ + nextValuesSnapshot = values; + if(NEXT_VALUES.compareAndSet(this, values, empty)){ + break; + } + } + if (nextValuesSnapshot.size() < expectedValueCount) { + throw new AssertionError(String.format("Expected %d number of signals but received %d", + expectedValueCount, + nextValuesSnapshot.size())); + } + for (int i = 0; i < expectedValueCount; i++) { + Consumer consumer = expectations[i]; + T actualValue = 
nextValuesSnapshot.get(i); + consumer.accept(actualValue); + } + nextValueAssertedCount += expectedValueCount; + return this; + } + +// ============================================================================================================== +// Overrides +// ============================================================================================================== + + @Override + public void cancel() { + Subscription a = s; + if (a != Operators.cancelledSubscription()) { + a = S.getAndSet(this, Operators.cancelledSubscription()); + if (a != null && a != Operators.cancelledSubscription()) { + a.cancel(); + } + } + } + + @Override + public final boolean isCancelled() { + return s == Operators.cancelledSubscription(); + } + + @Override + public final boolean isStarted() { + return s != null; + } + + @Override + public final boolean isTerminated() { + return isCancelled(); + } + + @Override + public void onComplete() { + completionCount++; + cdl.countDown(); + } + + @Override + public void onError(Throwable t) { + errors.add(t); + cdl.countDown(); + } + + @Override + public void onNext(T t) { + if (establishedFusionMode == Fuseable.ASYNC) { + for (; ; ) { + t = qs.poll(); + if (t == null) { + break; + } + valueCount++; + if (valuesStorage) { + List nextValuesSnapshot; + for (; ; ) { + nextValuesSnapshot = values; + nextValuesSnapshot.add(t); + if (NEXT_VALUES.compareAndSet(this, + nextValuesSnapshot, + nextValuesSnapshot)) { + break; + } + } + } + } + } + else { + valueCount++; + if (valuesStorage) { + List nextValuesSnapshot; + for (; ; ) { + nextValuesSnapshot = values; + nextValuesSnapshot.add(t); + if (NEXT_VALUES.compareAndSet(this, + nextValuesSnapshot, + nextValuesSnapshot)) { + break; + } + } + } + } + } + + @Override + @SuppressWarnings("unchecked") + public void onSubscribe(Subscription s) { + subscriptionCount++; + int requestMode = requestedFusionMode; + if (requestMode >= 0) { + if (!setWithoutRequesting(s)) { + if (!isCancelled()) { + 
errors.add(new IllegalStateException("Subscription already set: " + + subscriptionCount)); + } + } else { + if (s instanceof Fuseable.QueueSubscription) { + this.qs = (Fuseable.QueueSubscription)s; + + int m = qs.requestFusion(requestMode); + establishedFusionMode = m; + + if (m == Fuseable.SYNC) { + for (;;) { + T v = qs.poll(); + if (v == null) { + onComplete(); + break; + } + + onNext(v); + } + } + else { + requestDeferred(); + } + } + else { + requestDeferred(); + } + } + } else { + if (!set(s)) { + if (!isCancelled()) { + errors.add(new IllegalStateException("Subscription already set: " + + subscriptionCount)); + } + } + } + } + + @Override + public void request(long n) { + if (Operators.validate(n)) { + if (establishedFusionMode != Fuseable.SYNC) { + normalRequest(n); + } + } + } + + @Override + public final long requestedFromDownstream() { + return requested; + } + + /** + * Setup what fusion mode should be requested from the incomining + * Subscription if it happens to be QueueSubscription + * @param requestMode the mode to request, see Fuseable constants + * @return this + */ + public final TestSubscriber requestedFusionMode(int requestMode) { + this.requestedFusionMode = requestMode; + return this; + } + + @Override + public Subscription upstream() { + return s; + } + + +// ============================================================================================================== +// Non public methods +// ============================================================================================================== + + protected final void normalRequest(long n) { + Subscription a = s; + if (a != null) { + a.request(n); + } else { + Operators.addAndGet(REQUESTED, this, n); + + a = s; + + if (a != null) { + long r = REQUESTED.getAndSet(this, 0L); + + if (r != 0L) { + a.request(r); + } + } + } + } + + /** + * Requests the deferred amount if not zero. 
+ */ + protected final void requestDeferred() { + long r = REQUESTED.getAndSet(this, 0L); + + if (r != 0L) { + s.request(r); + } + } + + /** + * Atomically sets the single subscription and requests the missed amount from it. + * + * @param s + * @return false if this arbiter is cancelled or there was a subscription already set + */ + protected final boolean set(Subscription s) { + Objects.requireNonNull(s, "s"); + Subscription a = this.s; + if (a == Operators.cancelledSubscription()) { + s.cancel(); + return false; + } + if (a != null) { + s.cancel(); + Operators.reportSubscriptionSet(); + return false; + } + + if (S.compareAndSet(this, null, s)) { + + long r = REQUESTED.getAndSet(this, 0L); + + if (r != 0L) { + s.request(r); + } + + return true; + } + + a = this.s; + + if (a != Operators.cancelledSubscription()) { + s.cancel(); + return false; + } + + Operators.reportSubscriptionSet(); + return false; + } + + /** + * Sets the Subscription once but does not request anything. + * @param s the Subscription to set + * @return true if successful, false if the current subscription is not null + */ + protected final boolean setWithoutRequesting(Subscription s) { + Objects.requireNonNull(s, "s"); + for (;;) { + Subscription a = this.s; + if (a == Operators.cancelledSubscription()) { + s.cancel(); + return false; + } + if (a != null) { + s.cancel(); + Operators.reportSubscriptionSet(); + return false; + } + + if (S.compareAndSet(this, null, s)) { + return true; + } + } + } + + /** + * Prepares and throws an AssertionError exception based on the message, cause, the + * active state and the potential errors so far. 
+ * + * @param message the message + * @param cause the optional Throwable cause + * + * @throws AssertionError as expected + */ + protected final void assertionError(String message, Throwable cause) { + StringBuilder b = new StringBuilder(); + + if (cdl.getCount() != 0) { + b.append("(active) "); + } + b.append(message); + + List err = errors; + if (!err.isEmpty()) { + b.append(" (+ ") + .append(err.size()) + .append(" errors)"); + } + AssertionError e = new AssertionError(b.toString(), cause); + + for (Throwable t : err) { + e.addSuppressed(t); + } + + throw e; + } + + protected final String fusionModeName(int mode) { + switch (mode) { + case -1: + return "Disabled"; + case Fuseable.NONE: + return "None"; + case Fuseable.SYNC: + return "Sync"; + case Fuseable.ASYNC: + return "Async"; + default: + return "Unknown(" + mode + ")"; + } + } + + protected final String valueAndClass(Object o) { + if (o == null) { + return null; + } + return o + " (" + o.getClass().getSimpleName() + ")"; + } + +} \ No newline at end of file diff --git a/spring-data-mongodb/src/test/resources/reactive-infrastructure.xml b/spring-data-mongodb/src/test/resources/reactive-infrastructure.xml new file mode 100644 index 000000000..09059b8fc --- /dev/null +++ b/spring-data-mongodb/src/test/resources/reactive-infrastructure.xml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + diff --git a/src/main/asciidoc/index.adoc b/src/main/asciidoc/index.adoc index aee79f8ae..22dbd91be 100644 --- a/src/main/asciidoc/index.adoc +++ b/src/main/asciidoc/index.adoc @@ -26,7 +26,9 @@ include::{spring-data-commons-docs}/repositories.adoc[] :leveloffset: +1 include::reference/introduction.adoc[] include::reference/mongodb.adoc[] +include::reference/reactive-mongodb.adoc[] include::reference/mongo-repositories.adoc[] +include::reference/reactive-mongo-repositories.adoc[] include::{spring-data-commons-docs}/auditing.adoc[] include::reference/mongo-auditing.adoc[] include::reference/mapping.adoc[] diff --git 
a/src/main/asciidoc/reference/reactive-mongo-repositories.adoc b/src/main/asciidoc/reference/reactive-mongo-repositories.adoc new file mode 100644 index 000000000..bce3e8d4b --- /dev/null +++ b/src/main/asciidoc/reference/reactive-mongo-repositories.adoc @@ -0,0 +1,226 @@ +[[mongo.reactive.repositories]] += Reactive MongoDB repositories + +[[mongo.reactive.repositories.intro]] +== Introduction + +This chapter will point out the specialties for reactive repository support for MongoDB. This builds on the core repository support explained in <>. So make sure you've got a sound understanding of the basic concepts explained there. + +[[mongo.reactive.repositories.libraries]] +== Reactive Composition Libraries + +The reactive space offers various reactive composition libraries. The most common libraries are https://github.com/ReactiveX/RxJava[RxJava] and https://projectreactor.io/[Project Reactor]. + +Spring Data MongoDB is built on top of the MongoDB Reactive Streams driver to provide maximal interoperability relying on the http://www.reactive-streams.org/[Reactive Streams] initiative. Static APIs such as `ReactiveMongoOperations` are provided by using Project Reactor's `Flux` and `Mono` types. Project Reactor offers various adapters to convert reactive wrapper types (`Flux` to `Observable` and vice versa) but conversion can easily clutter your code. + +Spring Data's Repository abstraction is a dynamic API, mostly defined by you and your requirements, as you're declaring query methods. Reactive MongoDB repositories can be either implemented using RxJava or Project Reactor wrapper types by simply extending from one of the library-specific repository interfaces: + +* `ReactiveCrudRepository` +* `ReactivePagingAndSortingRepository` +* `RxJavaCrudRepository` +* `RxJavaPagingAndSortingRepository` + +Spring Data converts reactive wrapper types behind the scenes so that you can stick to your favorite composition library. 
+ +[[mongo.reactive.repositories.usage]] +== Usage + +To access domain entities stored in a MongoDB you can leverage our sophisticated repository support that eases implementing those quite significantly. To do so, simply create an interface for your repository: + +.Sample Person entity +==== +[source,java] +---- +public class Person { + + @Id + private String id; + private String firstname; + private String lastname; + private Address address; + + // … getters and setters omitted +} +---- +==== + +We have a quite simple domain object here. Note that it has a property named `id` of type `ObjectId`. The default serialization mechanism used in `MongoTemplate` (which is backing the repository support) regards properties named id as document id. Currently we support `String`, `ObjectId` and `BigInteger` as id-types. + +.Basic repository interface to persist Person entities +==== +[source] +---- +public interface ReactivePersonRepository extends ReactivePagingAndSortingRepository { + + Flux findByFirstname(String firstname); + + Flux findByFirstname(Publisher firstname); + + Flux findByFirstnameOrderByLastname(String firstname, Pageable pageable); + + Mono findByFirstnameAndLastname(String firstname, String lastname); +} +---- +==== + +For JavaConfig use the `@EnableReactiveMongoRepositories` annotation. The annotation carries the very same attributes like the namespace element. If no base package is configured the infrastructure will scan the package of the annotated configuration class. + +NOTE: MongoDB uses two different drivers for blocking and reactive (non-blocking) data access. It's required to create a connection using the Reactive Streams driver to provide the required infrastructure for Spring Data's Reactive MongoDB support hence you're required to provide a separate Configuration for MongoDB's Reactive Streams driver. 
Please also note that your application will operate on two different connections if using Reactive and Blocking Spring Data MongoDB Templates and Repositories. + +.JavaConfig for repositories +==== +[source,java] +---- +@Configuration +@EnableReactiveMongoRepositories +class ApplicationConfig extends AbstractReactiveMongoConfiguration { + + @Override + protected String getDatabaseName() { + return "e-store"; + } + + @Override + public MongoClient mongoClient() { + return MongoClients.create(); + } + + @Override + protected String getMappingBasePackage() { + return "com.oreilly.springdata.mongodb"; + } +} +---- +==== + +As our domain repository extends `ReactivePagingAndSortingRepository` it provides you with CRUD operations as well as methods for paginated and sorted access to the entities. Working with the repository instance is just a matter of dependency injecting it into a client. So accessing the second page of `Person` s at a page size of 10 would simply look something like this: + +.Paging access to Person entities +==== +[source,java] +---- +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration +public class PersonRepositoryTests { + + @Autowired ReactivePersonRepository repository; + + @Test + public void readsFirstPageCorrectly() { + + Mono> persons = repository.findAll(new PageRequest(0, 10)); + } + + @Test + public void readsFirstPageAsStream() { + + Flux persons = repository.findAll(new PageRequest(0, 10)); + } +} +---- +==== + +The sample creates an application context with Spring's unit test support which will perform annotation based dependency injection into test cases. Inside the test method we simply use the repository to query the datastore. We hand the repository a `PageRequest` instance that requests the first page of persons at a page size of 10. + +[[mongo.reactive.repositories.features]] +== Features + +Spring Data's Reactive MongoDB support comes with a reduced feature set compared to the blocking <>.
+ +Following features are supported: + +* Query Methods using <> +* <> +* <> +* <> +* <> +* <> + +Reactive Repositories do not support Type-safe Query methods using QueryDSL. + +[[mongodb.reactive.repositories.queries.geo-spatial]] +=== Geo-spatial repository queries + +As you've just seen there are a few keywords triggering geo-spatial operations within a MongoDB query. The `Near` keyword allows some further modification. Let's have a look at some examples: + +.Advanced `Near` queries +==== +[source,java] +---- +public interface PersonRepository extends ReactiveMongoRepository + + // { 'location' : { '$near' : [point.x, point.y], '$maxDistance' : distance}} + Flux findByLocationNear(Point location, Distance distance); +} +---- +==== + +Adding a `Distance` parameter to the query method allows restricting results to those within the given distance. If the `Distance` was set up containing a `Metric` we will transparently use `$nearSphere` instead of `$near`. + +NOTE: Reactive Geo-spatial repository queries support the domain type and `GeoResult` results within a reactive wrapper type. `GeoPage` and `GeoResults` are not supported as they contradict the deferred result approach with pre-calculating the average distance. However, you can still pass in a `Pageable` argument to page results yourself. + +.Using `Distance` with `Metrics` +==== +[source,java] +---- +Point point = new Point(43.7, 48.8); +Distance distance = new Distance(200, Metrics.KILOMETERS); +… = repository.findByLocationNear(point, distance); +// {'location' : {'$nearSphere' : [43.7, 48.8], '$maxDistance' : 0.03135711885774796}} +---- +==== + +As you can see using a `Distance` equipped with a `Metric` causes `$nearSphere` clause to be added instead of a plain `$near`. Beyond that the actual distance gets calculated according to the `Metrics` used. + +NOTE: Using `@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE)` on the target property forces usage of `$nearSphere` operator.
+ +==== Geo-near queries + +[source,java] +---- +public interface PersonRepository extends ReactiveMongoRepository + + // {'geoNear' : 'location', 'near' : [x, y] } + Flux> findByLocationNear(Point location); + + // No metric: {'geoNear' : 'person', 'near' : [x, y], maxDistance : distance } + // Metric: {'geoNear' : 'person', 'near' : [x, y], 'maxDistance' : distance, + // 'distanceMultiplier' : metric.multiplier, 'spherical' : true } + Flux> findByLocationNear(Point location, Distance distance); + + // Metric: {'geoNear' : 'person', 'near' : [x, y], 'minDistance' : min, + // 'maxDistance' : max, 'distanceMultiplier' : metric.multiplier, + // 'spherical' : true } + Flux> findByLocationNear(Point location, Distance min, Distance max); + + // {'geoNear' : 'location', 'near' : [x, y] } + Flux> findByLocationNear(Point location); +} +---- + +[[mongo.reactive.repositories.infinite-streams]] +== Infinite Streams + +By default, MongoDB will automatically close a cursor when the client has exhausted all results in the cursor. Closing a cursors turns a Stream into a finite stream. However, for capped collections you may use a https://docs.mongodb.com/manual/core/tailable-cursors/[Tailable Cursor] that remains open after the client exhausts the results in the initial cursor. Using Tailable Cursors with a reactive approach allows construction of infinite streams. A Tailable Cursor remains open until it's closed. It emits data as data arrives in a capped collection. Using Tailable Cursors with Collections is not possible as its result would never complete. + +Spring Data MongoDB Reactive Repository support supports infinite streams by annotating a query method with `@InfiniteStream`. This works for methods returning `Flux` or `Observable` wrapper types. 
+ +[source,java] +---- + +public interface PersonRepository extends ReactiveMongoRepository { + + @InfiniteStream + Flux findByFirstname(String firstname); + +} + +Flux stream = repository.findByFirstname("Joe"); + +Cancellation cancellation = stream.doOnNext(person -> System.out.println(person)).subscribe(); + +// … + +// Later: Dispose the stream +cancellation.dispose(); +---- diff --git a/src/main/asciidoc/reference/reactive-mongodb.adoc b/src/main/asciidoc/reference/reactive-mongodb.adoc new file mode 100644 index 000000000..28ea856d5 --- /dev/null +++ b/src/main/asciidoc/reference/reactive-mongodb.adoc @@ -0,0 +1,542 @@ +[[mongo.reactive]] += Reactive MongoDB support + +The reactive MongoDB support contains a basic set of features which are summarized below. + +* Spring configuration support using Java based @Configuration classes a Mongo client instance and replica sets +* `ReactiveMongoTemplate` helper class that increases productivity using Mongo operations in a reactive manner. Includes integrated object mapping between documents and POJOs. +* Exception translation into Spring's portable Data Access Exception hierarchy +* Feature Rich Object Mapping integrated with Spring's Conversion Service +* Annotation based mapping metadata but extensible to support other metadata formats +* Persistence and mapping lifecycle events +* Java based Query, Criteria, and Update DSLs +* Automatic implementation of reactive Repository interfaces including support for custom finder methods. + +For most tasks you will find yourself using `ReactiveMongoTemplate` or the Repository support that both leverage the rich mapping functionality. `ReactiveMongoTemplate` is the place to look for accessing functionality such as incrementing counters or ad-hoc CRUD operations. `ReactiveMongoTemplate` also provides callback methods so that it is easy for you to get a hold of the low level API artifacts such as `MongoDatabase` to communicate directly with MongoDB. 
The goal with naming conventions on various API artifacts is to copy those in the base MongoDB Java driver so you can easily map your existing knowledge onto the Spring APIs. + +[[mongodb-reactive-getting-started]] +== Getting Started + +Spring MongoDB support requires MongoDB 2.6 or higher and Java SE 8 or higher. + +First you need to set up a running Mongodb server. Refer to the http://docs.mongodb.org/manual/core/introduction/[Mongodb Quick Start guide] for an explanation on how to startup a MongoDB instance. Once installed starting MongoDB is typically a matter of executing the following command: `MONGO_HOME/bin/mongod` + +To create a Spring project in STS go to File -> New -> Spring Template Project -> Simple Spring Utility Project -> press Yes when prompted. Then enter a project and a package name such as org.spring.mongodb.example. + +Then add the following to pom.xml dependencies section. + +[source,xml] +---- + + + + + + org.springframework.data + spring-data-mongodb + {version} + + + + org.mongodb + mongodb-driver-reactivestreams + {mongo.reactivestreams} + + + + io.projectreactor + reactor-core + {reactor} + + + +---- + +NOTE: MongoDB uses two different drivers for blocking and reactive (non-blocking) data access. While blocking operations are provided by default, you have to opt in for reactive usage.
+ +Create a simple Person class to persist: + +[source,java] +---- +package org.spring.mongodb.example; + +public class Person { + + private String id; + private String name; + private int age; + + public Person(String name, int age) { + this.name = name; + this.age = age; + } + + public String getId() { + return id; + } + public String getName() { + return name; + } + public int getAge() { + return age; + } + + @Override + public String toString() { + return "Person [id=" + id + ", name=" + name + ", age=" + age + "]"; + } +} +---- + +And a main application to run + +[source,java] +---- +package org.spring.mongodb.example; + +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.query.Query; + +import com.mongodb.reactivestreams.client.MongoClients; + +public class ReactiveMongoApp { + + private static final Logger log = LoggerFactory.getLogger(ReactiveMongoApp.class); + + public static void main(String[] args) throws Exception { + + CountDownLatch latch = new CountDownLatch(1); + + ReactiveMongoTemplate mongoOps = new ReactiveMongoTemplate(MongoClients.create(), "database"); + + mongoOps.insert(new Person("Joe", 34)) + + .flatMap(p -> mongoOps.findOne(new Query(where("name").is("Joe")), Person.class)) + + .doOnNext(person -> log.info(person.toString())) + + .flatMap(person -> mongoOps.dropCollection("person")) + + .doOnComplete(latch::countDown) + + .subscribe(); + + latch.await(); + } +} +---- + +This will produce the following output + +[source] +---- +2016-09-20 14:56:57,373 DEBUG .index.MongoPersistentEntityIndexCreator: 124 - Analyzing class class example.ReactiveMongoApp$Person for index information. 
+2016-09-20 14:56:57,452 DEBUG .data.mongodb.core.ReactiveMongoTemplate: 975 - Inserting Document containing fields: [_class, name, age] in collection: person +2016-09-20 14:56:57,541 DEBUG .data.mongodb.core.ReactiveMongoTemplate:1503 - findOne using query: { "name" : "Joe"} fields: null for class: class example.ReactiveMongoApp$Person in collection: person +2016-09-20 14:56:57,545 DEBUG .data.mongodb.core.ReactiveMongoTemplate:1979 - findOne using query: { "name" : "Joe"} in db.collection: database.person +2016-09-20 14:56:57,567 INFO example.ReactiveMongoApp: 43 - Person [id=57e1321977ac501c68d73104, name=Joe, age=34] +2016-09-20 14:56:57,573 DEBUG .data.mongodb.core.ReactiveMongoTemplate: 528 - Dropped collection [person] +---- + +Even in this simple example, there are few things to take notice of + +* You can instantiate the central helper class of Spring Mongo, <>, using the standard `com.mongodb.reactivestreams.client.MongoClient` object and the name of the database to use. +* The mapper works against standard POJO objects without the need for any additional metadata (though you can optionally provide that information. See <>.). +* Conventions are used for handling the id field, converting it to be a ObjectId when stored in the database. +* Mapping conventions can use field access. Notice the Person class has only getters. +* If the constructor argument names match the field names of the stored document, they will be used to instantiate the object + +There is an https://github.com/spring-projects/spring-data-examples[github repository with several examples] that you can download and play around with to get a feel for how the library works. + +[[mongo.reactive.driver]] +== Connecting to MongoDB with Spring and the Reactive Streams Driver + +One of the first tasks when using MongoDB and Spring is to create a `com.mongodb.reactivestreams.client.MongoClient` object using the IoC container. 
+ +[[mongo.reactive.mongo-java-config]] +=== Registering a MongoClient instance using Java based metadata + +An example of using Java based bean metadata to register an instance of a `com.mongodb.reactivestreams.client.MongoClient` is shown below + +.Registering a com.mongodb.Mongo object using Java based bean metadata +==== +[source,java] +---- +@Configuration +public class AppConfig { + + /* + * Use the Reactive Streams Mongo Client API to create a com.mongodb.reactivestreams.client.MongoClient instance. + */ + public @Bean MongoClient mongoClient() { + return MongoClients.create("mongodb://localhost"); + } +} +---- +==== + +This approach allows you to use the standard `com.mongodb.reactivestreams.client.MongoClient` API that you may already be used to using. + +An alternative is to register an instance of `com.mongodb.reactivestreams.client.MongoClient` instance with the container using Spring's `ReactiveMongoClientFactoryBean`. As compared to instantiating a `com.mongodb.reactivestreams.client.MongoClient` instance directly, the FactoryBean approach has the added advantage of also providing the container with an ExceptionTranslator implementation that translates MongoDB exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation. This hierarchy and use of `@Repository` is described in http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/html/dao.html[Spring's DAO support features]. 
+ +An example of a Java based bean metadata that supports exception translation on `@Repository` annotated classes is shown below: + +.Registering a com.mongodb.Mongo object using Spring's MongoClientFactoryBean and enabling Spring's exception translation support +==== +[source,java] +---- +@Configuration +public class AppConfig { + + /* + * Factory bean that creates the com.mongodb.reactivestreams.client.MongoClient instance + */ + public @Bean ReactiveMongoClientFactoryBean mongoClient() { + + ReactiveMongoClientFactoryBean mongoClient = new ReactiveMongoClientFactoryBean(); + mongoClient.setHost("localhost"); + + return mongoClient; + } +} +---- +==== + +To access the `com.mongodb.reactivestreams.client.MongoClient` object created by the `ReactiveMongoClientFactoryBean` in other `@Configuration` or your own classes, use a `private @Autowired MongoClient mongoClient;` field. + + +[[mongo.mongo-db-factory]] +=== The ReactiveMongoDatabaseFactory interface + +While `com.mongodb.reactivestreams.client.MongoClient` is the entry point to the reactive MongoDB driver API, connecting to a specific MongoDB database instance requires additional information such as the database name. With that information you can obtain a `com.mongodb.reactivestreams.client.MongoDatabase` object and access all the functionality of a specific MongoDB database instance. Spring provides the `org.springframework.data.mongodb.core.ReactiveMongoDatabaseFactory` interface shown below to bootstrap connectivity to the database. + +[source,java] +---- +public interface ReactiveMongoDatabaseFactory { + + /** + * Creates a default {@link MongoDatabase} instance. + * + * @return + * @throws DataAccessException + */ + MongoDatabase getMongoDatabase() throws DataAccessException; + + /** + * Creates a {@link MongoDatabase} instance to access the database with the given name. + * + * @param dbName must not be {@literal null} or empty. 
+ * @return + * @throws DataAccessException + */ + MongoDatabase getMongoDatabase(String dbName) throws DataAccessException; + + /** + * Exposes a shared {@link MongoExceptionTranslator}. + * + * @return will never be {@literal null}. + */ + PersistenceExceptionTranslator getExceptionTranslator(); +} +---- + +The class `org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory` implements the ReactiveMongoDatabaseFactory interface and is created with a standard `com.mongodb.reactivestreams.client.MongoClient` instance and the database name. + +Instead of using the IoC container to create an instance of ReactiveMongoTemplate, you can just use them in standard Java code as shown below. + +[source,java] +---- +public class MongoApp { + + private static final Log log = LogFactory.getLog(MongoApp.class); + + public static void main(String[] args) throws Exception { + + ReactiveMongoOperations mongoOps = new ReactiveMongoTemplate(*new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database")*); + + mongoOps.insert(new Person("Joe", 34)) + + .flatMap(p -> mongoOps.findOne(new Query(where("name").is("Joe")), Person.class)) + + .doOnNext(person -> log.info(person.toString())) + + .flatMap(person -> mongoOps.dropCollection("person")) + + .subscribe(); + } +} +---- + +The code in bold highlights the use of SimpleReactiveMongoDatabaseFactory and is the only difference from the listing shown in the <>. + +[[mongo.mongo-db-factory-java]] +=== Registering a ReactiveMongoDatabaseFactory instance using Java based metadata + +To register a ReactiveMongoDatabaseFactory instance with the container, you write code much like what was highlighted in the previous code listing.
A simple example is shown below + +[source,java] +---- +@Configuration +public class MongoConfiguration { + + public @Bean ReactiveMongoDatabaseFactory mongoDatabaseFactory() { + return new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database"); + } +} +---- + +To define the username and password, create a MongoDB connection string and pass it into the factory method as shown below. This listing also shows using a `ReactiveMongoDatabaseFactory` to register an instance of `ReactiveMongoTemplate` with the container. + +[source,java] +---- +@Configuration +public class MongoConfiguration { + + public @Bean ReactiveMongoDatabaseFactory mongoDatabaseFactory() { + return new SimpleReactiveMongoDatabaseFactory(MongoClients.create("mongodb://joe:secret@localhost"), "database"); + } + + public @Bean ReactiveMongoTemplate reactiveMongoTemplate() { + return new ReactiveMongoTemplate(mongoDatabaseFactory()); + } +} +---- + +[[mongo.reactive.template]] +== Introduction to ReactiveMongoTemplate + +The class `ReactiveMongoTemplate`, located in the package `org.springframework.data.mongodb.core`, is the central class of the Spring's Reactive MongoDB support providing a rich feature set to interact with the database. The template offers convenience operations to create, update, delete and query for MongoDB documents and provides a mapping between your domain objects and MongoDB documents. + +NOTE: Once configured, `ReactiveMongoTemplate` is thread-safe and can be reused across multiple instances. + +The mapping between MongoDB documents and domain classes is done by delegating to an implementation of the interface `MongoConverter`. Spring provides a default implementation with `MongoMappingConverter`, but you can also write your own converter. Please refer to the section on MongoConverters for more detailed information. + +The `ReactiveMongoTemplate` class implements the interface `ReactiveMongoOperations`.
In as much as possible, the methods on `ReactiveMongoOperations` are named after methods available on the MongoDB driver `Collection` object as as to make the API familiar to existing MongoDB developers who are used to the driver API. For example, you will find methods such as "find", "findAndModify", "findOne", "insert", "remove", "save", "update" and "updateMulti". The design goal was to make it as easy as possible to transition between the use of the base MongoDB driver and `ReactiveMongoOperations`. A major difference in between the two APIs is that ReactiveMongoOperations can be passed domain objects instead of `Document` and there are fluent APIs for `Query`, `Criteria`, and `Update` operations instead of populating a `Document` to specify the parameters for those operations. + +NOTE: The preferred way to reference the operations on `ReactiveMongoTemplate` instance is via its interface `ReactiveMongoOperations`. + +The default converter implementation used by `ReactiveMongoTemplate` is `MappingMongoConverter`. While the `MappingMongoConverter` can make use of additional metadata to specify the mapping of objects to documents it is also capable of converting objects that contain no additional metadata by using some conventions for the mapping of IDs and collection names. These conventions as well as the use of mapping annotations is explained in the <>. + +Another central feature of `ReactiveMongoTemplate` is exception translation of exceptions thrown in the MongoDB Java driver into Spring's portable Data Access Exception hierarchy. Refer to the section on <> for more information. + +While there are many convenience methods on `ReactiveMongoTemplate` to help you easily perform common tasks if you should need to access the MongoDB driver API directly to access functionality not explicitly exposed by the MongoTemplate you can use one of several Execute callback methods to access underlying driver APIs. 
The execute callbacks will give you a reference to either a `com.mongodb.reactivestreams.client.MongoCollection` or a `com.mongodb.reactivestreams.client.MongoDatabase` object. Please see the section <> for more information. + +Now let's look at a examples of how to work with the `ReactiveMongoTemplate` in the context of the Spring container. + +[[mongo.reactive.template.instantiating]] +=== Instantiating ReactiveMongoTemplate + +You can use Java to create and register an instance of `ReactiveMongoTemplate` as shown below. + +.Registering a `com.mongodb.reactivestreams.client.MongoClient` object and enabling Spring's exception translation support +==== +[source,java] +---- +@Configuration +public class AppConfig { + + public @Bean MongoClient mongoClient() { + return MongoClients.create("mongodb://localhost"); + } + + public @Bean ReactiveMongoTemplate reactiveMongoTemplate() { + return new ReactiveMongoTemplate(mongoClient(), "mydatabase"); + } +} +---- +==== + +There are several overloaded constructors of ReactiveMongoTemplate. These are + +* `ReactiveMongoTemplate(MongoClient mongo, String databaseName)` - takes the `com.mongodb.Mongo` object and the default database name to operate against. +* `ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory)` - takes a ReactiveMongoDatabaseFactory object that encapsulated the `com.mongodb.reactivestreams.client.MongoClient` object and database name. +* `ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, MongoConverter mongoConverter)` - adds a `MongoConverter` to use for mapping. + +Other optional properties that you might like to set when creating a `ReactiveMongoTemplate` are the default `WriteResultCheckingPolicy`, `WriteConcern`, and `ReadPreference`. + +NOTE: The preferred way to reference the operations on `ReactiveMongoTemplate` instance is via its interface `ReactiveMongoOperations`. 
+ + +[[mongo.reactive.template.writeresultchecking]] +=== WriteResultChecking Policy + +When in development it is very handy to either log or throw an exception if the `com.mongodb.WriteResult` returned from any MongoDB operation contains an error. It is quite common to forget to do this during development and then end up with an application that looks like it runs successfully but in fact the database was not modified according to your expectations. Set MongoTemplate's property to an enum with the following values, `LOG`, `EXCEPTION`, or `NONE` to either log the error, throw an exception or do nothing. The default is to use a `WriteResultChecking` value of `NONE`. + + +[[mongo.reactive.template.writeconcern]] +=== WriteConcern + +You can set the `com.mongodb.WriteConcern` property that the `ReactiveMongoTemplate` will use for write operations if it has not yet been specified via the driver at a higher level such as `MongoDatabase`. If ReactiveMongoTemplate's `WriteConcern` property is not set it will default to the one set in the MongoDB driver's DB or Collection setting. + + +[[mongo.reactive.template.writeconcernresolver]] +=== WriteConcernResolver + +For more advanced cases where you want to set different `WriteConcern` values on a per-operation basis (for remove, update, insert and save operations), a strategy interface called `WriteConcernResolver` can be configured on `ReactiveMongoTemplate`. Since `ReactiveMongoTemplate` is used to persist POJOs, the `WriteConcernResolver` lets you create a policy that can map a specific POJO class to a `WriteConcern` value. The `WriteConcernResolver` interface is shown below. + +[source,java] +---- +public interface WriteConcernResolver { + WriteConcern resolve(MongoAction action); +} +---- + +The passed in argument, `MongoAction`, is what you use to determine the `WriteConcern` value to be used or to use the value of the Template itself as a default.
`MongoAction` contains the collection name being written to, the `java.lang.Class` of the POJO, the converted `DBObject`, as well as the operation as an enumeration (`MongoActionOperation`: REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE) and a few other pieces of contextual information. For example, + +[source] +---- +private class MyAppWriteConcernResolver implements WriteConcernResolver { + + public WriteConcern resolve(MongoAction action) { + if (action.getEntityClass().getSimpleName().contains("Audit")) { + return WriteConcern.NONE; + } else if (action.getEntityClass().getSimpleName().contains("Metadata")) { + return WriteConcern.JOURNAL_SAFE; + } + return action.getDefaultWriteConcern(); + } +} +---- + + +[[mongo.reactive.template.save-update-remove]] +== Saving, Updating, and Removing Documents + +`ReactiveMongoTemplate` provides a simple way for you to save, update, and delete your domain objects and map those objects to documents stored in MongoDB. + +Given a simple class such as Person + +[source,java] +---- +public class Person { + + private String id; + private String name; + private int age; + + public Person(String name, int age) { + this.name = name; + this.age = age; + } + + public String getId() { + return id; + } + public String getName() { + return name; + } + public int getAge() { + return age; + } + + @Override + public String toString() { + return "Person [id=" + id + ", name=" + name + ", age=" + age + "]"; + } + +} +---- + +You can save, update and delete the object as shown below. 
+ +[source,java] +---- +package org.spring.mongodb.example; + +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.query.Query; + +import com.mongodb.reactivestreams.client.MongoClients; + +public class ReactiveMongoApp { + + private static final Logger log = LoggerFactory.getLogger(ReactiveMongoApp.class); + + public static void main(String[] args) throws Exception { + + CountDownLatch latch = new CountDownLatch(1); + + ReactiveMongoTemplate mongoOps = new ReactiveMongoTemplate(MongoClients.create(), "database"); + + mongoOps.insert(new Person("Joe", 34)).doOnNext(person -> log.info("Insert: " + person)) + + .flatMap(person -> mongoOps.findById(person.getId(), Person.class)) + + .doOnNext(person -> log.info("Found: " + person)) + + .zipWith(person -> mongoOps.updateFirst(query(where("name").is("Joe")), update("age", 35), Person.class)) + + .flatMap(tuple -> mongoOps.remove(tuple.getT1())).flatMap(deleteResult -> mongoOps.findAll(Person.class)) + + .count().doOnSuccess(count -> { + log.info("Number of people: " + count); + latch.countDown(); + }) + + .subscribe(); + + latch.await(); + } +} +---- + +There was implicit conversion using the `MongoConverter` between a `String` and `ObjectId` as stored in the database and recognizing a convention of the property "Id" name. + +NOTE: This example is meant to show the use of save, update and remove operations on `ReactiveMongoTemplate` and not to show complex mapping or functional chaining functionality + +The query syntax used in the example is explained in more detail in the section <>. Additional documentation can be found in <> section. + +[[mongo.reactive.tailcursors]] +== Infinite Streams + +By default, MongoDB will automatically close a cursor when the client has exhausted all results in the cursor. 
Closing a cursor turns a Stream into a finite stream. However, for capped collections you may use a https://docs.mongodb.com/manual/core/tailable-cursors/[Tailable Cursor] that remains open after the client exhausts the results in the initial cursor. Using Tailable Cursors with a reactive approach allows construction of infinite streams. A Tailable Cursor remains open until it's closed. It emits data as data arrives in a capped collection. Using Tailable Cursors with Collections is not possible as its result would never complete. + +[source,java] +---- +Flux stream = template.tail(query(where("name").is("Joe")), Person.class); + +Cancellation cancellation = stream.doOnNext(person -> System.out.println(person)).subscribe(); + +// … + +// Later: Dispose the stream +cancellation.dispose(); +---- + + +[[mongo.reactive.executioncallback]] +== Execution callbacks + +One common design feature of all Spring template classes is that all functionality is routed into one of the template's execute callback methods. This helps ensure that exceptions and any resource management that may be required are performed consistently. While this was of much greater need in the case of JDBC and JMS than with MongoDB, it still offers a single spot for exception translation and logging to occur. As such, using the execute callback is the preferred way to access the MongoDB driver's `MongoDatabase` and `MongoCollection` objects to perform uncommon operations that were not exposed as methods on `ReactiveMongoTemplate`. + +Here is a list of execute callback methods. + +* ` Flux` *execute* `(Class entityClass, ReactiveCollectionCallback action)` Executes the given ReactiveCollectionCallback for the entity collection of the specified class. + +* ` Flux` *execute* `(String collectionName, ReactiveCollectionCallback action)` Executes the given ReactiveCollectionCallback on the collection of the given name.
+ +* ` Flux` *execute* `(ReactiveDatabaseCallback action)` Executes a ReactiveDatabaseCallback translating any exceptions as necessary. + +Here is an example that uses the `ReactiveCollectionCallback` to return information about an index + +[source,java] +---- +Flux hasIndex = template.execute("geolocation", collection -> { + + List indexes = template.indexOps(collection.getNamespace().getCollectionName()).getIndexInfo(); + for (IndexInfo dbo : indexes) { + if ("location_2d".equals(dbo.getName())) { + return Mono.just(true); + } + } + return Mono.just(false); +}); +----