diff --git a/pom.xml b/pom.xml index f8b1965de..ff93af8ea 100644 --- a/pom.xml +++ b/pom.xml @@ -30,6 +30,7 @@ spring-data-mongodb 2.0.0.BUILD-SNAPSHOT 3.2.2 + 1.2.0 diff --git a/spring-data-mongodb-cross-store/pom.xml b/spring-data-mongodb-cross-store/pom.xml index 96e56bd02..4a4916871 100644 --- a/spring-data-mongodb-cross-store/pom.xml +++ b/spring-data-mongodb-cross-store/pom.xml @@ -51,6 +51,14 @@ 2.0.0.BUILD-SNAPSHOT + + + io.projectreactor + reactor-core + ${reactor} + true + + org.aspectj aspectjrt diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 9754e8545..c8151d666 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -19,6 +19,7 @@ 1.3 1.5 3.3.0 + 5.0.0.BUILD-SNAPSHOT @@ -79,6 +80,52 @@ true + + + org.mongodb + mongodb-driver-reactivestreams + ${mongo.reactivestreams} + true + + + + org.mongodb + mongodb-driver-async + ${mongo} + true + + + org.mongodb + mongodb-driver-core + + + org.mongodb + bson + + + + + + io.projectreactor + reactor-core + ${reactor} + true + + + + io.reactivex + rxjava + ${rxjava} + true + + + + io.reactivex + rxjava-reactive-streams + ${rxjava-reactive-streams} + true + + javax.enterprise @@ -213,9 +260,11 @@ **/PerformanceTests.java + **/ReactivePerformanceTests.java src/test/resources/logging.properties + true diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java new file mode 100644 index 000000000..52e80b953 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java @@ -0,0 +1,56 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb; + +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; + +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Interface for factories creating reactive {@link MongoDatabase} instances. + * + * @author Mark Paluch + * @since 2.0 + */ +public interface ReactiveMongoDatabaseFactory { + + /** + * Creates a default {@link MongoDatabase} instance. + * + * @return + * @throws DataAccessException + */ + MongoDatabase getMongoDatabase() throws DataAccessException; + + /** + * Creates a {@link MongoDatabase} instance to access the database with the given name. + * + * @param dbName must not be {@literal null} or empty. + * @return + * @throws DataAccessException + */ + MongoDatabase getMongoDatabase(String dbName) throws DataAccessException; + + /** + * Exposes a shared {@link MongoExceptionTranslator}. + * + * @return will never be {@literal null}. 
+ */ + PersistenceExceptionTranslator getExceptionTranslator(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java index e1e92eee5..a72521ade 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java @@ -15,36 +15,16 @@ */ package org.springframework.data.mongodb.config; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; - -import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider; import org.springframework.context.annotation.Configuration; -import org.springframework.core.convert.converter.Converter; -import org.springframework.core.type.filter.AnnotationTypeFilter; -import org.springframework.data.annotation.Persistent; import org.springframework.data.authentication.UserCredentials; -import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory; -import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy; -import org.springframework.data.mapping.model.FieldNamingStrategy; -import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.SimpleMongoDbFactory; -import org.springframework.data.mongodb.core.convert.CustomConversions; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import 
org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.mapping.Document; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.support.CachingIsNewStrategyFactory; -import org.springframework.data.support.IsNewStrategyFactory; -import org.springframework.util.ClassUtils; -import org.springframework.util.StringUtils; import com.mongodb.Mongo; import com.mongodb.MongoClient; @@ -57,16 +37,11 @@ import com.mongodb.MongoClient; * @author Thomas Darimont * @author Ryan Tenney * @author Christoph Strobl + * @author Mark Paluch + * @see MongoConfigurationSupport */ @Configuration -public abstract class AbstractMongoConfiguration { - - /** - * Return the name of the database to connect to. - * - * @return must not be {@literal null}. - */ - protected abstract String getDatabaseName(); +public abstract class AbstractMongoConfiguration extends MongoConfigurationSupport { /** * Return the name of the authentication database to use. Defaults to {@literal null} and will turn into the value @@ -120,7 +95,7 @@ public abstract class AbstractMongoConfiguration { * class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending * {@link AbstractMongoConfiguration} the base package will be considered {@code com.acme} unless the method is * overridden to implement alternate behavior. - * + * * @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for * entities. * @deprecated use {@link #getMappingBasePackages()} instead. @@ -132,20 +107,6 @@ public abstract class AbstractMongoConfiguration { return mappingBasePackage == null ? null : mappingBasePackage.getName(); } - /** - * Returns the base packages to scan for MongoDB mapped entities at startup. Will return the package name of the - * configuration class' (the concrete class, not this one here) by default. 
So if you have a - * {@code com.acme.AppConfig} extending {@link AbstractMongoConfiguration} the base package will be considered - * {@code com.acme} unless the method is overridden to implement alternate behavior. - * - * @return the base packages to scan for mapped {@link Document} classes or an empty collection to not enable scanning - * for entities. - * @since 1.10 - */ - protected Collection getMappingBasePackages() { - return Collections.singleton(getMappingBasePackage()); - } - /** * Return {@link UserCredentials} to be used when connecting to the MongoDB instance or {@literal null} if none shall * be used. @@ -159,47 +120,6 @@ public abstract class AbstractMongoConfiguration { return null; } - /** - * Creates a {@link MongoMappingContext} equipped with entity classes scanned from the mapping base package. - * - * @see #getMappingBasePackage() - * @return - * @throws ClassNotFoundException - */ - @Bean - public MongoMappingContext mongoMappingContext() throws ClassNotFoundException { - - MongoMappingContext mappingContext = new MongoMappingContext(); - mappingContext.setInitialEntitySet(getInitialEntitySet()); - mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder()); - mappingContext.setFieldNamingStrategy(fieldNamingStrategy()); - - return mappingContext; - } - - /** - * Returns a {@link MappingContextIsNewStrategyFactory} wrapped into a {@link CachingIsNewStrategyFactory}. - * - * @return - * @throws ClassNotFoundException - */ - @Bean - public IsNewStrategyFactory isNewStrategyFactory() throws ClassNotFoundException { - return new CachingIsNewStrategyFactory(new MappingContextIsNewStrategyFactory(mongoMappingContext())); - } - - /** - * Register custom {@link Converter}s in a {@link CustomConversions} object if required. These - * {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and - * {@link #mongoMappingContext()}. Returns an empty {@link CustomConversions} instance by default. 
- * - * @return must not be {@literal null}. - */ - @Bean - public CustomConversions customConversions() { - return new CustomConversions(Collections.emptyList()); - } - /** * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and * {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied. @@ -219,79 +139,4 @@ public abstract class AbstractMongoConfiguration { return converter; } - - /** - * Scans the mapping base package for classes annotated with {@link Document}. By default, it scans for entities in - * all packages returned by {@link #getMappingBasePackages()}. - * - * @see #getMappingBasePackages() - * @return - * @throws ClassNotFoundException - */ - protected Set> getInitialEntitySet() throws ClassNotFoundException { - - Set> initialEntitySet = new HashSet>(); - - for (String basePackage : getMappingBasePackages()) { - initialEntitySet.addAll(scanForEntities(basePackage)); - } - - return initialEntitySet; - } - - /** - * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document} and - * {@link Persistent}. - * - * @param basePackage must not be {@literal null}. 
- * @return - * @throws ClassNotFoundException - * @since 1.10 - */ - protected Set> scanForEntities(String basePackage) throws ClassNotFoundException { - - if (!StringUtils.hasText(basePackage)) { - return Collections.emptySet(); - } - - Set> initialEntitySet = new HashSet>(); - - if (StringUtils.hasText(basePackage)) { - - ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider( - false); - componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class)); - componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class)); - - for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) { - - initialEntitySet - .add(ClassUtils.forName(candidate.getBeanClassName(), AbstractMongoConfiguration.class.getClassLoader())); - } - } - - return initialEntitySet; - } - - /** - * Configures whether to abbreviate field names for domain objects by configuring a - * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced - * customization needs, consider overriding {@link #mappingMongoConverter()}. - * - * @return - */ - protected boolean abbreviateFieldNames() { - return false; - } - - /** - * Configures a {@link FieldNamingStrategy} on the {@link MongoMappingContext} instance created. - * - * @return - * @since 1.5 - */ - protected FieldNamingStrategy fieldNamingStrategy() { - return abbreviateFieldNames() ? 
new CamelCaseAbbreviatingFieldNamingStrategy() - : PropertyNameFieldNamingStrategy.INSTANCE; - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java new file mode 100644 index 000000000..3468a5ce9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java @@ -0,0 +1,90 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.config; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Base class for reactive Spring Data MongoDB configuration using JavaConfig. 
+ * + * @author Mark Paluch + * @since 2.0 + * @see MongoConfigurationSupport + */ +@Configuration +public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurationSupport { + + /** + * Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a + * {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. + * + * @return + */ + public abstract MongoClient mongoClient(); + + /** + * Creates a {@link ReactiveMongoTemplate}. + * + * @return + */ + @Bean + public ReactiveMongoTemplate reactiveMongoTemplate() throws Exception { + return new ReactiveMongoTemplate(mongoDbFactory(), mappingMongoConverter()); + } + + /** + * Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link Mongo} instance + * configured in {@link #mongoClient()}. + * + * @see #mongoClient() + * @see #reactiveMongoTemplate() + * @return + * @throws Exception + */ + @Bean + public SimpleReactiveMongoDatabaseFactory mongoDbFactory() { + return new SimpleReactiveMongoDatabaseFactory(mongoClient(), getDatabaseName()); + } + + /** + * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and + * {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied. 
+ * + * @see #customConversions() + * @see #mongoMappingContext() + * @see #mongoDbFactory() + * @return + * @throws Exception + */ + @Bean + public MappingMongoConverter mappingMongoConverter() throws Exception { + + MappingMongoConverter converter = new MappingMongoConverter(ReactiveMongoTemplate.NO_OP_REF_RESOLVER, + mongoMappingContext()); + converter.setCustomConversions(customConversions()); + + return converter; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java index 3aae75689..0c2000291 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java @@ -67,7 +67,7 @@ import org.w3c.dom.Element; /** * Bean definition parser for the {@code mapping-converter} element. 
- * + * * @author Jon Brisbin * @author Oliver Gierke * @author Maciej Walkowiak @@ -120,6 +120,12 @@ public class MappingMongoConverterParser implements BeanDefinitionParser { converterBuilder.addPropertyValue("customConversions", conversionsDefinition); } + if(!registry.containsBeanDefinition("indexOperationsProvider")){ + BeanDefinitionBuilder indexOperationsProviderBuilder = BeanDefinitionBuilder.genericBeanDefinition("org.springframework.data.mongodb.core.DefaultIndexOperationsProvider"); + indexOperationsProviderBuilder.addConstructorArgReference(dbFactoryRef); + parserContext.registerBeanComponent(new BeanComponentDefinition(indexOperationsProviderBuilder.getBeanDefinition(), "indexOperationsProvider")); + } + try { registry.getBeanDefinition(INDEX_HELPER_BEAN_NAME); } catch (NoSuchBeanDefinitionException ignored) { @@ -129,7 +135,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser { BeanDefinitionBuilder indexHelperBuilder = BeanDefinitionBuilder .genericBeanDefinition(MongoPersistentEntityIndexCreator.class); indexHelperBuilder.addConstructorArgReference(ctxRef); - indexHelperBuilder.addConstructorArgReference(dbFactoryRef); + indexHelperBuilder.addConstructorArgReference("indexOperationsProvider"); indexHelperBuilder.addDependsOn(ctxRef); parserContext.registerBeanComponent(new BeanComponentDefinition(indexHelperBuilder.getBeanDefinition(), @@ -348,7 +354,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser { /** * {@link TypeFilter} that returns {@literal false} in case any of the given delegates matches. - * + * * @author Oliver Gierke */ private static class NegatingFilter implements TypeFilter { @@ -357,7 +363,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser { /** * Creates a new {@link NegatingFilter} with the given delegates. - * + * * @param filters */ public NegatingFilter(TypeFilter... 
filters) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java new file mode 100644 index 000000000..7fb731ce5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java @@ -0,0 +1,198 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.config; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider; +import org.springframework.core.convert.converter.Converter; +import org.springframework.core.type.filter.AnnotationTypeFilter; +import org.springframework.data.annotation.Persistent; +import org.springframework.data.authentication.UserCredentials; +import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory; +import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy; +import org.springframework.data.mapping.model.FieldNamingStrategy; +import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; +import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.convert.CustomConversions; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.support.CachingIsNewStrategyFactory; +import org.springframework.data.support.IsNewStrategyFactory; +import org.springframework.util.ClassUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.Mongo; +import com.mongodb.MongoClient; + +/** + * Base class for Spring Data MongoDB to be extended for JavaConfiguration usage. 
+ * + * @author Mark Paluch + * @since 2.0 + */ +public abstract class MongoConfigurationSupport { + + /** + * Return the name of the database to connect to. + * + * @return must not be {@literal null}. + */ + protected abstract String getDatabaseName(); + + /** + * Returns the base packages to scan for MongoDB mapped entities at startup. Will return the package name of the + * configuration class' (the concrete class, not this one here) by default. So if you have a + * {@code com.acme.AppConfig} extending {@link MongoConfigurationSupport} the base package will be considered + * {@code com.acme} unless the method is overridden to implement alternate behavior. + * + * @return the base packages to scan for mapped {@link Document} classes or an empty collection to not enable scanning + * for entities. + * @since 1.10 + */ + protected Collection getMappingBasePackages() { + + Package mappingBasePackage = getClass().getPackage(); + return Collections.singleton(mappingBasePackage == null ? null : mappingBasePackage.getName()); + } + + /** + * Creates a {@link MongoMappingContext} equipped with entity classes scanned from the mapping base package. + * + * @see #getMappingBasePackage() + * @return + * @throws ClassNotFoundException + */ + @Bean + public MongoMappingContext mongoMappingContext() throws ClassNotFoundException { + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setInitialEntitySet(getInitialEntitySet()); + mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder()); + mappingContext.setFieldNamingStrategy(fieldNamingStrategy()); + + return mappingContext; + } + + /** + * Returns a {@link MappingContextIsNewStrategyFactory} wrapped into a {@link CachingIsNewStrategyFactory}. 
+ * + * @return + * @throws ClassNotFoundException + */ + @Bean + public IsNewStrategyFactory isNewStrategyFactory() throws ClassNotFoundException { + return new CachingIsNewStrategyFactory(new MappingContextIsNewStrategyFactory(mongoMappingContext())); + } + + /** + * Register custom {@link Converter}s in a {@link CustomConversions} object if required. These + * {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and + * {@link #mongoMappingContext()}. Returns an empty {@link CustomConversions} instance by default. + * + * @return must not be {@literal null}. + */ + @Bean + public CustomConversions customConversions() { + return new CustomConversions(Collections.emptyList()); + } + + /** + * Scans the mapping base package for classes annotated with {@link Document}. By default, it scans for entities in + * all packages returned by {@link #getMappingBasePackages()}. + * + * @see #getMappingBasePackages() + * @return + * @throws ClassNotFoundException + */ + protected Set> getInitialEntitySet() throws ClassNotFoundException { + + Set> initialEntitySet = new HashSet>(); + + for (String basePackage : getMappingBasePackages()) { + initialEntitySet.addAll(scanForEntities(basePackage)); + } + + return initialEntitySet; + } + + /** + * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document} and + * {@link Persistent}. + * + * @param basePackage must not be {@literal null}. 
+ * @return + * @throws ClassNotFoundException + * @since 1.10 + */ + protected Set> scanForEntities(String basePackage) throws ClassNotFoundException { + + if (!StringUtils.hasText(basePackage)) { + return Collections.emptySet(); + } + + Set> initialEntitySet = new HashSet>(); + + if (StringUtils.hasText(basePackage)) { + + ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider( + false); + componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class)); + componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class)); + + for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) { + + initialEntitySet + .add(ClassUtils.forName(candidate.getBeanClassName(), MongoConfigurationSupport.class.getClassLoader())); + } + } + + return initialEntitySet; + } + + /** + * Configures whether to abbreviate field names for domain objects by configuring a + * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced + * customization needs, consider overriding {@link #mappingMongoConverter()}. + * + * @return + */ + protected boolean abbreviateFieldNames() { + return false; + } + + /** + * Configures a {@link FieldNamingStrategy} on the {@link MongoMappingContext} instance created. + * + * @return + * @since 1.5 + */ + protected FieldNamingStrategy fieldNamingStrategy() { + return abbreviateFieldNames() ? 
new CamelCaseAbbreviatingFieldNamingStrategy() + : PropertyNameFieldNamingStrategy.INSTANCE; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java index e9b3c501f..5ccdf3822 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java @@ -15,18 +15,15 @@ */ package org.springframework.data.mongodb.core; -import static org.springframework.data.domain.Sort.Direction.*; +import static org.springframework.data.mongodb.core.MongoTemplate.potentiallyConvertRuntimeException; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; import java.util.List; -import java.util.concurrent.TimeUnit; import org.bson.Document; import org.springframework.dao.DataAccessException; +import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.core.index.IndexDefinition; -import org.springframework.data.mongodb.core.index.IndexField; import org.springframework.data.mongodb.core.index.IndexInfo; import org.springframework.util.Assert; @@ -42,28 +39,25 @@ import com.mongodb.client.model.IndexOptions; * @author Oliver Gierke * @author Komi Innocent * @author Christoph Strobl + * @author Mark Paluch */ public class DefaultIndexOperations implements IndexOperations { - private static final Double ONE = Double.valueOf(1); - private static final Double MINUS_ONE = Double.valueOf(-1); - private static final Collection TWO_D_IDENTIFIERS = Arrays.asList("2d", "2dsphere"); - - private final MongoOperations mongoOperations; + private final MongoDbFactory mongoDbFactory; private final String collectionName; /** * Creates a new {@link DefaultIndexOperations}. * - * @param mongoOperations must not be {@literal null}. 
+ * @param mongoDbFactory must not be {@literal null}. * @param collectionName must not be {@literal null}. */ - public DefaultIndexOperations(MongoOperations mongoOperations, String collectionName) { + public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName) { - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); + Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!"); Assert.notNull(collectionName, "Collection name can not be null!"); - this.mongoOperations = mongoOperations; + this.mongoDbFactory = mongoDbFactory; this.collectionName = collectionName; } @@ -72,57 +66,18 @@ public class DefaultIndexOperations implements IndexOperations { * @see org.springframework.data.mongodb.core.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition) */ public void ensureIndex(final IndexDefinition indexDefinition) { - mongoOperations.execute(collectionName, new CollectionCallback() { - public Object doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + execute(collection -> { - Document indexOptions = indexDefinition.getIndexOptions(); + Document indexOptions = indexDefinition.getIndexOptions(); - if (indexOptions != null) { + if (indexOptions != null) { - IndexOptions ops = new IndexOptions(); - if (indexOptions.containsKey("name")) { - ops = ops.name(indexOptions.get("name").toString()); - } - if (indexOptions.containsKey("unique")) { - ops = ops.unique((Boolean) indexOptions.get("unique")); - } - if (indexOptions.containsKey("sparse")) { - ops = ops.sparse((Boolean) indexOptions.get("sparse")); - } - if (indexOptions.containsKey("background")) { - ops = ops.background((Boolean) indexOptions.get("background")); - } - if (indexOptions.containsKey("expireAfterSeconds")) { - ops = ops.expireAfter((Long) indexOptions.get("expireAfterSeconds"), TimeUnit.SECONDS); - } - if (indexOptions.containsKey("min")) { - ops = ops.min(((Number) 
indexOptions.get("min")).doubleValue()); - } - if (indexOptions.containsKey("max")) { - ops = ops.max(((Number) indexOptions.get("max")).doubleValue()); - } - if (indexOptions.containsKey("bits")) { - ops = ops.bits((Integer) indexOptions.get("bits")); - } - if (indexOptions.containsKey("bucketSize")) { - ops = ops.bucketSize(((Number) indexOptions.get("bucketSize")).doubleValue()); - } - if (indexOptions.containsKey("default_language")) { - ops = ops.defaultLanguage(indexOptions.get("default_language").toString()); - } - if (indexOptions.containsKey("language_override")) { - ops = ops.languageOverride(indexOptions.get("language_override").toString()); - } - if (indexOptions.containsKey("weights")) { - ops = ops.weights((Document) indexOptions.get("weights")); - } - - collection.createIndex(indexDefinition.getIndexKeys(), ops); - } else { - collection.createIndex(indexDefinition.getIndexKeys()); - } - return null; + IndexOptions ops = IndexConverters.DEFINITION_TO_MONGO_INDEX_OPTIONS.convert(indexDefinition); + collection.createIndex(indexDefinition.getIndexKeys(), ops); + } else { + collection.createIndex(indexDefinition.getIndexKeys()); } + return null; }); } @@ -131,7 +86,7 @@ public class DefaultIndexOperations implements IndexOperations { * @see org.springframework.data.mongodb.core.IndexOperations#dropIndex(java.lang.String) */ public void dropIndex(final String name) { - mongoOperations.execute(collectionName, new CollectionCallback() { + execute(new CollectionCallback() { public Void doInCollection(MongoCollection collection) throws MongoException, DataAccessException { collection.dropIndex(name); return null; @@ -154,7 +109,7 @@ public class DefaultIndexOperations implements IndexOperations { */ public List getIndexInfo() { - return mongoOperations.execute(collectionName, new CollectionCallback>() { + return execute(new CollectionCallback>() { public List doInCollection(MongoCollection collection) throws MongoException, DataAccessException { @@ -169,47 
+124,24 @@ public class DefaultIndexOperations implements IndexOperations { while (cursor.hasNext()) { Document ix = cursor.next(); - Document keyDocument = (Document) ix.get("key"); - int numberOfElements = keyDocument.keySet().size(); - - List indexFields = new ArrayList(numberOfElements); - - for (String key : keyDocument.keySet()) { - - Object value = keyDocument.get(key); - - if (TWO_D_IDENTIFIERS.contains(value)) { - indexFields.add(IndexField.geo(key)); - } else if ("text".equals(value)) { - - Document weights = (Document) ix.get("weights"); - for (String fieldName : weights.keySet()) { - indexFields.add(IndexField.text(fieldName, Float.valueOf(weights.get(fieldName).toString()))); - } - - } else { - - Double keyValue = new Double(value.toString()); - - if (ONE.equals(keyValue)) { - indexFields.add(IndexField.create(key, ASC)); - } else if (MINUS_ONE.equals(keyValue)) { - indexFields.add(IndexField.create(key, DESC)); - } - } - } - - String name = ix.get("name").toString(); - - boolean unique = ix.containsKey("unique") ? (Boolean) ix.get("unique") : false; - boolean dropDuplicates = ix.containsKey("dropDups") ? (Boolean) ix.get("dropDups") : false; - boolean sparse = ix.containsKey("sparse") ? (Boolean) ix.get("sparse") : false; - String language = ix.containsKey("default_language") ? 
(String) ix.get("default_language") : ""; - indexInfoList.add(new IndexInfo(indexFields, name, unique, dropDuplicates, sparse, language)); + IndexInfo indexInfo = IndexConverters.DOCUMENT_INDEX_INFO.convert(ix); + indexInfoList.add(indexInfo); } return indexInfoList; } }); } + + public T execute(CollectionCallback callback) { + + Assert.notNull(callback); + + try { + MongoCollection collection = mongoDbFactory.getDb().getCollection(collectionName); + return callback.doInCollection(collection); + } catch (RuntimeException e) { + throw potentiallyConvertRuntimeException(e, mongoDbFactory.getExceptionTranslator()); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java new file mode 100644 index 000000000..f01f95670 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java @@ -0,0 +1,45 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.data.mongodb.MongoDbFactory; + +/** + * {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDbFactory}. 
TODO: Review + * me + * + * @author Mark Paluch + * @since 2.0 + */ +class DefaultIndexOperationsProvider implements IndexOperationsProvider { + + private final MongoDbFactory mongoDbFactory; + + /** + * @param mongoDbFactory must not be {@literal null}. + */ + DefaultIndexOperationsProvider(MongoDbFactory mongoDbFactory) { + this.mongoDbFactory = mongoDbFactory; + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.IndexOperationsProvider#indexOps(java.lang.String) + */ + @Override + public IndexOperations indexOps(String collectionName) { + return new DefaultIndexOperations(mongoDbFactory, collectionName); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java new file mode 100644 index 000000000..adb168ecb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java @@ -0,0 +1,102 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import org.bson.Document; +import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.util.Assert; + +import com.mongodb.reactivestreams.client.ListIndexesPublisher; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * Default implementation of {@link ReactiveIndexOperations}. + * + * @author Mark Paluch + * @since 2.0 + */ +public class DefaultReactiveIndexOperations implements ReactiveIndexOperations { + + private final ReactiveMongoOperations mongoOperations; + private final String collectionName; + + /** + * Creates a new {@link DefaultReactiveIndexOperations}. + * + * @param mongoOperations must not be {@literal null}. + * @param collectionName must not be {@literal null}. + */ + public DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, String collectionName) { + + Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null!"); + Assert.notNull(collectionName, "Collection must not be null!"); + + this.mongoOperations = mongoOperations; + this.collectionName = collectionName; + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveIndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition) + */ + public Mono ensureIndex(final IndexDefinition indexDefinition) { + + return mongoOperations.execute(collectionName, (ReactiveCollectionCallback) collection -> { + + Document indexOptions = indexDefinition.getIndexOptions(); + + if (indexOptions != null) { + return collection.createIndex(indexDefinition.getIndexKeys(), + IndexConverters.DEFINITION_TO_MONGO_INDEX_OPTIONS.convert(indexDefinition)); + } + + return collection.createIndex(indexDefinition.getIndexKeys()); + }).next(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveIndexOperations#dropIndex(java.lang.String) + 
*/ + public Mono dropIndex(final String name) { + + return mongoOperations.execute(collectionName, collection -> { + + return Mono.from(collection.dropIndex(name)); + }).flatMap(success -> Mono.empty()).next(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveIndexOperations#dropAllIndexes() + */ + public Mono dropAllIndexes() { + return dropIndex("*"); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveIndexOperations#getIndexInfo() + */ + public Flux getIndexInfo() { + + return mongoOperations.execute(collectionName, collection -> { + + ListIndexesPublisher indexesPublisher = collection.listIndexes(Document.class); + + return Flux.from(indexesPublisher).map(t -> IndexConverters.DOCUMENT_INDEX_INFO.convert(t)); + }); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java new file mode 100644 index 000000000..a41203d38 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java @@ -0,0 +1,34 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import com.mongodb.DBCursor; +import com.mongodb.reactivestreams.client.FindPublisher; + +/** + * Simple callback interface to allow customization of a {@link FindPublisher}. + * + * @author Mark Paluch + */ +interface FindPublisherPreparer { + + /** + * Prepare the given {@link FindPublisher} (apply limits, skips and so on). Returns the prepared publisher. + * + * @param findPublisher + */ + FindPublisher prepare(FindPublisher findPublisher); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java new file mode 100644 index 000000000..44ea31941 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java @@ -0,0 +1,158 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.core; + +import static org.springframework.data.domain.Sort.Direction.*; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import org.bson.Document; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexField; +import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.util.ObjectUtils; + +import com.mongodb.client.model.IndexOptions; + +/** + * {@link Converter Converters} for index-related MongoDB documents/types. + * + * @author Mark Paluch + * @since 2.0 + */ +abstract class IndexConverters { + + public final static Converter DEFINITION_TO_MONGO_INDEX_OPTIONS; + public final static Converter DOCUMENT_INDEX_INFO; + + private static final Double ONE = Double.valueOf(1); + private static final Double MINUS_ONE = Double.valueOf(-1); + private static final Collection TWO_D_IDENTIFIERS = Arrays.asList("2d", "2dsphere"); + + static { + + DEFINITION_TO_MONGO_INDEX_OPTIONS = getIndexDefinitionIndexOptionsConverter(); + DOCUMENT_INDEX_INFO = getDocumentIndexInfoConverter(); + } + + private IndexConverters() { + + } + + private static Converter getIndexDefinitionIndexOptionsConverter() { + + return indexDefinition -> { + + Document indexOptions = indexDefinition.getIndexOptions(); + IndexOptions ops = new IndexOptions(); + + if (indexOptions.containsKey("name")) { + ops = ops.name(indexOptions.get("name").toString()); + } + if (indexOptions.containsKey("unique")) { + ops = ops.unique((Boolean) indexOptions.get("unique")); + } + if (indexOptions.containsKey("sparse")) { + ops = ops.sparse((Boolean) indexOptions.get("sparse")); + } + if (indexOptions.containsKey("background")) { + ops = ops.background((Boolean) indexOptions.get("background")); + } + if 
(indexOptions.containsKey("expireAfterSeconds")) { + ops = ops.expireAfter((Long) indexOptions.get("expireAfterSeconds"), TimeUnit.SECONDS); + } + if (indexOptions.containsKey("min")) { + ops = ops.min(((Number) indexOptions.get("min")).doubleValue()); + } + if (indexOptions.containsKey("max")) { + ops = ops.max(((Number) indexOptions.get("max")).doubleValue()); + } + if (indexOptions.containsKey("bits")) { + ops = ops.bits((Integer) indexOptions.get("bits")); + } + if (indexOptions.containsKey("bucketSize")) { + ops = ops.bucketSize(((Number) indexOptions.get("bucketSize")).doubleValue()); + } + if (indexOptions.containsKey("default_language")) { + ops = ops.defaultLanguage(indexOptions.get("default_language").toString()); + } + if (indexOptions.containsKey("language_override")) { + ops = ops.languageOverride(indexOptions.get("language_override").toString()); + } + if (indexOptions.containsKey("weights")) { + ops = ops.weights((org.bson.Document) indexOptions.get("weights")); + } + + for (String key : indexOptions.keySet()) { + if (ObjectUtils.nullSafeEquals("2dsphere", indexOptions.get(key))) { + ops = ops.sphereVersion(2); + } + } + + return ops; + }; + } + + private static Converter getDocumentIndexInfoConverter() { + + return ix -> { + Document keyDbObject = (Document) ix.get("key"); + int numberOfElements = keyDbObject.keySet().size(); + + List indexFields = new ArrayList(numberOfElements); + + for (String key : keyDbObject.keySet()) { + + Object value = keyDbObject.get(key); + + if (TWO_D_IDENTIFIERS.contains(value)) { + indexFields.add(IndexField.geo(key)); + } else if ("text".equals(value)) { + + Document weights = (Document) ix.get("weights"); + for (String fieldName : weights.keySet()) { + indexFields.add(IndexField.text(fieldName, Float.valueOf(weights.get(fieldName).toString()))); + } + + } else { + + Double keyValue = new Double(value.toString()); + + if (ONE.equals(keyValue)) { + indexFields.add(IndexField.create(key, ASC)); + } else if 
(MINUS_ONE.equals(keyValue)) { + indexFields.add(IndexField.create(key, DESC)); + } + } + } + + String name = ix.get("name").toString(); + + boolean unique = ix.containsKey("unique") ? (Boolean) ix.get("unique") : false; + boolean dropDuplicates = ix.containsKey("dropDups") ? (Boolean) ix.get("dropDups") : false; + boolean sparse = ix.containsKey("sparse") ? (Boolean) ix.get("sparse") : false; + + String language = ix.containsKey("default_language") ? (String) ix.get("default_language") : ""; + return new IndexInfo(indexFields, name, unique, dropDuplicates, sparse, language); + }; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexOperationsProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexOperationsProvider.java new file mode 100644 index 000000000..de2102ede --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexOperationsProvider.java @@ -0,0 +1,33 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.core; + +import org.springframework.dao.support.PersistenceExceptionTranslator; + +/** + * TODO: Revisit for a better pattern. 
+ * @author Mark Paluch + */ +public interface IndexOperationsProvider { + + /** + * Returns the operations that can be performed on indexes + * + * @return index operations on the named collection + */ + IndexOperations indexOps(String collectionName); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index 407a80c81..a8d89a0e8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -145,7 +145,7 @@ import com.mongodb.util.JSONParseException; * @author Mark Paluch */ @SuppressWarnings("deprecation") -public class MongoTemplate implements MongoOperations, ApplicationContextAware { +public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider { private static final Logger LOGGER = LoggerFactory.getLogger(MongoTemplate.class); private static final String ID_FIELD = "_id"; @@ -230,7 +230,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { mappingContext = this.mongoConverter.getMappingContext(); // We create indexes based on mapping events if (null != mappingContext && mappingContext instanceof MongoMappingContext) { - indexCreator = new MongoPersistentEntityIndexCreator((MongoMappingContext) mappingContext, mongoDbFactory); + indexCreator = new MongoPersistentEntityIndexCreator((MongoMappingContext) mappingContext, this); eventPublisher = new MongoMappingEventPublisher(indexCreator); if (mappingContext instanceof ApplicationEventPublisherAware) { ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); @@ -539,11 +539,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { } public IndexOperations indexOps(String collectionName) { 
- return new DefaultIndexOperations(this, collectionName); + return new DefaultIndexOperations(getMongoDbFactory(), collectionName); } public IndexOperations indexOps(Class> entityClass) { - return new DefaultIndexOperations(this, determineCollectionName(entityClass)); + return new DefaultIndexOperations(getMongoDbFactory(), determineCollectionName(entityClass)); } public BulkOperations bulkOps(BulkMode bulkMode, String collectionName) { @@ -2039,6 +2039,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware { } } + public PersistenceExceptionTranslator getExceptionTranslator() { + return exceptionTranslator; + } + private MongoPersistentEntity> getPersistentEntity(Class> type) { return type == null ? null : mappingContext.getPersistentEntity(type); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java new file mode 100644 index 000000000..9e5d876fc --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java @@ -0,0 +1,29 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import org.springframework.dao.DataAccessException; + +import com.mongodb.MongoException; +import com.mongodb.reactivestreams.client.MongoCollection; +import org.bson.Document; +import org.reactivestreams.Publisher; + +public interface ReactiveCollectionCallback { + + Publisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java new file mode 100644 index 000000000..32198bdbb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java @@ -0,0 +1,27 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import org.springframework.dao.DataAccessException; + +import com.mongodb.MongoException; +import com.mongodb.reactivestreams.client.MongoDatabase; +import org.reactivestreams.Publisher; + +public interface ReactiveDatabaseCallback { + + Publisher doInDB(MongoDatabase db) throws MongoException, DataAccessException; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveIndexOperations.java new file mode 100644 index 000000000..45646526d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveIndexOperations.java @@ -0,0 +1,58 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexInfo; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * Index operations on a collection. + * + * @author Mark Paluch + * @since 2.0 + */ +public interface ReactiveIndexOperations { + + /** + * Ensure that an index for the provided {@link IndexDefinition} exists for the collection indicated by the entity + * class. If not it will be created. 
+ * + * @param indexDefinition must not be {@literal null}. + */ + Mono ensureIndex(IndexDefinition indexDefinition); + + /** + * Drops an index from this collection. + * + * @param name name of index to drop + */ + Mono dropIndex(String name); + + /** + * Drops all indices from this collection. + */ + Mono dropAllIndexes(); + + /** + * Returns the index information on the collection. + * + * @return index information on the collection + */ + Flux getIndexInfo(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java new file mode 100644 index 000000000..f9cb26f77 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java @@ -0,0 +1,127 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.core; + +import org.springframework.beans.factory.config.AbstractFactoryBean; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.util.StringUtils; + +import com.mongodb.async.client.MongoClientSettings; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoClients; + +/** + * Convenient factory for configuring a reactive streams {@link MongoClient}. + * + * @author Mark Paluch + */ +public class ReactiveMongoClientFactoryBean extends AbstractFactoryBean + implements PersistenceExceptionTranslator { + + private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator(); + + private String connectionString; + private String host; + private Integer port; + private MongoClientSettings mongoClientSettings; + private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR; + + /** + * Configures the host to connect to. + * + * @param host + */ + public void setHost(String host) { + this.host = host; + } + + /** + * Configures the port to connect to. + * + * @param port + */ + public void setPort(int port) { + this.port = port; + } + + /** + * Configures the connection string. + * + * @param connectionString + */ + public void setConnectionString(String connectionString) { + this.connectionString = connectionString; + } + + /** + * Configures the mongo client settings. + * + * @param mongoClientSettings + */ + public void setMongoClientSettings(MongoClientSettings mongoClientSettings) { + this.mongoClientSettings = mongoClientSettings; + } + + /** + * Configures the {@link PersistenceExceptionTranslator} to use. + * + * @param exceptionTranslator + */ + public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator == null ? 
DEFAULT_EXCEPTION_TRANSLATOR : exceptionTranslator; + } + + @Override + public Class> getObjectType() { + return MongoClient.class; + } + + @Override + protected MongoClient createInstance() throws Exception { + + if (mongoClientSettings != null) { + return MongoClients.create(mongoClientSettings); + } + + if (StringUtils.hasText(connectionString)) { + return MongoClients.create(connectionString); + } + + if (StringUtils.hasText(host)) { + + if (port != null) { + return MongoClients.create(String.format("mongodb://%s:%d", host, port)); + } + + return MongoClients.create(String.format("mongodb://%s", host)); + } + + throw new IllegalStateException( + "Cannot create MongoClients. One of the following is required: mongoClientSettings, connectionString or host/port"); + } + + @Override + protected void destroyInstance(MongoClient instance) throws Exception { + instance.close(); + } + + @Override + public DataAccessException translateExceptionIfPossible(RuntimeException ex) { + return DEFAULT_EXCEPTION_TRANSLATOR.translateExceptionIfPossible(ex); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java new file mode 100644 index 000000000..3c2d7f06a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java @@ -0,0 +1,206 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.List; + +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.beans.factory.config.AbstractFactoryBean; +import org.springframework.util.Assert; + +import com.mongodb.MongoCredential; +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; +import com.mongodb.async.client.MongoClientSettings; +import com.mongodb.connection.ClusterSettings; +import com.mongodb.connection.ConnectionPoolSettings; +import com.mongodb.connection.ServerSettings; +import com.mongodb.connection.SocketSettings; +import com.mongodb.connection.SslSettings; +import com.mongodb.connection.StreamFactoryFactory; + +/** + * A factory bean for construction of a {@link MongoClientSettings} instance to be used with the async MongoDB driver. 
+ * + * @author Mark Paluch + * @since 1.7 + */ +public class ReactiveMongoClientSettingsFactoryBean extends AbstractFactoryBean { + + private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build(); + + private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference(); + private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern(); + private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern(); + private List credentialList = new ArrayList<>(); + private StreamFactoryFactory streamFactoryFactory = DEFAULT_MONGO_SETTINGS.getStreamFactoryFactory(); + private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry(); + private ClusterSettings clusterSettings = DEFAULT_MONGO_SETTINGS.getClusterSettings(); + private SocketSettings socketSettings = DEFAULT_MONGO_SETTINGS.getSocketSettings(); + private SocketSettings heartbeatSocketSettings = DEFAULT_MONGO_SETTINGS.getHeartbeatSocketSettings(); + private ConnectionPoolSettings connectionPoolSettings = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings(); + private ServerSettings serverSettings = DEFAULT_MONGO_SETTINGS.getServerSettings(); + private SslSettings sslSettings = DEFAULT_MONGO_SETTINGS.getSslSettings(); + + /** + * Set the {@link ReadPreference}. + * + * @param readPreference + */ + public void setReadPreference(ReadPreference readPreference) { + this.readPreference = readPreference; + } + + /** + * Set the {@link WriteConcern}. + * + * @param writeConcern + */ + public void setWriteConcern(WriteConcern writeConcern) { + this.writeConcern = writeConcern; + } + + /** + * Set the {@link ReadConcern}. + * + * @param readConcern + */ + public void setReadConcern(ReadConcern readConcern) { + this.readConcern = readConcern; + } + + /** + * Set the List of {@link MongoCredential}s. + * + * @param credentialList must not be {@literal null}. 
+ */ + public void setCredentialList(List credentialList) { + + Assert.notNull(credentialList, "CredendialList must not be null!"); + + this.credentialList.addAll(credentialList); + } + + /** + * Adds the {@link MongoCredential} to the list of credentials. + * + * @param mongoCredential must not be {@literal null}. + */ + public void addMongoCredential(MongoCredential mongoCredential) { + + Assert.notNull(mongoCredential, "MongoCredential must not be null!"); + + this.credentialList.add(mongoCredential); + } + + /** + * Set the {@link StreamFactoryFactory}. + * + * @param streamFactoryFactory + */ + public void setStreamFactoryFactory(StreamFactoryFactory streamFactoryFactory) { + this.streamFactoryFactory = streamFactoryFactory; + } + + /** + * Set the {@link CodecRegistry}. + * + * @param codecRegistry + */ + public void setCodecRegistry(CodecRegistry codecRegistry) { + this.codecRegistry = codecRegistry; + } + + /** + * Set the {@link ClusterSettings}. + * + * @param clusterSettings + */ + public void setClusterSettings(ClusterSettings clusterSettings) { + this.clusterSettings = clusterSettings; + } + + /** + * Set the {@link SocketSettings}. + * + * @param socketSettings + */ + public void setSocketSettings(SocketSettings socketSettings) { + this.socketSettings = socketSettings; + } + + /** + * Set the heartbeat {@link SocketSettings}. + * + * @param heartbeatSocketSettings + */ + public void setHeartbeatSocketSettings(SocketSettings heartbeatSocketSettings) { + this.heartbeatSocketSettings = heartbeatSocketSettings; + } + + /** + * Set the {@link ConnectionPoolSettings}. + * + * @param connectionPoolSettings + */ + public void setConnectionPoolSettings(ConnectionPoolSettings connectionPoolSettings) { + this.connectionPoolSettings = connectionPoolSettings; + } + + /** + * Set the {@link ServerSettings}. 
+ * + * @param serverSettings + */ + public void setServerSettings(ServerSettings serverSettings) { + this.serverSettings = serverSettings; + } + + /** + * Set the {@link SslSettings}. + * + * @param sslSettings + */ + public void setSslSettings(SslSettings sslSettings) { + this.sslSettings = sslSettings; + } + + @Override + public Class> getObjectType() { + return MongoClientSettings.class; + } + + @Override + protected MongoClientSettings createInstance() throws Exception { + + return MongoClientSettings.builder() // + .readPreference(readPreference) // + .writeConcern(writeConcern) // + .readConcern(readConcern) // + .credentialList(credentialList) // + .streamFactoryFactory(streamFactoryFactory) // + .codecRegistry(codecRegistry) // + .clusterSettings(clusterSettings) // + .socketSettings(socketSettings) // + .heartbeatSocketSettings(heartbeatSocketSettings) // + .connectionPoolSettings(connectionPoolSettings) // + .serverSettings(serverSettings) // + .sslSettings(sslSettings) // + .build(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoDatabaseHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoDatabaseHolder.java new file mode 100644 index 000000000..5679fa2ef --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoDatabaseHolder.java @@ -0,0 +1,88 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.core; + + +import com.mongodb.reactivestreams.client.MongoDatabase; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import org.springframework.transaction.support.ResourceHolderSupport; +import org.springframework.util.Assert; + + +/** + * @author Mark Paluch + */ +class ReactiveMongoDatabaseHolder extends ResourceHolderSupport { + private static final Object DEFAULT_KEY = new Object(); + + private final Map dbMap = new ConcurrentHashMap(); + + public ReactiveMongoDatabaseHolder(MongoDatabase db) { + addMongoDatabase(db); + } + + public ReactiveMongoDatabaseHolder(Object key, MongoDatabase db) { + addMongoDatabase(key, db); + } + + public MongoDatabase getMongoDatabase() { + return getMongoDatabase(DEFAULT_KEY); + } + + public MongoDatabase getMongoDatabase(Object key) { + return this.dbMap.get(key); + } + + public MongoDatabase getAnyMongoDatabase() { + if (!this.dbMap.isEmpty()) { + return this.dbMap.values().iterator().next(); + } + return null; + } + + public void addMongoDatabase(MongoDatabase session) { + addMongoDatabase(DEFAULT_KEY, session); + } + + public void addMongoDatabase(Object key, MongoDatabase session) { + Assert.notNull(key, "Key must not be null"); + Assert.notNull(session, "DB must not be null"); + this.dbMap.put(key, session); + } + + public MongoDatabase removeMongoDatabase(Object key) { + return this.dbMap.remove(key); + } + + public boolean containsMongoDatabase(MongoDatabase session) { + return this.dbMap.containsValue(session); + } + + public boolean isEmpty() { + return this.dbMap.isEmpty(); + } + + public boolean doesNotHoldNonDefaultMongoDatabase() { + synchronized (this.dbMap) { + return this.dbMap.isEmpty() || (this.dbMap.size() == 1 && this.dbMap.containsKey(DEFAULT_KEY)); + } + } + +} diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoDbUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoDbUtils.java new file mode 100644 index 000000000..f526fb7b4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoDbUtils.java @@ -0,0 +1,150 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoDatabase; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.data.authentication.UserCredentials; +import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.transaction.support.TransactionSynchronizationManager; + + +/** + * Helper class featuring helper methods for internal MongoDb classes. Mainly intended for internal use within the + * framework. + * + * @author Mark Paluch + */ +public abstract class ReactiveMongoDbUtils { + + private static final Logger LOGGER = LoggerFactory.getLogger(ReactiveMongoDbUtils.class); + + /** + * Private constructor to prevent instantiation. 
+ */ + private ReactiveMongoDbUtils() {} + + /** + * Obtains a {@link MongoDatabase} connection for the given {@link MongoClient} instance and database name + * + * @param mongo the {@link MongoClient} instance, must not be {@literal null}. + * @param databaseName the database name, must not be {@literal null} or empty. + * @return the {@link MongoDatabase} connection + */ + public static MongoDatabase getMongoDatabase(MongoClient mongo, String databaseName) { + return doGetMongoDatabase(mongo, databaseName, UserCredentials.NO_CREDENTIALS, true, databaseName); + } + + private static MongoDatabase doGetMongoDatabase(MongoClient mongo, String databaseName, UserCredentials credentials, + boolean allowCreate, String authenticationDatabaseName) { + + ReactiveMongoDatabaseHolder dbHolder = (ReactiveMongoDatabaseHolder) TransactionSynchronizationManager + .getResource(mongo); + + // Do we have a populated holder and TX sync active? + if (dbHolder != null && !dbHolder.isEmpty() && TransactionSynchronizationManager.isSynchronizationActive()) { + + MongoDatabase db = dbHolder.getMongoDatabase(databaseName); + + // DB found but not yet synchronized + if (db != null && !dbHolder.isSynchronizedWithTransaction()) { + + LOGGER.debug("Registering Spring transaction synchronization for existing MongoDB {}.", databaseName); + + TransactionSynchronizationManager.registerSynchronization(new MongoSynchronization(dbHolder, mongo)); + dbHolder.setSynchronizedWithTransaction(true); + } + + if (db != null) { + return db; + } + } + + // Lookup fresh database instance + LOGGER.debug("Getting Mongo Database name=[{}]", databaseName); + + MongoDatabase db = mongo.getDatabase(databaseName); + + // TX sync active, bind new database to thread + if (TransactionSynchronizationManager.isSynchronizationActive()) { + + LOGGER.debug("Registering Spring transaction synchronization for MongoDB instance {}.", databaseName); + + ReactiveMongoDatabaseHolder holderToUse = dbHolder; + + if (holderToUse == 
null) { + holderToUse = new ReactiveMongoDatabaseHolder(databaseName, db); + } else { + holderToUse.addMongoDatabase(databaseName, db); + } + + // synchronize holder only if not yet synchronized + if (!holderToUse.isSynchronizedWithTransaction()) { + TransactionSynchronizationManager.registerSynchronization(new MongoSynchronization(holderToUse, mongo)); + holderToUse.setSynchronizedWithTransaction(true); + } + + if (holderToUse != dbHolder) { + TransactionSynchronizationManager.bindResource(mongo, holderToUse); + } + } + + // Check whether we are allowed to return the DB. + if (!allowCreate && !isDBTransactional(db, mongo)) { + throw new IllegalStateException( + "No Mongo DB bound to thread, " + "and configuration does not allow creation of non-transactional one here"); + } + + return db; + } + + /** + * Return whether the given DB instance is transactional, that is, bound to the current thread by Spring's transaction + * facilities. + * + * @param db the DB to check + * @param mongoClient the Mongo instance that the DB was created with (may be null) + * @return whether the DB is transactional + */ + public static boolean isDBTransactional(MongoDatabase db, MongoClient mongoClient) { + + if (mongoClient == null) { + return false; + } + ReactiveMongoDatabaseHolder dbHolder = (ReactiveMongoDatabaseHolder) TransactionSynchronizationManager + .getResource(mongoClient); + return dbHolder != null && dbHolder.containsMongoDatabase(db); + } + + /** + * Check if credentials present. 
In case we're using a mongo-java-driver version 3 or above we do not have the need + * for authentication as the auth data has to be provided within the MongoClient + * + * @param credentials + * @return + */ + private static boolean requiresAuthDbAuthentication(UserCredentials credentials) { + + if (credentials == null || !credentials.hasUsername()) { + return false; + } + + return !MongoClientVersion.isMongo3Driver(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java new file mode 100644 index 000000000..e6ed168a9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java @@ -0,0 +1,958 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.Collection; + +import org.bson.Document; +import org.reactivestreams.Publisher; +import org.reactivestreams.Subscription; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.GeoResults; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; + +import com.mongodb.ReadPreference; +import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.UpdateResult; +import com.mongodb.reactivestreams.client.MongoCollection; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * Interface that specifies a basic set of MongoDB operations executed in a reactive way. + * + * Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability + * (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using + * {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}. 
+ * + * @author Mark Paluch + * @see Flux + * @see Mono + * @see http://projectreactor.io/docs/ + * @since 2.0 + */ +public interface ReactiveMongoOperations { + + /** + * Returns the reactive operations that can be performed on indexes + * + * @return index operations on the named collection + */ + ReactiveIndexOperations reactiveIndexOps(String collectionName); + + /** + * Returns the reactive operations that can be performed on indexes + * + * @return index operations on the named collection associated with the given entity class + */ + ReactiveIndexOperations reactiveIndexOps(Class> entityClass); + + /** + * Returns the operations that can be performed on indexes + * + * @return index operations on the named collection + */ + IndexOperations indexOps(String collectionName); + + /** + * Returns the operations that can be performed on indexes + * + * @return index operations on the named collection associated with the given entity class + */ + IndexOperations indexOps(Class> entityClass); + + /** + * Execute the a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the + * MongoDB driver to convert the JSON string to a DBObject. Any errors that result from executing this command will be + * converted into Spring's DAO exception hierarchy. + * + * @param jsonCommand a MongoDB command expressed as a JSON string. + * @return a result object returned by the action + */ + Mono executeCommand(String jsonCommand); + + /** + * Execute a MongoDB command. Any errors that result from executing this command will be converted into Spring's DAO + * exception hierarchy. + * + * @param command a MongoDB command + * @return a result object returned by the action + */ + Mono executeCommand(Document command); + + /** + * Execute a MongoDB command. Any errors that result from executing this command will be converted into Spring's data + * access exception hierarchy. + * + * @param command a MongoDB command, must not be {@literal null}. 
+ * @param readPreference read preferences to use, can be {@literal null}. + * @return a result object returned by the action + */ + Mono executeCommand(Document command, ReadPreference readPreference); + + /** + * Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary. + * + * Allows for returning a result object, that is a domain object or a collection of domain objects. + * + * @param return type + * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. + * @return a result object returned by the action + */ + Flux execute(ReactiveDatabaseCallback action); + + /** + * Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class. + * + * Allows for returning a result object, that is a domain object or a collection of domain objects. + * + * @param entityClass class that determines the collection to use + * @param return type + * @param action callback object that specifies the MongoDB action + * @return a result object returned by the action or null + */ + Flux execute(Class> entityClass, ReactiveCollectionCallback action); + + /** + * Executes the given {@link ReactiveCollectionCallback} on the collection of the given name. + * + * Allows for returning a result object, that is a domain object or a collection of domain objects. + * + * @param return type + * @param collectionName the name of the collection that specifies which DBCollection instance will be passed into + * @param action callback object that specifies the MongoDB action the callback action. + * @return a result object returned by the action or null + */ + Flux execute(String collectionName, ReactiveCollectionCallback action); + + /** + * Create an uncapped collection with a name based on the provided entity class. 
+ * + * @param entityClass class that determines the collection to create + * @return the created collection + */ + Mono> createCollection(Class entityClass); + + /** + * Create a collection with a name based on the provided entity class using the options. + * + * @param entityClass class that determines the collection to create + * @param collectionOptions options to use when creating the collection. + * @return the created collection + */ + Mono> createCollection(Class entityClass, CollectionOptions collectionOptions); + + /** + * Create an uncapped collection with the provided name. + * + * @param collectionName name of the collection + * @return the created collection + */ + Mono> createCollection(String collectionName); + + /** + * Create a collection with the provided name and options. + * + * @param collectionName name of the collection + * @param collectionOptions options to use when creating the collection. + * @return the created collection + */ + Mono> createCollection(String collectionName, CollectionOptions collectionOptions); + + /** + * A set of collection names. + * + * @return list of collection names + */ + Flux getCollectionNames(); + + /** + * Get a collection by name, creating it if it doesn't exist. + * + * Translate any exceptions as necessary. + * + * @param collectionName name of the collection + * @return an existing collection or a newly created one. + */ + MongoCollection getCollection(String collectionName); + + /** + * Check to see if a collection with a name indicated by the entity class exists. + * + * Translate any exceptions as necessary. + * + * @param entityClass class that determines the name of the collection + * @return true if a collection with the given name is found, false otherwise. + */ + Mono collectionExists(Class entityClass); + + /** + * Check to see if a collection with a given name exists. + * + * Translate any exceptions as necessary. 
+ * + * @param collectionName name of the collection + * @return true if a collection with the given name is found, false otherwise. + */ + Mono collectionExists(String collectionName); + + /** + * Drop the collection with the name indicated by the entity class. + * + * Translate any exceptions as necessary. + * + * @param entityClass class that determines the collection to drop/delete. + */ + Mono dropCollection(Class entityClass); + + /** + * Drop the collection with the given name. + * + * Translate any exceptions as necessary. + * + * @param collectionName name of the collection to drop/delete. + */ + Mono dropCollection(String collectionName); + + /** + * Query for a list of objects of type T from the collection used by the entity class. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + * + * @param entityClass the parametrized type of the returned list + * @return the converted collection + */ + Flux findAll(Class entityClass); + + /** + * Query for a list of objects of type T from the specified collection. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + * + * @param entityClass the parametrized type of the returned list. 
+ * @param collectionName name of the collection to retrieve the objects from + * @return the converted collection + */ + Flux findAll(Class entityClass, String collectionName); + + /** + * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the + * specified type. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @return the converted object + */ + Mono findOne(Query query, Class entityClass); + + /** + * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified + * type. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from + * @return the converted object + */ + Mono findOne(Query query, Class entityClass, String collectionName); + + /** + * Determine result of given {@link Query} contains at least one element. 
+ * + * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param collectionName name of the collection to check for objects. + * @return + */ + Mono exists(Query query, String collectionName); + + /** + * Determine result of given {@link Query} contains at least one element. + * + * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param entityClass the parametrized type. + * @return + */ + Mono exists(Query query, Class> entityClass); + + /** + * Determine result of given {@link Query} contains at least one element. + * + * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param entityClass the parametrized type. + * @param collectionName name of the collection to check for objects. + * @return + */ + Mono exists(Query query, Class> entityClass, String collectionName); + + /** + * Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @return the List of converted objects + */ + Flux find(Query query, Class entityClass); + + /** + * Map the results of an ad-hoc query on the specified collection to a List of the specified type. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. 
+ * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from + * @return the List of converted objects + */ + Flux find(Query query, Class entityClass, String collectionName); + + /** + * Returns a document with the given id mapped onto the given class. The collection the query is ran against will be + * derived from the given target class as well. + * + * @param + * @param id the id of the document to return. + * @param entityClass the type the document shall be converted into. + * @return the document with the given id mapped onto the given target class. + */ + Mono findById(Object id, Class entityClass); + + /** + * Returns the document with the given id from the given collection mapped onto the given target class. + * + * @param id the id of the document to return + * @param entityClass the type to convert the document to + * @param collectionName the collection to query for the document + * @param + * @return + */ + Mono findById(Object id, Class entityClass, String collectionName); + + /** + * Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Will consider entity mapping + * information to determine the collection the query is ran against. Note, that MongoDB limits the number of results + * by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of + * results. + * + * @param near must not be {@literal null}. + * @param entityClass must not be {@literal null}. 
+ * @return + */ + Flux> geoNear(NearQuery near, Class entityClass); + + /** + * Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the + * number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a + * particular number of results. + * + * @param near must not be {@literal null}. + * @param entityClass must not be {@literal null}. + * @param collectionName the collection to trigger the query against. If no collection name is given the entity class + * will be inspected. + * @return + */ + Flux> geoNear(NearQuery near, Class entityClass, String collectionName); + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional + * fields specification. + * @param update the {@link Update} to apply on matching documents. + * @param entityClass the parametrized type. + * @return + */ + Mono findAndModify(Query query, Update update, Class entityClass); + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional + * fields specification. + * @param update the {@link Update} to apply on matching documents. + * @param entityClass the parametrized type. + * @param collectionName the collection to query. + * @return + */ + Mono findAndModify(Query query, Update update, Class entityClass, String collectionName); + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking + * {@link FindAndModifyOptions} into account. 
+ * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional + * fields specification. + * @param update the {@link Update} to apply on matching documents. + * @param options the {@link FindAndModifyOptions} holding additional information. + * @param entityClass the parametrized type. + * @return + */ + Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass); + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking + * {@link FindAndModifyOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional + * fields specification. + * @param update the {@link Update} to apply on matching documents. + * @param options the {@link FindAndModifyOptions} holding additional information. + * @param entityClass the parametrized type. + * @param collectionName the collection to query. + * @return + */ + Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, + String collectionName); + + /** + * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the + * specified type. The first document that matches the query is returned and also removed from the collection in the + * database. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. + * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. 
+ * @return the converted object + */ + Mono findAndRemove(Query query, Class entityClass); + + /** + * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified + * type. The first document that matches the query is returned and also removed from the collection in the database. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from + * @return the converted object + */ + Mono findAndRemove(Query query, Class entityClass, String collectionName); + + /** + * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. + * + * @param query + * @param entityClass must not be {@literal null}. + * @return + */ + Mono count(Query query, Class> entityClass); + + /** + * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} + * must solely consist of document field references as we lack type information to map potential property references + * onto document fields. TO make sure the query gets mapped, use {@link #count(Query, Class, String)}. + * + * @param query + * @param collectionName must not be {@literal null} or empty. 
+ * @return + * @see #count(Query, Class, String) + */ + Mono count(Query query, String collectionName); + + /** + * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity + * class to map the given {@link Query}. + * + * @param query + * @param entityClass must not be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @return + */ + Mono count(Query query, Class> entityClass, String collectionName); + + /** + * Insert the object into the collection for the entity type of the object to save. + * + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. + * + * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * + * Spring's Type Conversion" for more details. + * + * + * Insert is used to initially store the object into the database. To update an existing object use the save method. + * + * @param objectToSave the object to store in the collection. + * @return + */ + Mono insert(T objectToSave); + + /** + * Insert the object into the specified collection. + * + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * Insert is used to initially store the object into the database. To update an existing object use the save method. 
+ * + * @param objectToSave the object to store in the collection + * @param collectionName name of the collection to store the object in + * @return + */ + Mono insert(T objectToSave, String collectionName); + + /** + * Insert a Collection of objects into a collection in a single batch write to the database. + * + * @param batchToSave the list of objects to save. + * @param entityClass class that determines the collection to use + * @return + */ + Flux insert(Collection extends T> batchToSave, Class> entityClass); + + /** + * Insert a list of objects into the specified collection in a single batch write to the database. + * + * @param batchToSave the list of objects to save. + * @param collectionName name of the collection to store the object in + * @return + */ + Flux insert(Collection extends T> batchToSave, String collectionName); + + /** + * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the + * class. + * + * @param objectsToSave the list of objects to save. + * @return + */ + Flux insertAll(Collection extends T> objectsToSave); + + /** + * Insert the object into the collection for the entity type of the object to save. + * + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. + * + * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * + * Spring's Type Conversion" for more details. + * + * + * Insert is used to initially store the object into the database. To update an existing object use the save method. + * + * @param objectToSave the object to store in the collection. 
+ * @return + */ + Mono insert(Mono extends T> objectToSave); + + /** + * Insert a Collection of objects into a collection in a single batch write to the database. + * + * @param batchToSave the publisher which provides objects to save. + * @param entityClass class that determines the collection to use + * @return + */ + Flux insert(Publisher extends T> batchToSave, Class> entityClass); + + /** + * Insert a list of objects into the specified collection in a single batch write to the database. + * + * @param batchToSave the publisher which provides objects to save. + * @param collectionName name of the collection to store the object in + * @return + */ + Flux insert(Publisher extends T> batchToSave, String collectionName); + + /** + * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the + * class. + * + * @param objectsToSave the publisher which provides objects to save. + * @return + */ + Flux insertAll(Publisher extends T> objectsToSave); + + /** + * Save the object to the collection for the entity type of the object to save. This will perform an insert if the + * object is not already present, that is an 'upsert'. + * + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * + * Spring's Type Conversion" for more details. + * + * @param objectToSave the object to store in the collection + * @return + */ + Mono save(T objectToSave); + + /** + * Save the object to the specified collection. 
This will perform an insert if the object is not already present, that + * is an 'upsert'. + * + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's + * Type Conversion" for more details. + * + * @param objectToSave the object to store in the collection + * @param collectionName name of the collection to store the object in + * @return + */ + Mono save(T objectToSave, String collectionName); + + /** + * Save the object to the collection for the entity type of the object to save. This will perform an insert if the + * object is not already present, that is an 'upsert'. + * + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * + * Spring's Type Conversion" for more details. + * + * @param objectToSave the object to store in the collection + * @return + */ + Mono save(Mono extends T> objectToSave); + + /** + * Save the object to the specified collection. This will perform an insert if the object is not already present, that + * is an 'upsert'. 
+ * + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's + * Type Conversion" for more details. + * + * @param objectToSave the object to store in the collection + * @param collectionName name of the collection to store the object in + * @return + */ + Mono save(Mono extends T> objectToSave, String collectionName); + + /** + * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by + * combining the query document and the update document. + * + * @param query the query document that specifies the criteria used to select a record to be upserted + * @param update the update document that contains the updated object or $ operators to manipulate the existing object + * @param entityClass class that determines the collection to use + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono upsert(Query query, Update update, Class> entityClass); + + /** + * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by + * combining the query document and the update document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. 
+ * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono upsert(Query query, Update update, String collectionName); + + /** + * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by + * combining the query document and the update document. + * + * @param query the query document that specifies the criteria used to select a record to be upserted + * @param update the update document that contains the updated object or $ operators to manipulate the existing object + * @param entityClass class of the pojo to be operated on + * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono upsert(Query query, Update update, Class> entityClass, String collectionName); + + /** + * Updates the first object that is found in the collection of the entity class that matches the query document with + * the provided update document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. + * @param entityClass class that determines the collection to use + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono updateFirst(Query query, Update update, Class> entityClass); + + /** + * Updates the first object that is found in the specified collection that matches the query document criteria with + * the provided updated document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. 
+ * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono updateFirst(Query query, Update update, String collectionName); + + /** + * Updates the first object that is found in the specified collection that matches the query document criteria with + * the provided updated document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. + * @param entityClass class of the pojo to be operated on + * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono updateFirst(Query query, Update update, Class> entityClass, String collectionName); + + /** + * Updates all objects that are found in the collection for the entity class that matches the query document criteria + * with the provided updated document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. + * @param entityClass class that determines the collection to use + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono updateMulti(Query query, Update update, Class> entityClass); + + /** + * Updates all objects that are found in the specified collection that matches the query document criteria with the + * provided updated document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. 
+ * @param collectionName name of the collection to update the object in
+ * @return the WriteResult which lets you access the results of the previous write.
+ */
+ Mono updateMulti(Query query, Update update, String collectionName);
+
+ /**
+ * Updates all objects that are found in the collection for the entity class that matches the query document criteria
+ * with the provided updated document.
+ *
+ * @param query the query document that specifies the criteria used to select a record to be updated
+ * @param update the update document that contains the updated object or $ operators to manipulate the existing
+ * object.
+ * @param entityClass class of the pojo to be operated on
+ * @param collectionName name of the collection to update the object in
+ * @return the WriteResult which lets you access the results of the previous write.
+ */
+ Mono updateMulti(final Query query, final Update update, Class> entityClass, String collectionName);
+
+ /**
+ * Remove the given object from the collection by id.
+ *
+ * @param object
+ * @return
+ */
+ Mono remove(Object object);
+
+ /**
+ * Removes the given object from the given collection.
+ *
+ * @param object
+ * @param collection must not be {@literal null} or empty.
+ */
+ Mono remove(Object object, String collection);
+
+ /**
+ * Remove the given object from the collection by id.
+ *
+ * @param objectToRemove
+ * @return
+ */
+ Mono remove(Mono extends Object> objectToRemove);
+
+ /**
+ * Removes the given object from the given collection.
+ *
+ * @param objectToRemove
+ * @param collection must not be {@literal null} or empty.
+ * @return
+ */
+ Mono remove(Mono extends Object> objectToRemove, String collection);
+
+ /**
+ * Remove all documents that match the provided query document criteria from the collection used to store the
+ * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query.
+ *
+ * @param query
+ * @param entityClass
+ * @return
+ */
+ Mono remove(Query query, Class> entityClass);
+
+ /**
+ * Remove all documents that match the provided query document criteria from the collection used to store the
+ * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query.
+ *
+ * @param query
+ * @param entityClass
+ * @param collectionName
+ * @return
+ */
+ Mono remove(Query query, Class> entityClass, String collectionName);
+
+ /**
+ * Remove all documents from the specified collection that match the provided query document criteria. There is no
+ * conversion/mapping done for any criteria using the id field.
+ *
+ * @param query the query document that specifies the criteria used to remove a record
+ * @param collectionName name of the collection where the objects will be removed
+ */
+ Mono remove(Query query, String collectionName);
+
+ /**
+ * Returns and removes all documents from the specified collection that match the provided query.
+ *
+ * @param query
+ * @param collectionName
+ * @return
+ */
+ Flux findAllAndRemove(Query query, String collectionName);
+
+ /**
+ * Returns and removes all documents matching the given query from the collection used to store the entityClass.
+ *
+ * @param query
+ * @param entityClass
+ * @return
+ */
+ Flux findAllAndRemove(Query query, Class entityClass);
+
+ /**
+ * Returns and removes all documents that match the provided query document criteria from the collection used to
+ * store the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in
+ * the query.
+ *
+ * @param query
+ * @param entityClass
+ * @param collectionName
+ * @return
+ */
+ Flux findAllAndRemove(Query query, Class entityClass, String collectionName);
+
+ /**
+ * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified
+ * type.
The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite + * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link Subscription#cancel() canceled}. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @return the List of converted objects + */ + Flux tail(Query query, Class entityClass); + + /** + * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified + * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite + * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link Subscription#cancel() canceled}. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. 
+ * @param collectionName name of the collection to retrieve the objects from + * @return the List of converted objects + */ + Flux tail(Query query, Class entityClass, String collectionName); + + /** + * Returns the underlying {@link MongoConverter}. + * + * @return + */ + MongoConverter getConverter(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java new file mode 100644 index 000000000..fb1230ff4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -0,0 +1,2445 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.SerializationUtils.*; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.bson.types.ObjectId; +import org.reactivestreams.Publisher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.BeansException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.context.ApplicationEventPublisherAware; +import org.springframework.context.ApplicationListener; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.core.convert.ConversionService; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.annotation.Id; +import org.springframework.data.convert.EntityReader; +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.Metric; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.ConvertingPropertyAccessor; +import 
org.springframework.data.mapping.model.MappingException; +import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.MongoTemplate.DocumentCallback; +import org.springframework.data.mongodb.core.convert.DbRefProxyHandler; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.DbRefResolverCallback; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoWriter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher; +import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.AfterDeleteEvent; +import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeDeleteEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; +import 
org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.BasicDBObject; +import com.mongodb.CursorType; +import com.mongodb.DBCollection; +import com.mongodb.DBCursor; +import com.mongodb.DBRef; +import com.mongodb.Mongo; +import com.mongodb.MongoException; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.FindOneAndDeleteOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.ReturnDocument; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.UpdateResult; +import com.mongodb.reactivestreams.client.FindPublisher; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; +import com.mongodb.reactivestreams.client.Success; +import com.mongodb.util.JSONParseException; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.util.function.Tuple2; + +/** + * Primary implementation of {@link ReactiveMongoOperations}. It simplifies the use of Reactive MongoDB usage and helps + * to avoid common errors. It executes core MongoDB workflow, leaving application code to provide {@link Document} and + * extract results. 
This class executes BSON queries or updates, initiating iteration over {@link FindPublisher} and + * catching MongoDB exceptions and translating them to the generic, more informative exception hierarchy defined in the + * org.springframework.dao package. Can be used within a service implementation via direct instantiation with a + * {@link SimpleReactiveMongoDatabaseFactory} reference, or get prepared in an application context and given to services as + * bean reference. Note: The {@link SimpleReactiveMongoDatabaseFactory} should always be configured as a bean in the + * application context, in the first case given to the service directly, in the second case to the prepared template. + * + * @author Mark Paluch + * @since 2.0 + */ +public class ReactiveMongoTemplate implements ReactiveMongoOperations, ApplicationContextAware { + + public static final DbRefResolver NO_OP_REF_RESOLVER = new NoOpDbRefResolver(); + + private static final Logger LOGGER = LoggerFactory.getLogger(ReactiveMongoTemplate.class); + private static final String ID_FIELD = "_id"; + private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE; + private static final Collection> ITERABLE_CLASSES; + + static { + + Set> iterableClasses = new HashSet<>(); + iterableClasses.add(List.class); + iterableClasses.add(Collection.class); + iterableClasses.add(Iterator.class); + iterableClasses.add(Publisher.class); + + ITERABLE_CLASSES = Collections.unmodifiableCollection(iterableClasses); + } + + private final MongoConverter mongoConverter; + private final MappingContext extends MongoPersistentEntity>, MongoPersistentProperty> mappingContext; + private final ReactiveMongoDatabaseFactory mongoDatabaseFactory; + private final PersistenceExceptionTranslator exceptionTranslator; + private final QueryMapper queryMapper; + private final UpdateMapper updateMapper; + + private int publisherBatchSize = 10; + private WriteConcern writeConcern; + private WriteConcernResolver 
writeConcernResolver = DefaultWriteConcernResolver.INSTANCE; + private WriteResultChecking writeResultChecking = WriteResultChecking.NONE; + private ReadPreference readPreference; + private ApplicationEventPublisher eventPublisher; + private MongoPersistentEntityIndexCreator indexCreator; + + /** + * Constructor used for a basic template configuration. + * + * @param mongoClient must not be {@literal null}. + * @param databaseName must not be {@literal null} or empty. + */ + public ReactiveMongoTemplate(MongoClient mongoClient, String databaseName) { + this(new SimpleReactiveMongoDatabaseFactory(mongoClient, databaseName), null); + } + + /** + * Constructor used for a basic template configuration. + * + * @param mongoDatabaseFactory must not be {@literal null}. + */ + public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory) { + this(mongoDatabaseFactory, null); + } + + /** + * Constructor used for a basic template configuration. + * + * @param mongoDatabaseFactory must not be {@literal null}. + * @param mongoConverter + */ + public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, MongoConverter mongoConverter) { + + Assert.notNull(mongoDatabaseFactory, "ReactiveMongoDatabaseFactory must not be null!"); + + this.mongoDatabaseFactory = mongoDatabaseFactory; + this.exceptionTranslator = mongoDatabaseFactory.getExceptionTranslator(); + this.mongoConverter = mongoConverter == null ? 
getDefaultMongoConverter() : mongoConverter; + this.queryMapper = new QueryMapper(this.mongoConverter); + this.updateMapper = new UpdateMapper(this.mongoConverter); + + // We always have a mapping context in the converter, whether it's a simple one or not + mappingContext = this.mongoConverter.getMappingContext(); + // We create indexes based on mapping events + + if (null != mappingContext && mappingContext instanceof MongoMappingContext) { + indexCreator = new MongoPersistentEntityIndexCreator((MongoMappingContext) mappingContext, + new BlockingIndexOptionsProvider(this)); + eventPublisher = new MongoMappingEventPublisher(indexCreator); + if (mappingContext instanceof ApplicationEventPublisherAware) { + ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + } + } + } + + /** + * Configures the {@link WriteResultChecking} to be used with the template. Setting {@literal null} will reset the + * default of {@link ReactiveMongoTemplate#DEFAULT_WRITE_RESULT_CHECKING}. + * + * @param resultChecking + */ + public void setWriteResultChecking(WriteResultChecking resultChecking) { + this.writeResultChecking = resultChecking == null ? DEFAULT_WRITE_RESULT_CHECKING : resultChecking; + } + + /** + * Configures the {@link WriteConcern} to be used with the template. If none is configured the {@link WriteConcern} + * configured on the {@link MongoDbFactory} will apply. If you configured a {@link Mongo} instance no + * {@link WriteConcern} will be used. + * + * @param writeConcern + */ + public void setWriteConcern(WriteConcern writeConcern) { + this.writeConcern = writeConcern; + } + + /** + * Configures the {@link WriteConcernResolver} to be used with the template. 
+ * + * @param writeConcernResolver + */ + public void setWriteConcernResolver(WriteConcernResolver writeConcernResolver) { + this.writeConcernResolver = writeConcernResolver; + } + + /** + * Used by @{link {@link #prepareCollection(MongoCollection)} to set the {@link ReadPreference} before any operations + * are performed. + * + * @param readPreference + */ + public void setReadPreference(ReadPreference readPreference) { + this.readPreference = readPreference; + } + + /** + * Used to set a batch size when working with batches of {@link Publisher} emitting items to insert. + * + * @param publisherBatchSize batch size + */ + public void setPublisherBatchSize(int publisherBatchSize) { + this.publisherBatchSize = publisherBatchSize; + } + + /* + * (non-Javadoc) + * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) + */ + public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + + prepareIndexCreator(applicationContext); + + eventPublisher = applicationContext; + if (mappingContext instanceof ApplicationEventPublisherAware) { + ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + } + } + + /** + * Inspects the given {@link ApplicationContext} for {@link MongoPersistentEntityIndexCreator} and those in turn if + * they were registered for the current {@link MappingContext}. If no creator for the current {@link MappingContext} + * can be found we manually add the internally created one as {@link ApplicationListener} to make sure indexes get + * created appropriately for entity types persisted through this {@link ReactiveMongoTemplate} instance. + * + * @param context must not be {@literal null}. 
+ */ + private void prepareIndexCreator(ApplicationContext context) { + + String[] indexCreators = context.getBeanNamesForType(MongoPersistentEntityIndexCreator.class); + + for (String creator : indexCreators) { + MongoPersistentEntityIndexCreator creatorBean = context.getBean(creator, MongoPersistentEntityIndexCreator.class); + if (creatorBean.isIndexCreatorFor(mappingContext)) { + return; + } + } + + if (context instanceof ConfigurableApplicationContext) { + ((ConfigurableApplicationContext) context).addApplicationListener(indexCreator); + } + } + + /** + * Returns the default {@link MongoConverter}. + * + * @return + */ + public MongoConverter getConverter() { + return this.mongoConverter; + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.String) + */ + public ReactiveIndexOperations reactiveIndexOps(String collectionName) { + return new DefaultReactiveIndexOperations(this, collectionName); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.Class) + */ + public ReactiveIndexOperations reactiveIndexOps(Class> entityClass) { + return new DefaultReactiveIndexOperations(this, determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.String) + */ + public IndexOperations indexOps(String collectionName) { + return new BlockingIndexOperations(new DefaultReactiveIndexOperations(this, collectionName)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.Class) + */ + public IndexOperations indexOps(Class> entityClass) { + return new BlockingIndexOperations(new DefaultReactiveIndexOperations(this, determineCollectionName(entityClass))); + } + + public String getCollectionName(Class> entityClass) { + return this.determineCollectionName(entityClass); + } + + 
/* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(java.lang.String) + */ + public Mono executeCommand(String jsonCommand) { + + Assert.notNull(jsonCommand, "Command must not be empty!"); + + return executeCommand(Document.parse(jsonCommand)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(org.bson.Document) + */ + public Mono executeCommand(final Document command) { + + Assert.notNull(command, "Command must not be null!"); + + return createFlux(db -> readPreference != null ? db.runCommand(command, readPreference) : db.runCommand(command)) + .next(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(org.bson.Document, com.mongodb.ReadPreference) + */ + public Mono executeCommand(final Document command, final ReadPreference readPreference) { + + Assert.notNull(command, "Command must not be null!"); + + return createFlux(db -> readPreference != null ? 
db.runCommand(command, readPreference) : db.runCommand(command)) + .next(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(java.lang.Class, org.springframework.data.mongodb.core.ReactiveCollectionCallback) + */ + @Override + public Flux execute(Class> entityClass, ReactiveCollectionCallback action) { + return createFlux(determineCollectionName(entityClass), action); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(org.springframework.data.mongodb.core.ReactiveDbCallback) + */ + @Override + public Flux execute(ReactiveDatabaseCallback action) { + return createFlux(action); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(java.lang.String, org.springframework.data.mongodb.core.ReactiveCollectionCallback) + */ + public Flux execute(String collectionName, ReactiveCollectionCallback callback) { + Assert.notNull(callback); + return createFlux(collectionName, callback); + } + + /** + * Create a reusable Flux for a {@link ReactiveDatabaseCallback}. It's up to the developer to choose to obtain a new + * {@link Flux} or to reuse the {@link Flux}. + * + * @param callback must not be {@literal null} + * @return a {@link Flux} wrapping the {@link ReactiveDatabaseCallback}. + */ + public Flux createFlux(ReactiveDatabaseCallback callback) { + + Assert.notNull(callback); + + return Flux.defer(() -> callback.doInDB(getMongoDatabase())).onErrorResumeWith(translateFluxException()); + } + + /** + * Create a reusable Mono for a {@link ReactiveDatabaseCallback}. It's up to the developer to choose to obtain a new + * {@link Flux} or to reuse the {@link Flux}. + * + * @param callback must not be {@literal null} + * @return a {@link Mono} wrapping the {@link ReactiveDatabaseCallback}. 
+ */ + public Mono createMono(final ReactiveDatabaseCallback callback) { + + Assert.notNull(callback); + + return Mono.defer(() -> Mono.from(callback.doInDB(getMongoDatabase()))).otherwise(translateMonoException()); + } + + /** + * Create a reusable {@link Flux} for the {@code collectionName} and {@link ReactiveCollectionCallback}. + * + * @param collectionName must not be empty or {@literal null}. + * @param callback must not be {@literal null}. + * @return a reusable {@link Flux} wrapping the {@link ReactiveCollectionCallback}. + */ + public Flux createFlux(String collectionName, ReactiveCollectionCallback callback) { + + Assert.hasText(collectionName); + Assert.notNull(callback); + + Mono> collectionPublisher = Mono + .fromCallable(() -> getAndPrepareCollection(getMongoDatabase(), collectionName)); + + return collectionPublisher.flatMap(callback::doInCollection).onErrorResumeWith(translateFluxException()); + } + + /** + * Create a reusable {@link Mono} for the {@code collectionName} and {@link ReactiveCollectionCallback}. + * + * @param collectionName must not be empty or {@literal null}. + * @param callback must not be {@literal null}. + * @param + * @return a reusable {@link Mono} wrapping the {@link ReactiveCollectionCallback}. 
+ */ + public Mono createMono(String collectionName, ReactiveCollectionCallback callback) { + + Assert.hasText(collectionName); + Assert.notNull(callback); + + Mono> collectionPublisher = Mono + .fromCallable(() -> getAndPrepareCollection(getMongoDatabase(), collectionName)); + + return collectionPublisher.then(collection -> Mono.from(callback.doInCollection(collection))) + .otherwise(translateMonoException()); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class) + */ + public Mono> createCollection(Class entityClass) { + return createCollection(determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class, org.springframework.data.mongodb.core.CollectionOptions) + */ + public Mono> createCollection(Class entityClass, + CollectionOptions collectionOptions) { + return createCollection(determineCollectionName(entityClass), collectionOptions); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.String) + */ + public Mono> createCollection(final String collectionName) { + return doCreateCollection(collectionName, new CreateCollectionOptions()); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.String, org.springframework.data.mongodb.core.CollectionOptions) + */ + public Mono> createCollection(final String collectionName, + final CollectionOptions collectionOptions) { + return doCreateCollection(collectionName, convertToCreateCollectionOptions(collectionOptions)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#getCollection(java.lang.String) + */ + public MongoCollection getCollection(final String collectionName) { + return execute((MongoDatabaseCallback>) db -> db.getCollection(collectionName)); + } 
+ + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#collectionExists(java.lang.Class) + */ + public Mono collectionExists(Class entityClass) { + return collectionExists(determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#collectionExists(java.lang.String) + */ + public Mono
null
+ * Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability + * (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using + * {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}. + * + * @author Mark Paluch + * @see Flux + * @see Mono + * @see http://projectreactor.io/docs/ + * @since 2.0 + */ +public interface ReactiveMongoOperations { + + /** + * Returns the reactive operations that can be performed on indexes + * + * @return index operations on the named collection + */ + ReactiveIndexOperations reactiveIndexOps(String collectionName); + + /** + * Returns the reactive operations that can be performed on indexes + * + * @return index operations on the named collection associated with the given entity class + */ + ReactiveIndexOperations reactiveIndexOps(Class> entityClass); + + /** + * Returns the operations that can be performed on indexes + * + * @return index operations on the named collection + */ + IndexOperations indexOps(String collectionName); + + /** + * Returns the operations that can be performed on indexes + * + * @return index operations on the named collection associated with the given entity class + */ + IndexOperations indexOps(Class> entityClass); + + /** + * Execute the a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the + * MongoDB driver to convert the JSON string to a DBObject. Any errors that result from executing this command will be + * converted into Spring's DAO exception hierarchy. + * + * @param jsonCommand a MongoDB command expressed as a JSON string. + * @return a result object returned by the action + */ + Mono executeCommand(String jsonCommand); + + /** + * Execute a MongoDB command. Any errors that result from executing this command will be converted into Spring's DAO + * exception hierarchy. 
+ * + * @param command a MongoDB command + * @return a result object returned by the action + */ + Mono executeCommand(Document command); + + /** + * Execute a MongoDB command. Any errors that result from executing this command will be converted into Spring's data + * access exception hierarchy. + * + * @param command a MongoDB command, must not be {@literal null}. + * @param readPreference read preferences to use, can be {@literal null}. + * @return a result object returned by the action + */ + Mono executeCommand(Document command, ReadPreference readPreference); + + /** + * Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary. + * + * Allows for returning a result object, that is a domain object or a collection of domain objects. + * + * @param return type + * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. + * @return a result object returned by the action + */ + Flux execute(ReactiveDatabaseCallback action); + + /** + * Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class. + * + * Allows for returning a result object, that is a domain object or a collection of domain objects. + * + * @param entityClass class that determines the collection to use + * @param return type + * @param action callback object that specifies the MongoDB action + * @return a result object returned by the action or null + */ + Flux execute(Class> entityClass, ReactiveCollectionCallback action); + + /** + * Executes the given {@link ReactiveCollectionCallback} on the collection of the given name. + * + * Allows for returning a result object, that is a domain object or a collection of domain objects. + * + * @param return type + * @param collectionName the name of the collection that specifies which DBCollection instance will be passed into + * @param action callback object that specifies the MongoDB action the callback action. 
+ * @return a result object returned by the action or null + */ + Flux execute(String collectionName, ReactiveCollectionCallback action); + + /** + * Create an uncapped collection with a name based on the provided entity class. + * + * @param entityClass class that determines the collection to create + * @return the created collection + */ + Mono> createCollection(Class entityClass); + + /** + * Create a collection with a name based on the provided entity class using the options. + * + * @param entityClass class that determines the collection to create + * @param collectionOptions options to use when creating the collection. + * @return the created collection + */ + Mono> createCollection(Class entityClass, CollectionOptions collectionOptions); + + /** + * Create an uncapped collection with the provided name. + * + * @param collectionName name of the collection + * @return the created collection + */ + Mono> createCollection(String collectionName); + + /** + * Create a collection with the provided name and options. + * + * @param collectionName name of the collection + * @param collectionOptions options to use when creating the collection. + * @return the created collection + */ + Mono> createCollection(String collectionName, CollectionOptions collectionOptions); + + /** + * A set of collection names. + * + * @return list of collection names + */ + Flux getCollectionNames(); + + /** + * Get a collection by name, creating it if it doesn't exist. + * + * Translate any exceptions as necessary. + * + * @param collectionName name of the collection + * @return an existing collection or a newly created one. + */ + MongoCollection getCollection(String collectionName); + + /** + * Check to see if a collection with a name indicated by the entity class exists. + * + * Translate any exceptions as necessary. + * + * @param entityClass class that determines the name of the collection + * @return true if a collection with the given name is found, false otherwise. 
+ */ + Mono collectionExists(Class entityClass); + + /** + * Check to see if a collection with a given name exists. + * + * Translate any exceptions as necessary. + * + * @param collectionName name of the collection + * @return true if a collection with the given name is found, false otherwise. + */ + Mono collectionExists(String collectionName); + + /** + * Drop the collection with the name indicated by the entity class. + * + * Translate any exceptions as necessary. + * + * @param entityClass class that determines the collection to drop/delete. + */ + Mono dropCollection(Class entityClass); + + /** + * Drop the collection with the given name. + * + * Translate any exceptions as necessary. + * + * @param collectionName name of the collection to drop/delete. + */ + Mono dropCollection(String collectionName); + + /** + * Query for a list of objects of type T from the collection used by the entity class. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + * + * @param entityClass the parametrized type of the returned list + * @return the converted collection + */ + Flux findAll(Class entityClass); + + /** + * Query for a list of objects of type T from the specified collection. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. 
+ * + * @param entityClass the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from + * @return the converted collection + */ + Flux findAll(Class entityClass, String collectionName); + + /** + * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the + * specified type. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @return the converted object + */ + Mono findOne(Query query, Class entityClass); + + /** + * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified + * type. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. 
+ * @param collectionName name of the collection to retrieve the objects from + * @return the converted object + */ + Mono findOne(Query query, Class entityClass, String collectionName); + + /** + * Determine result of given {@link Query} contains at least one element. + * + * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param collectionName name of the collection to check for objects. + * @return + */ + Mono exists(Query query, String collectionName); + + /** + * Determine result of given {@link Query} contains at least one element. + * + * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param entityClass the parametrized type. + * @return + */ + Mono exists(Query query, Class> entityClass); + + /** + * Determine result of given {@link Query} contains at least one element. + * + * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param entityClass the parametrized type. + * @param collectionName name of the collection to check for objects. + * @return + */ + Mono exists(Query query, Class> entityClass, String collectionName); + + /** + * Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. 
+ * @return the List of converted objects + */ + Flux find(Query query, Class entityClass); + + /** + * Map the results of an ad-hoc query on the specified collection to a List of the specified type. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from + * @return the List of converted objects + */ + Flux find(Query query, Class entityClass, String collectionName); + + /** + * Returns a document with the given id mapped onto the given class. The collection the query is ran against will be + * derived from the given target class as well. + * + * @param + * @param id the id of the document to return. + * @param entityClass the type the document shall be converted into. + * @return the document with the given id mapped onto the given target class. + */ + Mono findById(Object id, Class entityClass); + + /** + * Returns the document with the given id from the given collection mapped onto the given target class. + * + * @param id the id of the document to return + * @param entityClass the type to convert the document to + * @param collectionName the collection to query for the document + * @param + * @return + */ + Mono findById(Object id, Class entityClass, String collectionName); + + /** + * Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Will consider entity mapping + * information to determine the collection the query is ran against. 
Note, that MongoDB limits the number of results + * by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of + * results. + * + * @param near must not be {@literal null}. + * @param entityClass must not be {@literal null}. + * @return + */ + Flux> geoNear(NearQuery near, Class entityClass); + + /** + * Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the + * number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a + * particular number of results. + * + * @param near must not be {@literal null}. + * @param entityClass must not be {@literal null}. + * @param collectionName the collection to trigger the query against. If no collection name is given the entity class + * will be inspected. + * @return + */ + Flux> geoNear(NearQuery near, Class entityClass, String collectionName); + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional + * fields specification. + * @param update the {@link Update} to apply on matching documents. + * @param entityClass the parametrized type. + * @return + */ + Mono findAndModify(Query query, Update update, Class entityClass); + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional + * fields specification. + * @param update the {@link Update} to apply on matching documents. + * @param entityClass the parametrized type. + * @param collectionName the collection to query. 
+ * @return + */ + Mono findAndModify(Query query, Update update, Class entityClass, String collectionName); + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking + * {@link FindAndModifyOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional + * fields specification. + * @param update the {@link Update} to apply on matching documents. + * @param options the {@link FindAndModifyOptions} holding additional information. + * @param entityClass the parametrized type. + * @return + */ + Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass); + + /** + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking + * {@link FindAndModifyOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional + * fields specification. + * @param update the {@link Update} to apply on matching documents. + * @param options the {@link FindAndModifyOptions} holding additional information. + * @param entityClass the parametrized type. + * @param collectionName the collection to query. + * @return + */ + Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, + String collectionName); + + /** + * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the + * specified type. The first document that matches the query is returned and also removed from the collection in the + * database. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. 
+ * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @return the converted object + */ + Mono findAndRemove(Query query, Class entityClass); + + /** + * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified + * type. The first document that matches the query is returned and also removed from the collection in the database. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from + * @return the converted object + */ + Mono findAndRemove(Query query, Class entityClass, String collectionName); + + /** + * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. + * + * @param query + * @param entityClass must not be {@literal null}. + * @return + */ + Mono count(Query query, Class> entityClass); + + /** + * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} + * must solely consist of document field references as we lack type information to map potential property references + * onto document fields. 
TO make sure the query gets mapped, use {@link #count(Query, Class, String)}. + * + * @param query + * @param collectionName must not be {@literal null} or empty. + * @return + * @see #count(Query, Class, String) + */ + Mono count(Query query, String collectionName); + + /** + * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity + * class to map the given {@link Query}. + * + * @param query + * @param entityClass must not be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @return + */ + Mono count(Query query, Class> entityClass, String collectionName); + + /** + * Insert the object into the collection for the entity type of the object to save. + * + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. + * + * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * + * Spring's Type Conversion" for more details. + * + * + * Insert is used to initially store the object into the database. To update an existing object use the save method. + * + * @param objectToSave the object to store in the collection. + * @return + */ + Mono insert(T objectToSave); + + /** + * Insert the object into the specified collection. + * + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * Insert is used to initially store the object into the database. To update an existing object use the save method. 
+ * + * @param objectToSave the object to store in the collection + * @param collectionName name of the collection to store the object in + * @return + */ + Mono insert(T objectToSave, String collectionName); + + /** + * Insert a Collection of objects into a collection in a single batch write to the database. + * + * @param batchToSave the list of objects to save. + * @param entityClass class that determines the collection to use + * @return + */ + Flux insert(Collection extends T> batchToSave, Class> entityClass); + + /** + * Insert a list of objects into the specified collection in a single batch write to the database. + * + * @param batchToSave the list of objects to save. + * @param collectionName name of the collection to store the object in + * @return + */ + Flux insert(Collection extends T> batchToSave, String collectionName); + + /** + * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the + * class. + * + * @param objectsToSave the list of objects to save. + * @return + */ + Flux insertAll(Collection extends T> objectsToSave); + + /** + * Insert the object into the collection for the entity type of the object to save. + * + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. + * + * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * + * Spring's Type Conversion" for more details. + * + * + * Insert is used to initially store the object into the database. To update an existing object use the save method. + * + * @param objectToSave the object to store in the collection. 
+ * @return + */ + Mono insert(Mono extends T> objectToSave); + + /** + * Insert a Collection of objects into a collection in a single batch write to the database. + * + * @param batchToSave the publisher which provides objects to save. + * @param entityClass class that determines the collection to use + * @return + */ + Flux insert(Publisher extends T> batchToSave, Class> entityClass); + + /** + * Insert a list of objects into the specified collection in a single batch write to the database. + * + * @param batchToSave the publisher which provides objects to save. + * @param collectionName name of the collection to store the object in + * @return + */ + Flux insert(Publisher extends T> batchToSave, String collectionName); + + /** + * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the + * class. + * + * @param objectsToSave the publisher which provides objects to save. + * @return + */ + Flux insertAll(Publisher extends T> objectsToSave); + + /** + * Save the object to the collection for the entity type of the object to save. This will perform an insert if the + * object is not already present, that is an 'upsert'. + * + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * + * Spring's Type Conversion" for more details. + * + * @param objectToSave the object to store in the collection + * @return + */ + Mono save(T objectToSave); + + /** + * Save the object to the specified collection. 
This will perform an insert if the object is not already present, that + * is an 'upsert'. + * + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's + * Type Conversion" for more details. + * + * @param objectToSave the object to store in the collection + * @param collectionName name of the collection to store the object in + * @return + */ + Mono save(T objectToSave, String collectionName); + + /** + * Save the object to the collection for the entity type of the object to save. This will perform an insert if the + * object is not already present, that is an 'upsert'. + * + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * + * Spring's Type Conversion" for more details. + * + * @param objectToSave the object to store in the collection + * @return + */ + Mono save(Mono extends T> objectToSave); + + /** + * Save the object to the specified collection. This will perform an insert if the object is not already present, that + * is an 'upsert'. 
+ * + * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's + * Type Conversion" for more details. + * + * @param objectToSave the object to store in the collection + * @param collectionName name of the collection to store the object in + * @return + */ + Mono save(Mono extends T> objectToSave, String collectionName); + + /** + * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by + * combining the query document and the update document. + * + * @param query the query document that specifies the criteria used to select a record to be upserted + * @param update the update document that contains the updated object or $ operators to manipulate the existing object + * @param entityClass class that determines the collection to use + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono upsert(Query query, Update update, Class> entityClass); + + /** + * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by + * combining the query document and the update document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. 
+ * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono upsert(Query query, Update update, String collectionName); + + /** + * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by + * combining the query document and the update document. + * + * @param query the query document that specifies the criteria used to select a record to be upserted + * @param update the update document that contains the updated object or $ operators to manipulate the existing object + * @param entityClass class of the pojo to be operated on + * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono upsert(Query query, Update update, Class> entityClass, String collectionName); + + /** + * Updates the first object that is found in the collection of the entity class that matches the query document with + * the provided update document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. + * @param entityClass class that determines the collection to use + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono updateFirst(Query query, Update update, Class> entityClass); + + /** + * Updates the first object that is found in the specified collection that matches the query document criteria with + * the provided updated document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. 
+ * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono updateFirst(Query query, Update update, String collectionName); + + /** + * Updates the first object that is found in the specified collection that matches the query document criteria with + * the provided updated document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. + * @param entityClass class of the pojo to be operated on + * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono updateFirst(Query query, Update update, Class> entityClass, String collectionName); + + /** + * Updates all objects that are found in the collection for the entity class that matches the query document criteria + * with the provided updated document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. + * @param entityClass class that determines the collection to use + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono updateMulti(Query query, Update update, Class> entityClass); + + /** + * Updates all objects that are found in the specified collection that matches the query document criteria with the + * provided updated document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. 
+ * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono updateMulti(Query query, Update update, String collectionName); + + /** + * Updates all objects that are found in the collection for the entity class that matches the query document criteria + * with the provided updated document. + * + * @param query the query document that specifies the criteria used to select a record to be updated + * @param update the update document that contains the updated object or $ operators to manipulate the existing + * object. + * @param entityClass class of the pojo to be operated on + * @param collectionName name of the collection to update the object in + * @return the WriteResult which lets you access the results of the previous write. + */ + Mono updateMulti(final Query query, final Update update, Class> entityClass, String collectionName); + + /** + * Remove the given object from the collection by id. + * + * @param object + * @return + */ + Mono remove(Object object); + + /** + * Removes the given object from the given collection. + * + * @param object + * @param collection must not be {@literal null} or empty. + */ + Mono remove(Object object, String collection); + + /** + * Remove the given object from the collection by id. + * + * @param objectToRemove + * @return + */ + Mono remove(Mono extends Object> objectToRemove); + + /** + * Removes the given object from the given collection. + * + * @param objectToRemove + * @param collection must not be {@literal null} or empty. + * @return + */ + Mono remove(Mono extends Object> objectToRemove, String collection); + + /** + * Remove all documents that match the provided query document criteria from the the collection used to store the + * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. 
+ * + * @param query + * @param entityClass + * @return + */ + Mono remove(Query query, Class> entityClass); + + /** + * Remove all documents that match the provided query document criteria from the the collection used to store the + * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. + * + * @param query + * @param entityClass + * @param collectionName + * @return + */ + Mono remove(Query query, Class> entityClass, String collectionName); + + /** + * Remove all documents from the specified collection that match the provided query document criteria. There is no + * conversion/mapping done for any criteria using the id field. + * + * @param query the query document that specifies the criteria used to remove a record + * @param collectionName name of the collection where the objects will removed + */ + Mono remove(Query query, String collectionName); + + /** + * Returns and removes all documents form the specified collection that match the provided query. + * + * @param query + * @param collectionName + * @return + */ + Flux findAllAndRemove(Query query, String collectionName); + + /** + * Returns and removes all documents matching the given query form the collection used to store the entityClass. + * + * @param query + * @param entityClass + * @return + */ + Flux findAllAndRemove(Query query, Class entityClass); + + /** + * Returns and removes all documents that match the provided query document criteria from the the collection used to + * store the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in + * the query. + * + * @param query + * @param entityClass + * @param collectionName + * @return + */ + Flux findAllAndRemove(Query query, Class entityClass, String collectionName); + + /** + * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified + * type. 
The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite + * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link Subscription#cancel() canceled}. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. + * @return the List of converted objects + */ + Flux tail(Query query, Class entityClass); + + /** + * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified + * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite + * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link Subscription#cancel() canceled}. + * + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless + * configured otherwise, an instance of {@link MappingMongoConverter} will be used. + * + * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more + * feature rich {@link Query}. + * + * @param query the query class that specifies the criteria used to find a record and also an optional fields + * specification + * @param entityClass the parametrized type of the returned list. 
+ * @param collectionName name of the collection to retrieve the objects from + * @return the List of converted objects + */ + Flux tail(Query query, Class entityClass, String collectionName); + + /** + * Returns the underlying {@link MongoConverter}. + * + * @return + */ + MongoConverter getConverter(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java new file mode 100644 index 000000000..fb1230ff4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -0,0 +1,2445 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.SerializationUtils.*; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.bson.types.ObjectId; +import org.reactivestreams.Publisher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.BeansException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.context.ApplicationEventPublisherAware; +import org.springframework.context.ApplicationListener; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.core.convert.ConversionService; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.annotation.Id; +import org.springframework.data.convert.EntityReader; +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.Metric; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.ConvertingPropertyAccessor; +import 
org.springframework.data.mapping.model.MappingException; +import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.MongoTemplate.DocumentCallback; +import org.springframework.data.mongodb.core.convert.DbRefProxyHandler; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.DbRefResolverCallback; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoWriter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher; +import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.AfterDeleteEvent; +import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeDeleteEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; +import 
org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.BasicDBObject; +import com.mongodb.CursorType; +import com.mongodb.DBCollection; +import com.mongodb.DBCursor; +import com.mongodb.DBRef; +import com.mongodb.Mongo; +import com.mongodb.MongoException; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.FindOneAndDeleteOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.ReturnDocument; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.UpdateResult; +import com.mongodb.reactivestreams.client.FindPublisher; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; +import com.mongodb.reactivestreams.client.Success; +import com.mongodb.util.JSONParseException; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.util.function.Tuple2; + +/** + * Primary implementation of {@link ReactiveMongoOperations}. It simplifies the use of Reactive MongoDB usage and helps + * to avoid common errors. It executes core MongoDB workflow, leaving application code to provide {@link Document} and + * extract results. 
This class executes BSON queries or updates, initiating iteration over {@link FindPublisher} and + * catching MongoDB exceptions and translating them to the generic, more informative exception hierarchy defined in the + * org.springframework.dao package. Can be used within a service implementation via direct instantiation with a + * {@link SimpleReactiveMongoDatabaseFactory} reference, or get prepared in an application context and given to services as + * bean reference. Note: The {@link SimpleReactiveMongoDatabaseFactory} should always be configured as a bean in the + * application context, in the first case given to the service directly, in the second case to the prepared template. + * + * @author Mark Paluch + * @since 2.0 + */ +public class ReactiveMongoTemplate implements ReactiveMongoOperations, ApplicationContextAware { + + public static final DbRefResolver NO_OP_REF_RESOLVER = new NoOpDbRefResolver(); + + private static final Logger LOGGER = LoggerFactory.getLogger(ReactiveMongoTemplate.class); + private static final String ID_FIELD = "_id"; + private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE; + private static final Collection> ITERABLE_CLASSES; + + static { + + Set> iterableClasses = new HashSet<>(); + iterableClasses.add(List.class); + iterableClasses.add(Collection.class); + iterableClasses.add(Iterator.class); + iterableClasses.add(Publisher.class); + + ITERABLE_CLASSES = Collections.unmodifiableCollection(iterableClasses); + } + + private final MongoConverter mongoConverter; + private final MappingContext extends MongoPersistentEntity>, MongoPersistentProperty> mappingContext; + private final ReactiveMongoDatabaseFactory mongoDatabaseFactory; + private final PersistenceExceptionTranslator exceptionTranslator; + private final QueryMapper queryMapper; + private final UpdateMapper updateMapper; + + private int publisherBatchSize = 10; + private WriteConcern writeConcern; + private WriteConcernResolver 
writeConcernResolver = DefaultWriteConcernResolver.INSTANCE; + private WriteResultChecking writeResultChecking = WriteResultChecking.NONE; + private ReadPreference readPreference; + private ApplicationEventPublisher eventPublisher; + private MongoPersistentEntityIndexCreator indexCreator; + + /** + * Constructor used for a basic template configuration. + * + * @param mongoClient must not be {@literal null}. + * @param databaseName must not be {@literal null} or empty. + */ + public ReactiveMongoTemplate(MongoClient mongoClient, String databaseName) { + this(new SimpleReactiveMongoDatabaseFactory(mongoClient, databaseName), null); + } + + /** + * Constructor used for a basic template configuration. + * + * @param mongoDatabaseFactory must not be {@literal null}. + */ + public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory) { + this(mongoDatabaseFactory, null); + } + + /** + * Constructor used for a basic template configuration. + * + * @param mongoDatabaseFactory must not be {@literal null}. + * @param mongoConverter + */ + public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, MongoConverter mongoConverter) { + + Assert.notNull(mongoDatabaseFactory, "ReactiveMongoDatabaseFactory must not be null!"); + + this.mongoDatabaseFactory = mongoDatabaseFactory; + this.exceptionTranslator = mongoDatabaseFactory.getExceptionTranslator(); + this.mongoConverter = mongoConverter == null ? 
getDefaultMongoConverter() : mongoConverter; + this.queryMapper = new QueryMapper(this.mongoConverter); + this.updateMapper = new UpdateMapper(this.mongoConverter); + + // We always have a mapping context in the converter, whether it's a simple one or not + mappingContext = this.mongoConverter.getMappingContext(); + // We create indexes based on mapping events + + if (null != mappingContext && mappingContext instanceof MongoMappingContext) { + indexCreator = new MongoPersistentEntityIndexCreator((MongoMappingContext) mappingContext, + new BlockingIndexOptionsProvider(this)); + eventPublisher = new MongoMappingEventPublisher(indexCreator); + if (mappingContext instanceof ApplicationEventPublisherAware) { + ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + } + } + } + + /** + * Configures the {@link WriteResultChecking} to be used with the template. Setting {@literal null} will reset the + * default of {@link ReactiveMongoTemplate#DEFAULT_WRITE_RESULT_CHECKING}. + * + * @param resultChecking + */ + public void setWriteResultChecking(WriteResultChecking resultChecking) { + this.writeResultChecking = resultChecking == null ? DEFAULT_WRITE_RESULT_CHECKING : resultChecking; + } + + /** + * Configures the {@link WriteConcern} to be used with the template. If none is configured the {@link WriteConcern} + * configured on the {@link MongoDbFactory} will apply. If you configured a {@link Mongo} instance no + * {@link WriteConcern} will be used. + * + * @param writeConcern + */ + public void setWriteConcern(WriteConcern writeConcern) { + this.writeConcern = writeConcern; + } + + /** + * Configures the {@link WriteConcernResolver} to be used with the template. 
+ * + * @param writeConcernResolver + */ + public void setWriteConcernResolver(WriteConcernResolver writeConcernResolver) { + this.writeConcernResolver = writeConcernResolver; + } + + /** + * Used by @{link {@link #prepareCollection(MongoCollection)} to set the {@link ReadPreference} before any operations + * are performed. + * + * @param readPreference + */ + public void setReadPreference(ReadPreference readPreference) { + this.readPreference = readPreference; + } + + /** + * Used to set a batch size when working with batches of {@link Publisher} emitting items to insert. + * + * @param publisherBatchSize batch size + */ + public void setPublisherBatchSize(int publisherBatchSize) { + this.publisherBatchSize = publisherBatchSize; + } + + /* + * (non-Javadoc) + * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) + */ + public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + + prepareIndexCreator(applicationContext); + + eventPublisher = applicationContext; + if (mappingContext instanceof ApplicationEventPublisherAware) { + ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + } + } + + /** + * Inspects the given {@link ApplicationContext} for {@link MongoPersistentEntityIndexCreator} and those in turn if + * they were registered for the current {@link MappingContext}. If no creator for the current {@link MappingContext} + * can be found we manually add the internally created one as {@link ApplicationListener} to make sure indexes get + * created appropriately for entity types persisted through this {@link ReactiveMongoTemplate} instance. + * + * @param context must not be {@literal null}. 
+ */ + private void prepareIndexCreator(ApplicationContext context) { + + String[] indexCreators = context.getBeanNamesForType(MongoPersistentEntityIndexCreator.class); + + for (String creator : indexCreators) { + MongoPersistentEntityIndexCreator creatorBean = context.getBean(creator, MongoPersistentEntityIndexCreator.class); + if (creatorBean.isIndexCreatorFor(mappingContext)) { + return; + } + } + + if (context instanceof ConfigurableApplicationContext) { + ((ConfigurableApplicationContext) context).addApplicationListener(indexCreator); + } + } + + /** + * Returns the default {@link MongoConverter}. + * + * @return + */ + public MongoConverter getConverter() { + return this.mongoConverter; + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.String) + */ + public ReactiveIndexOperations reactiveIndexOps(String collectionName) { + return new DefaultReactiveIndexOperations(this, collectionName); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.Class) + */ + public ReactiveIndexOperations reactiveIndexOps(Class> entityClass) { + return new DefaultReactiveIndexOperations(this, determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.String) + */ + public IndexOperations indexOps(String collectionName) { + return new BlockingIndexOperations(new DefaultReactiveIndexOperations(this, collectionName)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.Class) + */ + public IndexOperations indexOps(Class> entityClass) { + return new BlockingIndexOperations(new DefaultReactiveIndexOperations(this, determineCollectionName(entityClass))); + } + + public String getCollectionName(Class> entityClass) { + return this.determineCollectionName(entityClass); + } + + 
/* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(java.lang.String) + */ + public Mono executeCommand(String jsonCommand) { + + Assert.notNull(jsonCommand, "Command must not be empty!"); + + return executeCommand(Document.parse(jsonCommand)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(org.bson.Document) + */ + public Mono executeCommand(final Document command) { + + Assert.notNull(command, "Command must not be null!"); + + return createFlux(db -> readPreference != null ? db.runCommand(command, readPreference) : db.runCommand(command)) + .next(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(org.bson.Document, com.mongodb.ReadPreference) + */ + public Mono executeCommand(final Document command, final ReadPreference readPreference) { + + Assert.notNull(command, "Command must not be null!"); + + return createFlux(db -> readPreference != null ? 
db.runCommand(command, readPreference) : db.runCommand(command)) + .next(); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(java.lang.Class, org.springframework.data.mongodb.core.ReactiveCollectionCallback) + */ + @Override + public Flux execute(Class> entityClass, ReactiveCollectionCallback action) { + return createFlux(determineCollectionName(entityClass), action); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(org.springframework.data.mongodb.core.ReactiveDbCallback) + */ + @Override + public Flux execute(ReactiveDatabaseCallback action) { + return createFlux(action); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(java.lang.String, org.springframework.data.mongodb.core.ReactiveCollectionCallback) + */ + public Flux execute(String collectionName, ReactiveCollectionCallback callback) { + Assert.notNull(callback); + return createFlux(collectionName, callback); + } + + /** + * Create a reusable Flux for a {@link ReactiveDatabaseCallback}. It's up to the developer to choose to obtain a new + * {@link Flux} or to reuse the {@link Flux}. + * + * @param callback must not be {@literal null} + * @return a {@link Flux} wrapping the {@link ReactiveDatabaseCallback}. + */ + public Flux createFlux(ReactiveDatabaseCallback callback) { + + Assert.notNull(callback); + + return Flux.defer(() -> callback.doInDB(getMongoDatabase())).onErrorResumeWith(translateFluxException()); + } + + /** + * Create a reusable Mono for a {@link ReactiveDatabaseCallback}. It's up to the developer to choose to obtain a new + * {@link Flux} or to reuse the {@link Flux}. + * + * @param callback must not be {@literal null} + * @return a {@link Mono} wrapping the {@link ReactiveDatabaseCallback}. 
+ */ + public Mono createMono(final ReactiveDatabaseCallback callback) { + + Assert.notNull(callback); + + return Mono.defer(() -> Mono.from(callback.doInDB(getMongoDatabase()))).otherwise(translateMonoException()); + } + + /** + * Create a reusable {@link Flux} for the {@code collectionName} and {@link ReactiveCollectionCallback}. + * + * @param collectionName must not be empty or {@literal null}. + * @param callback must not be {@literal null}. + * @return a reusable {@link Flux} wrapping the {@link ReactiveCollectionCallback}. + */ + public Flux createFlux(String collectionName, ReactiveCollectionCallback callback) { + + Assert.hasText(collectionName); + Assert.notNull(callback); + + Mono> collectionPublisher = Mono + .fromCallable(() -> getAndPrepareCollection(getMongoDatabase(), collectionName)); + + return collectionPublisher.flatMap(callback::doInCollection).onErrorResumeWith(translateFluxException()); + } + + /** + * Create a reusable {@link Mono} for the {@code collectionName} and {@link ReactiveCollectionCallback}. + * + * @param collectionName must not be empty or {@literal null}. + * @param callback must not be {@literal null}. + * @param + * @return a reusable {@link Mono} wrapping the {@link ReactiveCollectionCallback}. 
+ */ + public Mono createMono(String collectionName, ReactiveCollectionCallback callback) { + + Assert.hasText(collectionName); + Assert.notNull(callback); + + Mono> collectionPublisher = Mono + .fromCallable(() -> getAndPrepareCollection(getMongoDatabase(), collectionName)); + + return collectionPublisher.then(collection -> Mono.from(callback.doInCollection(collection))) + .otherwise(translateMonoException()); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class) + */ + public Mono> createCollection(Class entityClass) { + return createCollection(determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class, org.springframework.data.mongodb.core.CollectionOptions) + */ + public Mono> createCollection(Class entityClass, + CollectionOptions collectionOptions) { + return createCollection(determineCollectionName(entityClass), collectionOptions); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.String) + */ + public Mono> createCollection(final String collectionName) { + return doCreateCollection(collectionName, new CreateCollectionOptions()); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.String, org.springframework.data.mongodb.core.CollectionOptions) + */ + public Mono> createCollection(final String collectionName, + final CollectionOptions collectionOptions) { + return doCreateCollection(collectionName, convertToCreateCollectionOptions(collectionOptions)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#getCollection(java.lang.String) + */ + public MongoCollection getCollection(final String collectionName) { + return execute((MongoDatabaseCallback>) db -> db.getCollection(collectionName)); + } 
+ + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#collectionExists(java.lang.Class) + */ + public Mono collectionExists(Class entityClass) { + return collectionExists(determineCollectionName(entityClass)); + } + + /* (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#collectionExists(java.lang.String) + */ + public Mono