Compare commits

80 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 0f987f44ef |  |
|  | 5df22233d8 |  |
|  | 9751c6bc8b |  |
|  | 7dc509953a |  |
|  | 2f4f1e524d |  |
|  | 26ddf9d59f |  |
|  | 96ff4e1d14 |  |
|  | 2af13c27a7 |  |
|  | f4d2fc6231 |  |
|  | 3ab679bcc7 |  |
|  | 318d552797 |  |
|  | 8cfbd39c7e |  |
|  | 3a14af5485 |  |
|  | 00034d899a |  |
|  | fba6d7d8be |  |
|  | 95c4707902 |  |
|  | 32f3e60c9f |  |
|  | b57e571033 |  |
|  | 6604c507dd |  |
|  | e1df28797a |  |
|  | c6630aa279 |  |
|  | bf921cdbd7 |  |
|  | a1b4e6df59 |  |
|  | 4b3312998a |  |
|  | c5501db577 |  |
|  | 936a0d35f7 |  |
|  | 5dd91d0b6d |  |
|  | 28510de6c8 |  |
|  | 4bbf4cd5cf |  |
|  | 90bd3f0f18 |  |
|  | 1e35116419 |  |
|  | dd336f0ecb |  |
|  | d020219ded |  |
|  | 0345eff69a |  |
|  | 55fee27fb6 |  |
|  | ffba352e15 |  |
|  | 1118df5550 |  |
|  | 29f05af733 |  |
|  | 7bac739146 |  |
|  | 6366d3cec1 |  |
|  | 44913abd80 |  |
|  | 7a7f7c942d |  |
|  | e9c9938016 |  |
|  | c9da0a75ff |  |
|  | 581961e79a |  |
|  | 0e0d726457 |  |
|  | 7b5fea960f |  |
|  | a04821ff90 |  |
|  | a6bd41bcf2 |  |
|  | 6387eb9762 |  |
|  | 5fb4b036bb |  |
|  | 4f0dc04a81 |  |
|  | ee59c6b774 |  |
|  | b0b905ddb7 |  |
|  | 7f7be5e47d |  |
|  | 5a49aa6519 |  |
|  | b68079c421 |  |
|  | fde49f2a5a |  |
|  | 4d73d76b9f |  |
|  | 8f2c806403 |  |
|  | d0eb76946e |  |
|  | df43d7fcdb |  |
|  | 7b34a602ed |  |
|  | c829387c82 |  |
|  | a4e12a96c9 |  |
|  | 7e3f7bd861 |  |
|  | 46ab6b4c94 |  |
|  | 8029acb3fb |  |
|  | 96ffb0b7f4 |  |
|  | 7849b5333b |  |
|  | fb45b4eb2a |  |
|  | a6a84421b4 |  |
|  | c4dcc7d0f5 |  |
|  | c25a1a9e53 |  |
|  | 81c68955fe |  |
|  | 22ca597fca |  |
|  | 6259cd2c3b |  |
|  | f153399c3b |  |
|  | 65508eb01b |  |
|  | a7c25c8524 |  |
Jenkinsfile (vendored) · 46 changes
```diff
@@ -46,32 +46,16 @@ pipeline {
 				}
 			}
 		}
-		stage('Publish JDK 11 + MongoDB 4.2') {
+		stage('Publish JDK 14 + MongoDB 4.2') {
 			when {
-				changeset "ci/openjdk11-mongodb-4.2/**"
+				changeset "ci/openjdk14-mongodb-4.2/**"
 			}
 			agent { label 'data' }
 			options { timeout(time: 30, unit: 'MINUTES') }

 			steps {
 				script {
-					def image = docker.build("springci/spring-data-openjdk11-with-mongodb-4.2.0", "ci/openjdk11-mongodb-4.2/")
-					docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
-						image.push()
-					}
-				}
-			}
-		}
-		stage('Publish JDK 13 + MongoDB 4.2') {
-			when {
-				changeset "ci/openjdk13-mongodb-4.2/**"
-			}
-			agent { label 'data' }
-			options { timeout(time: 30, unit: 'MINUTES') }
-
-			steps {
-				script {
-					def image = docker.build("springci/spring-data-openjdk13-with-mongodb-4.2.0", "ci/openjdk13-mongodb-4.2/")
+					def image = docker.build("springci/spring-data-openjdk14-with-mongodb-4.2.0", "ci/openjdk14-mongodb-4.2/")
 					docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
 						image.push()
 					}
@@ -155,30 +139,10 @@ pipeline {
 			}
 		}

-		stage("test: baseline (jdk11)") {
+		stage("test: baseline (jdk14)") {
 			agent {
 				docker {
-					image 'springci/spring-data-openjdk11-with-mongodb-4.2.0:latest'
-					label 'data'
-					args '-v $HOME:/tmp/jenkins-home'
-				}
-			}
-			options { timeout(time: 30, unit: 'MINUTES') }
-			steps {
-				sh 'rm -rf ?'
-				sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
-				sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
-				sh 'sleep 10'
-				sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
-				sh 'sleep 15'
-				sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
-			}
-		}
-
-		stage("test: baseline (jdk13)") {
-			agent {
-				docker {
-					image 'springci/spring-data-openjdk13-with-mongodb-4.2.0:latest'
+					image 'springci/spring-data-openjdk14-with-mongodb-4.2.0:latest'
 					label 'data'
 					args '-v $HOME:/tmp/jenkins-home'
 				}
```
```diff
@@ -1,4 +1,4 @@
-FROM adoptopenjdk/openjdk13:latest
+FROM adoptopenjdk/openjdk14:latest

 ENV TZ=Etc/UTC
 ENV DEBIAN_FRONTEND=noninteractive
```
pom.xml · 8 changes
```diff
@@ -5,7 +5,7 @@

 	<groupId>org.springframework.data</groupId>
 	<artifactId>spring-data-mongodb-parent</artifactId>
-	<version>3.0.0.M3</version>
+	<version>3.0.0.RC2</version>
 	<packaging>pom</packaging>

 	<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
 	<parent>
 		<groupId>org.springframework.data.build</groupId>
 		<artifactId>spring-data-parent</artifactId>
-		<version>2.3.0.M3</version>
+		<version>2.3.0.RC2</version>
 	</parent>

 	<modules>
@@ -26,8 +26,8 @@
 	<properties>
 		<project.type>multi</project.type>
 		<dist.id>spring-data-mongodb</dist.id>
-		<springdata.commons>2.3.0.M3</springdata.commons>
-		<mongo>4.0.0-beta1</mongo>
+		<springdata.commons>2.3.0.RC2</springdata.commons>
+		<mongo>4.0.2</mongo>
 		<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
 		<jmh.version>1.19</jmh.version>
 	</properties>
```
```diff
@@ -7,7 +7,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.0.0.M3</version>
+		<version>3.0.0.RC2</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -14,7 +14,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.0.0.M3</version>
+		<version>3.0.0.RC2</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -11,7 +11,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.0.0.M3</version>
+		<version>3.0.0.RC2</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
```
```diff
@@ -344,12 +344,6 @@
 						<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
 						<reactor.trace.cancel>true</reactor.trace.cancel>
 					</systemPropertyVariables>
-					<properties>
-						<property>
-							<name>listener</name>
-							<value>org.springframework.data.mongodb.test.util.CleanMongoDBJunitRunListener</value>
-						</property>
-					</properties>
 				</configuration>
 			</plugin>
```
```diff
@@ -31,6 +31,7 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
 *
 * @author Mark Paluch
 * @author Christoph Strobl
+ * @author Mathieu Ouellet
 * @since 2.0
 */
public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
@@ -41,7 +42,7 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
	 * @return
	 * @throws DataAccessException
	 */
-	MongoDatabase getMongoDatabase() throws DataAccessException;
+	Mono<MongoDatabase> getMongoDatabase() throws DataAccessException;

	/**
	 * Obtain a {@link MongoDatabase} instance to access the database with the given name.
@@ -50,7 +51,7 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
	 * @return
	 * @throws DataAccessException
	 */
-	MongoDatabase getMongoDatabase(String dbName) throws DataAccessException;
+	Mono<MongoDatabase> getMongoDatabase(String dbName) throws DataAccessException;

	/**
	 * Exposes a shared {@link MongoExceptionTranslator}.
@@ -64,10 +65,7 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
	 *
	 * @return never {@literal null}.
	 */
-	@Override
-	default CodecRegistry getCodecRegistry() {
-		return getMongoDatabase().getCodecRegistry();
-	}
+	CodecRegistry getCodecRegistry();

	/**
	 * Obtain a {@link Mono} emitting a {@link ClientSession} for given {@link ClientSessionOptions options}.
```
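The factory now defers database resolution: `getMongoDatabase()` returns a `Mono<MongoDatabase>` instead of a plain `MongoDatabase`, so callers compose it into their pipeline rather than dereferencing it eagerly. A minimal caller sketch (the `factory` parameter and the printing logic are illustrative, not part of this diff):

```java
import com.mongodb.reactivestreams.client.MongoDatabase;

import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;

import reactor.core.publisher.Mono;

class DatabaseNamePrinter {

	// Resolving the database is now itself a non-blocking step in the reactive chain.
	static void printDatabaseName(ReactiveMongoDatabaseFactory factory) {
		Mono<MongoDatabase> database = factory.getMongoDatabase();
		database.map(MongoDatabase::getName)
				.subscribe(name -> System.out.println("Using database " + name));
	}
}
```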
```diff
@@ -41,6 +41,7 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
 *
 * @author Mark Paluch
 * @author Christoph Strobl
+ * @author Mathieu Ouellet
 * @since 2.2
 */
public class ReactiveMongoDatabaseUtils {
@@ -142,14 +143,13 @@ public class ReactiveMongoDatabaseUtils {
				.flatMap(synchronizationManager -> {

					return doGetSession(synchronizationManager, factory, sessionSynchronization) //
-							.map(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it)));
-				})
-				.onErrorResume(NoTransactionException.class,
-						e -> Mono.fromSupplier(() -> getMongoDatabaseOrDefault(dbName, factory)))
-				.defaultIfEmpty(getMongoDatabaseOrDefault(dbName, factory));
+							.flatMap(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it)));
+				}) //
+				.onErrorResume(NoTransactionException.class, e -> getMongoDatabaseOrDefault(dbName, factory))
+				.switchIfEmpty(getMongoDatabaseOrDefault(dbName, factory));
	}

-	private static MongoDatabase getMongoDatabaseOrDefault(@Nullable String dbName,
+	private static Mono<MongoDatabase> getMongoDatabaseOrDefault(@Nullable String dbName,
			ReactiveMongoDatabaseFactory factory) {
		return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
	}
```
```diff
@@ -15,7 +15,6 @@
 */
package org.springframework.data.mongodb.config;

-import com.mongodb.MongoClientSettings.Builder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDatabaseFactory;
@@ -25,10 +24,13 @@ import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
+import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
+import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.lang.Nullable;

import com.mongodb.MongoClientSettings;
+import com.mongodb.MongoClientSettings.Builder;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

@@ -39,7 +41,7 @@ import com.mongodb.client.MongoClients;
 * @since 2.1
 * @see MongoConfigurationSupport
 */
-@Configuration
+@Configuration(proxyBeanMethods = false)
public abstract class AbstractMongoClientConfiguration extends MongoConfigurationSupport {

	/**
@@ -58,11 +60,12 @@ public abstract class AbstractMongoClientConfiguration extends MongoConfiguratio
	/**
	 * Creates a {@link MongoTemplate}.
	 *
-	 * @return
	 * @see #mongoDbFactory()
+	 * @see #mappingMongoConverter(MongoDatabaseFactory, MongoCustomConversions, MongoMappingContext)
	 */
	@Bean
-	public MongoTemplate mongoTemplate() throws Exception {
-		return new MongoTemplate(mongoDbFactory(), mappingMongoConverter());
+	public MongoTemplate mongoTemplate(MongoDatabaseFactory databaseFactory, MappingMongoConverter converter) {
+		return new MongoTemplate(databaseFactory, converter);
	}

	/**
@@ -70,8 +73,7 @@ public abstract class AbstractMongoClientConfiguration extends MongoConfiguratio
	 * {@link MongoTemplate}. Will use the {@link MongoClient} instance configured in {@link #mongoClient()}.
	 *
	 * @see #mongoClient()
-	 * @see #mongoTemplate()
-	 * @return
+	 * @see #mongoTemplate(MongoDatabaseFactory, MappingMongoConverter)
	 */
	@Bean
	public MongoDatabaseFactory mongoDbFactory() {
@@ -98,21 +100,20 @@ public abstract class AbstractMongoClientConfiguration extends MongoConfiguratio

	/**
	 * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
-	 * {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
+	 * {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
	 *
	 * @see #customConversions()
-	 * @see #mongoMappingContext()
+	 * @see #mongoMappingContext(MongoCustomConversions)
	 * @see #mongoDbFactory()
-	 * @return
-	 * @throws Exception
	 */
	@Bean
-	public MappingMongoConverter mappingMongoConverter() throws Exception {
+	public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory,
+			MongoCustomConversions customConversions, MongoMappingContext mappingContext) {

-		DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
-		MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
-		converter.setCustomConversions(customConversions());
-		converter.setCodecRegistryProvider(mongoDbFactory());
+		DbRefResolver dbRefResolver = new DefaultDbRefResolver(databaseFactory);
+		MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
+		converter.setCustomConversions(customConversions);
+		converter.setCodecRegistryProvider(databaseFactory);

		return converter;
	}
```
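The configuration class now declares `proxyBeanMethods = false` and its `@Bean` methods receive their collaborators as parameters instead of calling sibling `@Bean` methods. A hypothetical user configuration (class name, connection string, and database name are made up for illustration) is unaffected as long as it only overrides the intended extension points:

```java
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

// Subclasses keep working because the extension points (mongoClient(),
// getDatabaseName(), ...) are unchanged; only the internal bean wiring moved
// from method calls to parameter injection.
@Configuration
class ExampleMongoConfiguration extends AbstractMongoClientConfiguration {

	@Override
	public MongoClient mongoClient() {
		return MongoClients.create("mongodb://localhost:27017");
	}

	@Override
	protected String getDatabaseName() {
		return "example";
	}
}
```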
```diff
@@ -23,7 +23,9 @@ import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
+import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
+import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

import com.mongodb.MongoClientSettings;
import com.mongodb.MongoClientSettings.Builder;
@@ -38,7 +40,7 @@ import com.mongodb.reactivestreams.client.MongoClients;
 * @since 2.0
 * @see MongoConfigurationSupport
 */
-@Configuration
+@Configuration(proxyBeanMethods = false)
public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurationSupport {

	/**
@@ -57,11 +59,14 @@ public abstract class AbstractReactiveMongoConfigurat
	/**
	 * Creates {@link ReactiveMongoOperations}.
	 *
	 * @see #reactiveMongoDbFactory()
+	 * @see #mappingMongoConverter(ReactiveMongoDatabaseFactory, MongoCustomConversions, MongoMappingContext)
	 * @return never {@literal null}.
	 */
	@Bean
-	public ReactiveMongoOperations reactiveMongoTemplate() throws Exception {
-		return new ReactiveMongoTemplate(reactiveMongoDbFactory(), mappingMongoConverter());
+	public ReactiveMongoTemplate reactiveMongoTemplate(ReactiveMongoDatabaseFactory databaseFactory,
+			MappingMongoConverter mongoConverter) {
+		return new ReactiveMongoTemplate(databaseFactory, mongoConverter);
	}

	/**
@@ -69,7 +74,7 @@ public abstract class AbstractReactiveMongoConfigurat
	 * {@link MongoClient} instance configured in {@link #reactiveMongoClient()}.
	 *
	 * @see #reactiveMongoClient()
-	 * @see #reactiveMongoTemplate()
+	 * @see #reactiveMongoTemplate(ReactiveMongoDatabaseFactory, MappingMongoConverter)
	 * @return never {@literal null}.
	 */
	@Bean
@@ -79,20 +84,20 @@ public abstract class AbstractReactiveMongoConfigurat

	/**
	 * Creates a {@link MappingMongoConverter} using the configured {@link #reactiveMongoDbFactory()} and
-	 * {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
+	 * {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
	 *
	 * @see #customConversions()
-	 * @see #mongoMappingContext()
+	 * @see #mongoMappingContext(MongoCustomConversions)
	 * @see #reactiveMongoDbFactory()
	 * @return never {@literal null}.
-	 * @throws Exception
	 */
	@Bean
-	public MappingMongoConverter mappingMongoConverter() throws Exception {
+	public MappingMongoConverter mappingMongoConverter(ReactiveMongoDatabaseFactory databaseFactory,
+			MongoCustomConversions customConversions, MongoMappingContext mappingContext) {

-		MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mongoMappingContext());
-		converter.setCustomConversions(customConversions());
-		converter.setCodecRegistryProvider(reactiveMongoDbFactory());
+		MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
+		converter.setCustomConversions(customConversions);
+		converter.setCodecRegistryProvider(databaseFactory);

		return converter;
	}
```
```diff
@@ -96,6 +96,9 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
		String id = element.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE);
		id = StringUtils.hasText(id) ? id : DEFAULT_CONVERTER_BEAN_NAME;

+		String autoIndexCreation = element.getAttribute("auto-index-creation");
+		boolean autoIndexCreationEnabled = StringUtils.hasText(autoIndexCreation) && Boolean.valueOf(autoIndexCreation);
+
		parserContext.pushContainingComponent(new CompositeComponentDefinition("Mapping Mongo Converter", element));

		BeanDefinition conversionsDefinition = getCustomConversions(element, parserContext);
@@ -199,6 +202,11 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {

	public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
			@Nullable BeanDefinition conversionsDefinition, @Nullable String converterId) {
+		return potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, converterId, false);
+	}
+
+	public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
+			@Nullable BeanDefinition conversionsDefinition, @Nullable String converterId, boolean autoIndexCreation) {

		String ctxRef = element.getAttribute("mapping-context-ref");

@@ -226,6 +234,8 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
			mappingContextBuilder.addPropertyValue("simpleTypeHolder", simpleTypesDefinition);
		}

+		mappingContextBuilder.addPropertyValue("autoIndexCreation", autoIndexCreation);
+
		parseFieldNamingStrategy(element, parserContext.getReaderContext(), mappingContextBuilder);

		ctxRef = converterId == null || DEFAULT_CONVERTER_BEAN_NAME.equals(converterId) ? MAPPING_CONTEXT_BEAN_NAME
```
```diff
@@ -80,11 +80,12 @@ public abstract class MongoConfigurationSupport {
	 * @throws ClassNotFoundException
	 */
	@Bean
-	public MongoMappingContext mongoMappingContext() throws ClassNotFoundException {
+	public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions)
+			throws ClassNotFoundException {

		MongoMappingContext mappingContext = new MongoMappingContext();
		mappingContext.setInitialEntitySet(getInitialEntitySet());
-		mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());
+		mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder());
		mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
		mappingContext.setAutoIndexCreation(autoIndexCreation());

@@ -103,7 +104,7 @@ public abstract class MongoConfigurationSupport {
	 * @return must not be {@literal null}.
	 */
	@Bean
-	public CustomConversions customConversions() {
+	public MongoCustomConversions customConversions() {
		return MongoCustomConversions.create(this::configureConverters);
	}

@@ -198,12 +199,12 @@ public abstract class MongoConfigurationSupport {
	 * Configure whether to automatically create indices for domain types by deriving the
	 * {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not.
	 *
-	 * @return {@literal true} by default. <br />
-	 *         <strong>INFO</strong>: As of 3.x the default will be set to {@literal false}.
+	 * @return {@literal false} by default. <br />
+	 *         <strong>INFO</strong>: As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}.
	 * @since 2.2
	 */
	protected boolean autoIndexCreation() {
-		return true;
+		return false;
	}

	/**
@@ -216,6 +217,7 @@ public abstract class MongoConfigurationSupport {
	protected MongoClientSettings mongoClientSettings() {

		MongoClientSettings.Builder builder = MongoClientSettings.builder();
+		builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY);
		configureClientSettings(builder);
		return builder.build();
	}
@@ -227,6 +229,6 @@ public abstract class MongoConfigurationSupport {
	 * @since 3.0
	 */
	protected void configureClientSettings(MongoClientSettings.Builder builder) {
-		builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY);
+		// customization hook
	}
}
```
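Two of these defaults are behavior changes existing applications may need to compensate for: automatic index creation is now off by default, and the legacy UUID representation is applied before the `configureClientSettings` hook runs, so the hook can override it. A sketch of the opt-back-in overrides (the subclass and database name are hypothetical; choosing `STANDARD` is just one possible customization):

```java
import org.bson.UuidRepresentation;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;

import com.mongodb.MongoClientSettings;

@Configuration
class IndexCreatingMongoConfiguration extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "example";
	}

	// Restore the pre-3.0 behavior of deriving and creating indexes automatically.
	@Override
	protected boolean autoIndexCreation() {
		return true;
	}

	// The hook is empty by default now; settings applied here win over the
	// JAVA_LEGACY UUID representation set just before the hook is invoked.
	@Override
	protected void configureClientSettings(MongoClientSettings.Builder builder) {
		builder.uuidRepresentation(UuidRepresentation.STANDARD);
	}
}
```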
```diff
@@ -32,6 +32,7 @@ import org.springframework.data.mapping.callback.EntityCallbacks;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
+import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
@@ -62,6 +63,7 @@ import com.mongodb.client.model.*;
 * @author Minsu Kim
 * @author Jens Schauder
 * @author Michail Nikolaev
+ * @author Roman Puchkovskiy
 * @since 1.9
 */
class DefaultBulkOperations implements BulkOperations {
@@ -300,6 +302,7 @@ class DefaultBulkOperations implements BulkOperations {
			Assert.state(result != null, "Result must not be null.");

			models.forEach(this::maybeEmitAfterSaveEvent);
+			models.forEach(this::maybeInvokeAfterSaveCallback);

			return result;
		} finally {
@@ -421,38 +424,52 @@ class DefaultBulkOperations implements BulkOperations {
		models.add(new SourceAwareWriteModelHolder(source, model));
	}

-	private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder it) {
+	private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {

-		if (it.getModel() instanceof InsertOneModel) {
+		if (holder.getModel() instanceof InsertOneModel) {

-			Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
-			maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), target, collectionName));
-		} else if (it.getModel() instanceof ReplaceOneModel) {
+			Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
+			maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
+		} else if (holder.getModel() instanceof ReplaceOneModel) {

-			Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
-			maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), target, collectionName));
+			Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
+			maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
		}
	}

-	private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder it) {
+	private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {

-		if (it.getModel() instanceof InsertOneModel) {
+		if (holder.getModel() instanceof InsertOneModel) {

-			Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
-			maybeEmitEvent(new AfterSaveEvent<>(it.getSource(), target, collectionName));
-		} else if (it.getModel() instanceof ReplaceOneModel) {
+			Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
+			maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
+		} else if (holder.getModel() instanceof ReplaceOneModel) {

-			Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
-			maybeEmitEvent(new AfterSaveEvent<>(it.getSource(), target, collectionName));
+			Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
+			maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
		}
	}

+	private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {
+
+		if (holder.getModel() instanceof InsertOneModel) {
+
+			Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
+			maybeInvokeAfterSaveCallback(holder.getSource(), target);
+		} else if (holder.getModel() instanceof ReplaceOneModel) {
+
+			Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
+			maybeInvokeAfterSaveCallback(holder.getSource(), target);
+		}
+	}
+
	private <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {

-		if (null != bulkOperationContext.getEventPublisher()) {
-			bulkOperationContext.getEventPublisher().publishEvent(event);
+		if (bulkOperationContext.getEventPublisher() == null) {
+			return event;
		}

+		bulkOperationContext.getEventPublisher().publishEvent(event);
		return event;
	}

@@ -475,6 +492,16 @@ class DefaultBulkOperations implements BulkOperations {
				collectionName);
	}

+	private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {
+
+		if (bulkOperationContext.getEntityCallbacks() == null) {
+			return value;
+		}
+
+		return bulkOperationContext.getEntityCallbacks().callback(AfterSaveCallback.class, value, mappedDocument,
+				collectionName);
+	}
+
	private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {

		BulkWriteOptions options = new BulkWriteOptions();
```
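With this change, `AfterSaveCallback` also fires for entities written through `BulkOperations`, not only through `save(...)`/`insert(...)`. A hypothetical callback registration (the `Person` type and its `markPersisted` method are made up for illustration):

```java
import org.bson.types.ObjectId;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;

@Configuration
class CallbackConfiguration {

	// Invoked once per written document, now including documents produced by
	// bulk inserts and bulk replacements.
	@Bean
	AfterSaveCallback<Person> personSavedCallback() {
		return (person, document, collection) -> {
			person.markPersisted(document.getObjectId("_id"));
			return person;
		};
	}

	static class Person {
		private ObjectId persistedId;

		void markPersisted(ObjectId id) {
			this.persistedId = id;
		}
	}
}
```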
```diff
@@ -21,6 +21,7 @@ import java.util.stream.Stream;

import org.springframework.dao.DataAccessException;
import org.springframework.data.geo.GeoResults;
+import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.lang.Nullable;
@@ -43,7 +44,7 @@ import com.mongodb.client.MongoCollection;
 *         query(Human.class)
 *             .inCollection("star-wars")
 *             .as(Jedi.class)
-*             .matching(query(where("firstname").is("luke")))
+*             .matching(where("firstname").is("luke"))
 *             .all();
 * </code>
 * </pre>
@@ -170,6 +171,18 @@ public interface ExecutableFindOperation {
		 */
		TerminatingFind<T> matching(Query query);

+		/**
+		 * Set the filter {@link CriteriaDefinition criteria} to be used.
+		 *
+		 * @param criteria must not be {@literal null}.
+		 * @return new instance of {@link TerminatingFind}.
+		 * @throws IllegalArgumentException if criteria is {@literal null}.
+		 * @since 3.0
+		 */
+		default TerminatingFind<T> matching(CriteriaDefinition criteria) {
+			return matching(Query.query(criteria));
+		}
+
		/**
		 * Set the filter query for the geoNear execution.
		 *
@@ -291,9 +304,21 @@ public interface ExecutableFindOperation {
		 *
		 * @param query must not be {@literal null}.
		 * @return new instance of {@link TerminatingDistinct}.
-		 * @throws IllegalArgumentException if resultType is {@literal null}.
+		 * @throws IllegalArgumentException if query is {@literal null}.
		 */
		TerminatingDistinct<T> matching(Query query);

+		/**
+		 * Set the filter {@link CriteriaDefinition criteria} to be used.
+		 *
+		 * @param criteria must not be {@literal null}.
+		 * @return new instance of {@link TerminatingDistinct}.
+		 * @throws IllegalArgumentException if criteria is {@literal null}.
+		 * @since 3.0
+		 */
+		default TerminatingDistinct<T> matching(CriteriaDefinition criteria) {
+			return matching(Query.query(criteria));
+		}
	}

	/**
```
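The new `matching(CriteriaDefinition)` default methods, mirrored below for the map-reduce, remove, and update operations, let callers drop the `Query.query(...)` wrapper for simple filters. A usage sketch against a hypothetical `Person` domain type:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import java.util.List;

import org.springframework.data.mongodb.core.MongoTemplate;

class FindOperationExample {

	// Before: .matching(query(where("firstname").is("luke")))
	// After:  the criteria can be passed directly, since Criteria implements
	//         CriteriaDefinition and the default method wraps it in a Query.
	static List<Person> findLukes(MongoTemplate template) {
		return template.query(Person.class)
				.matching(where("firstname").is("luke"))
				.all();
	}

	static class Person {
		String firstname;
	}
}
```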
```diff
@@ -19,6 +19,7 @@ import java.util.List;

import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
+import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;

/**
@@ -30,7 +31,7 @@ import org.springframework.data.mongodb.core.query.Query;
 * The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
 * via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows to override the
 * collection name for the execution.
 *
 *
 * <pre>
 * <code>
 * mapReduce(Human.class)
@@ -44,6 +45,7 @@ import org.springframework.data.mongodb.core.query.Query;
 * </pre>
 *
 * @author Christoph Strobl
+ * @author Mark Paluch
 * @since 2.1
 */
public interface ExecutableMapReduceOperation {
@@ -146,6 +148,18 @@ public interface ExecutableMapReduceOperation {
	 * @throws IllegalArgumentException if query is {@literal null}.
	 */
	TerminatingMapReduce<T> matching(Query query);

+	/**
+	 * Set the filter {@link CriteriaDefinition criteria} to be used.
+	 *
+	 * @param criteria must not be {@literal null}.
+	 * @return new instance of {@link TerminatingMapReduce}.
+	 * @throws IllegalArgumentException if query is {@literal null}.
+	 * @since 3.0
+	 */
+	default TerminatingMapReduce<T> matching(CriteriaDefinition criteria) {
+		return matching(Query.query(criteria));
+	}
}

/**
```
```diff
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core;

import java.util.List;

+import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.client.result.DeleteResult;
@@ -119,6 +120,18 @@ public interface ExecutableRemoveOperation {
		 * @throws IllegalArgumentException if query is {@literal null}.
		 */
		TerminatingRemove<T> matching(Query query);

+		/**
+		 * Set the filter {@link CriteriaDefinition criteria} to be used.
+		 *
+		 * @param criteria must not be {@literal null}.
+		 * @return new instance of {@link TerminatingRemove}.
+		 * @throws IllegalArgumentException if query is {@literal null}.
+		 * @since 3.0
+		 */
+		default TerminatingRemove<T> matching(CriteriaDefinition criteria) {
+			return matching(Query.query(criteria));
+		}
	}

	/**
```
```diff
@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core;
import java.util.Optional;

import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
+import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
@@ -210,6 +211,18 @@ public interface ExecutableUpdateOperation {
		 * @throws IllegalArgumentException if query is {@literal null}.
		 */
		UpdateWithUpdate<T> matching(Query query);

+		/**
+		 * Set the filter {@link CriteriaDefinition criteria} to be used.
+		 *
+		 * @param criteria must not be {@literal null}.
+		 * @return new instance of {@link UpdateWithUpdate}.
+		 * @throws IllegalArgumentException if query is {@literal null}.
+		 * @since 3.0
+		 */
+		default UpdateWithUpdate<T> matching(CriteriaDefinition criteria) {
+			return matching(Query.query(criteria));
+		}
	}

	/**
```
```diff
@@ -80,7 +80,11 @@ public class MappedDocument {
	}

	public Bson getIdFilter() {
-		return Filters.eq(ID_FIELD, document.get(ID_FIELD));
+		return new Document(ID_FIELD, document.get(ID_FIELD));
	}

+	public Object get(String key) {
+		return document.get(key);
+	}
+
	public UpdateDefinition updateWithoutId() {
```
```diff
@@ -88,20 +88,12 @@ import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCre
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
-import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent;
-import org.springframework.data.mongodb.core.mapping.event.AfterDeleteEvent;
-import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent;
-import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
-import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
-import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
-import org.springframework.data.mongodb.core.mapping.event.BeforeDeleteEvent;
-import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
-import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
-import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
+import org.springframework.data.mongodb.core.mapping.event.*;
import org.springframework.data.mongodb.core.mapreduce.GroupBy;
import org.springframework.data.mongodb.core.mapreduce.GroupByResults;
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
import org.springframework.data.mongodb.core.mapreduce.MapReduceResults;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Meta;
@@ -167,6 +159,7 @@ import com.mongodb.client.result.UpdateResult;
 * @author Andreas Zink
 * @author Cimon Lucas
 * @author Michael J. Simons
+ * @author Roman Puchkovskiy
 */
public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider {

@@ -246,7 +239,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
		this.projectionFactory = new SpelAwareProxyProjectionFactory();
		this.operations = new EntityOperations(this.mongoConverter.getMappingContext());
		this.propertyOperations = new PropertyOperations(this.mongoConverter.getMappingContext());
-		this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, mongoDbFactory);
+		this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, propertyOperations,
+				mongoDbFactory);

		// We always have a mapping context in the converter, whether it's a simple one or not
		mappingContext = this.mongoConverter.getMappingContext();
@@ -432,8 +426,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

		MongoPersistentEntity<?> persistentEntity = mappingContext.getPersistentEntity(entityType);

-		Document mappedFields = getMappedFieldsObject(query.getFieldsObject(), persistentEntity, returnType);
-		Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), persistentEntity);
+		QueryContext queryContext = queryOperations.createQueryContext(query);
+
+		Document mappedQuery = queryContext.getMappedQuery(persistentEntity);
+		Document mappedFields = queryContext.getMappedFields(persistentEntity, returnType, projectionFactory);

		FindIterable<Document> cursor = new QueryCursorPreparer(query, entityType).initiateFind(collection,
				col -> col.find(mappedQuery, Document.class).projection(mappedFields));
@@ -1062,7 +1058,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
		QueryContext queryContext = queryOperations.createQueryContext(query);

		Document mappedQuery = queryContext.getMappedQuery(entity);
-		Document mappedFields = queryContext.getMappedFields(entity);
+		Document mappedFields = queryContext.getMappedFields(entity, resultType, projectionFactory);
		Document mappedSort = queryContext.getMappedSort(entity);

		replacement = maybeCallBeforeConvert(replacement, collectionName);
@@ -1071,8 +1067,15 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
		maybeEmitEvent(new BeforeSaveEvent<>(replacement, mappedReplacement, collectionName));
		maybeCallBeforeSave(replacement, mappedReplacement, collectionName);

-		return doFindAndReplace(collectionName, mappedQuery, mappedFields, mappedSort,
+		T saved = doFindAndReplace(collectionName, mappedQuery, mappedFields, mappedSort,
				queryContext.getCollation(entityType).orElse(null), entityType, mappedReplacement, options, resultType);
+
+		if (saved != null) {
+			maybeEmitEvent(new AfterSaveEvent<>(saved, mappedReplacement, collectionName));
+			return maybeCallAfterSave(saved, mappedReplacement, collectionName);
+		}
+
+		return saved;
	}

	// Find methods that take a Query to express the query and that return a single object that is also removed from the
@@ -1166,7 +1169,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
	}

	protected void ensureNotIterable(@Nullable Object o) {
-		if (null != o) {
+		if (o != null) {
			if (o.getClass().isArray() || ITERABLE_CLASSES.contains(o.getClass().getName())) {
				throw new IllegalArgumentException("Cannot use a collection here.");
			}
@@ -1234,8 +1237,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

		T saved = populateIdIfNecessary(initialized, id);
		maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName));
-
-		return saved;
+		return maybeCallAfterSave(saved, dbDoc, collectionName);
	}

	@Override
@@ -1328,8 +1330,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

			if (i < ids.size()) {
				T saved = populateIdIfNecessary(obj, ids.get(i));
-				maybeEmitEvent(new AfterSaveEvent<>(saved, documentList.get(i), collectionName));
-				savedObjects.add(saved);
+				Document doc = documentList.get(i);
+				maybeEmitEvent(new AfterSaveEvent<>(saved, doc, collectionName));
+				savedObjects.add(maybeCallAfterSave(saved, doc, collectionName));
			} else {
				savedObjects.add(obj);
			}
@@ -1399,7 +1402,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
		}
		maybeEmitEvent(new AfterSaveEvent<>(toSave, mapped.getDocument(), collectionName));

-		return toSave;
+		return maybeCallAfterSave(toSave, mapped.getDocument(), collectionName);
	}

	protected <T> T doSave(String collectionName, T objectToSave, MongoWriter<T> writer) {
@@ -1420,7 +1423,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
		T saved = populateIdIfNecessary(objectToSave, id);
		maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName));

-		return saved;
+		return maybeCallAfterSave(saved, dbDoc, collectionName);
	}

	@SuppressWarnings("ConstantConditions")
@@ -1480,23 +1483,38 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
		}

		return execute(collectionName, collection -> {

			MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass,
					dbDoc, null);
			WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);

			MappedDocument mapped = MappedDocument.of(dbDoc);

+			MongoCollection<Document> collectionToUse = writeConcernToUse == null //
+					? collection //
+					: collection.withWriteConcern(writeConcernToUse);
+
			if (!mapped.hasId()) {
-				if (writeConcernToUse == null) {
-					collection.insertOne(dbDoc);
-				} else {
-					collection.withWriteConcern(writeConcernToUse).insertOne(dbDoc);
-				}
-			} else if (writeConcernToUse == null) {
-				collection.replaceOne(mapped.getIdFilter(), dbDoc, new ReplaceOptions().upsert(true));
+				collectionToUse.insertOne(dbDoc);
			} else {
-				collection.withWriteConcern(writeConcernToUse).replaceOne(mapped.getIdFilter(), dbDoc,
-						new ReplaceOptions().upsert(true));
+
+				MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
+				UpdateContext updateContext = queryOperations.replaceSingleContext(mapped, true);
+				Document replacement = updateContext.getMappedUpdate(entity);
+
+				Document filter = updateContext.getMappedQuery(entity);
+
+				if (updateContext.requiresShardKey(filter, entity)) {
+
+					if (entity.getShardKey().isImmutable()) {
+						filter = updateContext.applyShardKey(entity, filter, null);
+					} else {
+						filter = updateContext.applyShardKey(entity, filter,
+								collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first());
+					}
+				}
+
+				collectionToUse.replaceOne(filter, replacement, new ReplaceOptions().upsert(true));
			}
			return mapped.getId();
		});
@@ -1615,8 +1633,20 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

			if (!UpdateMapper.isUpdateObject(updateObj)) {

+				Document filter = new Document(queryObj);
+
+				if (updateContext.requiresShardKey(filter, entity)) {
+
+					if (entity.getShardKey().isImmutable()) {
+						filter = updateContext.applyShardKey(entity, filter, null);
+					} else {
+						filter = updateContext.applyShardKey(entity, filter,
+								collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first());
+					}
+				}
+
				ReplaceOptions replaceOptions = updateContext.getReplaceOptions(entityClass);
-				return collection.replaceOne(queryObj, updateObj, replaceOptions);
+				return collection.replaceOne(filter, updateObj, replaceOptions);
			} else {
				return multi ? collection.updateMany(queryObj, updateObj, opts)
						: collection.updateOne(queryObj, updateObj, opts);
@@ -1793,7 +1823,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
		if (query.getMeta().getMaxTimeMsec() != null) {
			mapReduce = mapReduce.maxTime(query.getMeta().getMaxTimeMsec(), TimeUnit.MILLISECONDS);
		}
-		mapReduce = mapReduce.sort(getMappedSortObject(query, domainType));
+
+		Document mappedSort = getMappedSortObject(query, domainType);
+		if (mappedSort != null && !mappedSort.isEmpty()) {
+			mapReduce = mapReduce.sort(getMappedSortObject(query, domainType));
+		}

		mapReduce = mapReduce
				.filter(queryMapper.getMappedObject(query.getQueryObject(), mappingContext.getPersistentEntity(domainType)));
@@ -2279,33 +2313,49 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

	protected <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {

-		if (null != eventPublisher) {
+		if (eventPublisher != null) {
			eventPublisher.publishEvent(event);
		}

		return event;
	}

	@SuppressWarnings("unchecked")
	protected <T> T maybeCallBeforeConvert(T object, String collection) {

-		if (null != entityCallbacks) {
+		if (entityCallbacks != null) {
			return entityCallbacks.callback(BeforeConvertCallback.class, object, collection);
		}

		return object;
	}

	@SuppressWarnings("unchecked")
	protected <T> T maybeCallBeforeSave(T object, Document document, String collection) {

-		if (null != entityCallbacks) {
+		if (entityCallbacks != null) {
			return entityCallbacks.callback(BeforeSaveCallback.class, object, document, collection);
		}

		return object;
	}

+	protected <T> T maybeCallAfterSave(T object, Document document, String collection) {
+
+		if (entityCallbacks != null) {
+			return entityCallbacks.callback(AfterSaveCallback.class, object, document, collection);
+		}
+
+		return object;
+	}
+
+	protected <T> T maybeCallAfterConvert(T object, Document document, String collection) {
+
+		if (entityCallbacks != null) {
+			return entityCallbacks.callback(AfterConvertCallback.class, object, document, collection);
+		}
+
+		return object;
+	}
+
	/**
	 * Create the specified collection using the provided options
	 *
@@ -2393,8 +2443,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

		MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);

-		Document mappedQuery = queryMapper.getMappedObject(query, entity);
-		Document mappedFields = queryMapper.getMappedObject(fields, entity);
+		QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields));
+		Document mappedFields = queryContext.getMappedFields(entity, entityClass, projectionFactory);
+		Document mappedQuery = queryContext.getMappedQuery(entity);

		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("findOne using query: {} fields: {} for class: {} in collection: {}", serializeToJsonSafely(query),
@@ -2444,8 +2495,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

		MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);

-		Document mappedFields = queryMapper.getMappedFields(fields, entity);
-		Document mappedQuery = queryMapper.getMappedObject(query, entity);
+		QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields));
+		Document mappedFields = queryContext.getMappedFields(entity, entityClass, projectionFactory);
+		Document mappedQuery = queryContext.getMappedQuery(entity);

		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}",
@@ -2467,8 +2519,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,

		MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(sourceClass);

-		Document mappedFields = getMappedFieldsObject(fields, entity, targetClass);
-		Document mappedQuery = queryMapper.getMappedObject(query, entity);
+		QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields));
+		Document mappedFields = queryContext.getMappedFields(entity, targetClass, projectionFactory);
+		Document mappedQuery = queryContext.getMappedQuery(entity);

		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}",
@@ -2797,23 +2850,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
		return queryMapper.getMappedSort(query.getSortObject(), mappingContext.getPersistentEntity(type));
	}

-	private Document getMappedFieldsObject(Document fields, @Nullable MongoPersistentEntity<?> entity,
-			Class<?> targetType) {
-
-		if (entity == null) {
-			return fields;
-		}
-
-		Document projectedFields = propertyOperations.computeFieldsForProjection(projectionFactory, fields,
-				entity.getType(), targetType);
-
-		if (ObjectUtils.nullSafeEquals(fields, projectedFields)) {
-			return queryMapper.getMappedFields(projectedFields, entity);
-		}
-
-		return queryMapper.getMappedFields(projectedFields, mappingContext.getRequiredPersistentEntity(targetType));
-	}
-
	/**
	 * Tries to convert the given {@link RuntimeException} into a {@link DataAccessException} but returns the original
	 * exception if the conversation failed. Thus allows safe re-throwing of the return value.
@@ -3075,6 +3111,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
	 *
	 * @author Oliver Gierke
	 * @author Christoph Strobl
+	 * @author Roman Puchkovskiy
	 */
	@RequiredArgsConstructor
	private class ReadDocumentCallback<T> implements DocumentCallback<T> {
@@ -3084,16 +3121,18 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
		private final String collectionName;

		@Nullable
-		public T doWith(@Nullable Document object) {
+		public T doWith(@Nullable Document document) {

-			if (null != object) {
-				maybeEmitEvent(new AfterLoadEvent<>(object, type, collectionName));
+			T source = null;
+
+			if (document != null) {
+				maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));
+				source = reader.read(type, document);
			}

-			T source = reader.read(type, object);
-
-			if (null != source) {
-				maybeEmitEvent(new AfterConvertEvent<>(object, source, collectionName));
+			if (source != null) {
+				maybeEmitEvent(new AfterConvertEvent<>(document, source, collectionName));
+				source = maybeCallAfterConvert(source, document, collectionName);
			}

			return source;
@@ -3122,24 +3161,23 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
		 */
		@SuppressWarnings("unchecked")
		@Nullable
-		public T doWith(@Nullable Document object) {
+		public T doWith(@Nullable Document document) {

-			if (object == null) {
+			if (document == null) {
				return null;
			}

			Class<?> typeToRead = targetType.isInterface() || targetType.isAssignableFrom(entityType) ? entityType
					: targetType;

-			if (null != object) {
-				maybeEmitEvent(new AfterLoadEvent<>(object, targetType, collectionName));
-			}
+			maybeEmitEvent(new AfterLoadEvent<>(document, targetType, collectionName));

-			Object source = reader.read(typeToRead, object);
+			Object source = reader.read(typeToRead, document);
			Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source;

-			if (null != result) {
-				maybeEmitEvent(new AfterConvertEvent<>(object, result, collectionName));
+			if (result != null) {
+				maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName));
+				result = maybeCallAfterConvert(result, document, collectionName);
			}

			return (T) result;
```
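With the new `maybeCallAfterSave` and `maybeCallAfterConvert` hooks, `MongoTemplate` routes every saved or loaded entity through `AfterSaveCallback` and `AfterConvertCallback` beans, which may also replace the entity instance. A hypothetical `AfterConvertCallback` registration (the `Account` type and enrichment logic are illustrative):

```java
import org.bson.Document;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;

@Configuration
class ReadCallbackConfiguration {

	// Invoked after a document has been converted into an entity; the returned
	// instance is what callers of find/findOne/etc. actually receive.
	@Bean
	AfterConvertCallback<Account> accountReadCallback() {
		return (account, document, collection) -> {
			account.setRaw(document);
			return account;
		};
	}

	static class Account {
		private Document raw;

		void setRaw(Document raw) {
			this.raw = raw;
		}
	}
}
```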
@@ -16,7 +16,10 @@
package org.springframework.data.mongodb.core;

import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
@@ -37,14 +40,17 @@ import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.mapping.ShardKey;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

import com.mongodb.client.model.CountOptions;
@@ -55,9 +61,10 @@ import com.mongodb.client.model.UpdateOptions;
/**
* {@link QueryOperations} centralizes common operations required before an operation is actually ready to be executed.
* This involves mapping {@link Query queries} into their respective MongoDB representation, computing execution options
* for {@literal count}, {@literal remove}, ... <br />
* for {@literal count}, {@literal remove}, and other methods.
*
* @author Christoph Strobl
* @author Mark Paluch
* @since 3.0
*/
class QueryOperations {
@@ -65,9 +72,11 @@ class QueryOperations {
private final QueryMapper queryMapper;
private final UpdateMapper updateMapper;
private final EntityOperations entityOperations;
private final PropertyOperations propertyOperations;
private final CodecRegistryProvider codecRegistryProvider;
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
private final AggregationUtil aggregationUtil;
private final Map<Class<?>, Document> mappedShardKey = new ConcurrentHashMap<>(1);

/**
* Create a new instance of {@link QueryOperations}.
@@ -75,14 +84,16 @@ class QueryOperations {
* @param queryMapper must not be {@literal null}.
* @param updateMapper must not be {@literal null}.
* @param entityOperations must not be {@literal null}.
* @param propertyOperations must not be {@literal null}.
* @param codecRegistryProvider must not be {@literal null}.
*/
QueryOperations(QueryMapper queryMapper, UpdateMapper updateMapper, EntityOperations entityOperations,
CodecRegistryProvider codecRegistryProvider) {
PropertyOperations propertyOperations, CodecRegistryProvider codecRegistryProvider) {

this.queryMapper = queryMapper;
this.updateMapper = updateMapper;
this.entityOperations = entityOperations;
this.propertyOperations = propertyOperations;
this.codecRegistryProvider = codecRegistryProvider;
this.mappingContext = queryMapper.getMappingContext();
this.aggregationUtil = new AggregationUtil(queryMapper, mappingContext);
@@ -154,6 +165,15 @@ class QueryOperations {
return new UpdateContext(updateDefinition, query, false, upsert);
}

/**
* @param replacement the {@link MappedDocument mapped replacement} document.
* @param upsert use {@literal true} to insert the replacement when no existing document is found.
* @return new instance of {@link UpdateContext}.
*/
UpdateContext replaceSingleContext(MappedDocument replacement, boolean upsert) {
return new UpdateContext(replacement, upsert);
}

/**
* Create a new {@link DeleteContext} instance removing all matching documents.
*
@@ -235,14 +255,31 @@ class QueryOperations {
return queryMapper.getMappedObject(getQueryObject(), entity);
}

/**
* Get the already mapped {@link Query#getFieldsObject() fields projection}.
*
* @param entity the Entity to map field names to. Can be {@literal null}.
* @return never {@literal null}.
*/
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity) {
return queryMapper.getMappedFields(query.getFieldsObject(), entity);
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType,
ProjectionFactory projectionFactory) {

Document fields = query.getFieldsObject();
Document mappedFields = fields;

if (entity == null) {
return mappedFields;
}

Document projectedFields = propertyOperations.computeFieldsForProjection(projectionFactory, fields,
entity.getType(), targetType);

if (ObjectUtils.nullSafeEquals(fields, projectedFields)) {
mappedFields = queryMapper.getMappedFields(projectedFields, entity);
} else {
mappedFields = queryMapper.getMappedFields(projectedFields,
mappingContext.getRequiredPersistentEntity(targetType));
}

if (entity != null && entity.hasTextScoreProperty() && !query.getQueryObject().containsKey("$text")) {
mappedFields.remove(entity.getTextScoreProperty().getFieldName());
}

return mappedFields;
}

/**
@@ -253,7 +290,6 @@ class QueryOperations {
*/
Document getMappedSort(@Nullable MongoPersistentEntity<?> entity) {
return queryMapper.getMappedSort(query.getSortObject(), entity);

}

/**
@@ -305,6 +341,10 @@ class QueryOperations {
}

@Override
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType, ProjectionFactory projectionFactory) {
return getMappedFields(entity);
}

Document getMappedFields(@Nullable MongoPersistentEntity<?> entity) {
return queryMapper.getMappedFields(new Document(fieldName, 1), entity);
}
@@ -353,7 +393,6 @@ class QueryOperations {
if (ClassUtils.isAssignable(requestedTargetType, propertyType)) {
conversionTargetType = propertyType;
}

} catch (PropertyReferenceException e) {
// just don't care about it as we default to Object.class anyway.
}
@@ -491,7 +530,8 @@ class QueryOperations {

private final boolean multi;
private final boolean upsert;
private final UpdateDefinition update;
private final @Nullable UpdateDefinition update;
private final @Nullable MappedDocument mappedDocument;

/**
* Create a new {@link UpdateContext} instance.
@@ -520,6 +560,16 @@ class QueryOperations {
this.multi = multi;
this.upsert = upsert;
this.update = update;
this.mappedDocument = null;
}

UpdateContext(MappedDocument update, boolean upsert) {

super(new BasicQuery(new Document(BsonUtils.asMap(update.getIdFilter()))));
this.multi = false;
this.upsert = upsert;
this.mappedDocument = update;
this.update = null;
}

/**
@@ -544,7 +594,7 @@ class QueryOperations {
UpdateOptions options = new UpdateOptions();
options.upsert(upsert);

if (update.hasArrayFilters()) {
if (update != null && update.hasArrayFilters()) {
options
.arrayFilters(update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()));
}
@@ -602,6 +652,53 @@ class QueryOperations {
return mappedQuery;
}

<T> Document applyShardKey(MongoPersistentEntity<T> domainType, Document filter, @Nullable Document existing) {

Document shardKeySource = existing != null ? existing
: mappedDocument != null ? mappedDocument.getDocument() : getMappedUpdate(domainType);

Document filterWithShardKey = new Document(filter);
getMappedShardKeyFields(domainType).forEach(key -> filterWithShardKey.putIfAbsent(key, shardKeySource.get(key)));

return filterWithShardKey;
}

boolean requiresShardKey(Document filter, @Nullable MongoPersistentEntity<?> domainType) {

return !multi && domainType != null && domainType.isSharded() && !shardedById(domainType)
&& !filter.keySet().containsAll(getMappedShardKeyFields(domainType));
}

/**
* @return {@literal true} if the {@link MongoPersistentEntity#getShardKey() shard key} is the entity's
* {@literal id} property.
* @since 3.0
*/
private boolean shardedById(MongoPersistentEntity<?> domainType) {

ShardKey shardKey = domainType.getShardKey();
if (shardKey.size() != 1) {
return false;
}

String key = shardKey.getPropertyNames().iterator().next();
if ("_id".equals(key)) {
return true;
}

MongoPersistentProperty idProperty = domainType.getIdProperty();
return idProperty != null && idProperty.getName().equals(key);
}

Set<String> getMappedShardKeyFields(MongoPersistentEntity<?> entity) {
return getMappedShardKey(entity).keySet();
}

Document getMappedShardKey(MongoPersistentEntity<?> entity) {
return mappedShardKey.computeIfAbsent(entity.getType(),
key -> queryMapper.getMappedFields(entity.getShardKey().getDocument(), entity));
}

/**
* Get the already mapped aggregation pipeline to use with an {@link #isAggregationUpdate()}.
*
@@ -625,8 +722,11 @@ class QueryOperations {
*/
Document getMappedUpdate(@Nullable MongoPersistentEntity<?> entity) {

return update instanceof MappedUpdate ? update.getUpdateObject()
: updateMapper.getMappedObject(update.getUpdateObject(), entity);
if (update != null) {
return update instanceof MappedUpdate ? update.getUpdateObject()
: updateMapper.getMappedObject(update.getUpdateObject(), entity);
}
return mappedDocument.getDocument();
}

/**
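A minimal sketch of the scenario the shard-key helpers above target: an entity whose shard key `requiresShardKey(...)` and `applyShardKey(...)` fold into single-document replace filters. Only the `@Sharded` annotation and its `shardKey` attribute come with this changeset; the `Account` type and its fields are assumed for illustration.

import java.math.BigDecimal;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.Sharded;

// Hypothetical sharded entity: "country" is the shard key, so replaceOne
// filters must contain it; the helpers above append it when the
// caller-supplied query does not already carry the shard key fields.
@Document("accounts")
@Sharded(shardKey = { "country" })
class Account {

    @Id String id;
    String country;
    BigDecimal balance;
}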
@@ -19,6 +19,7 @@ import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import org.springframework.data.geo.GeoResult;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;

@@ -38,13 +39,14 @@ import org.springframework.data.mongodb.core.query.Query;
* query(Human.class)
* .inCollection("star-wars")
* .as(Jedi.class)
* .matching(query(where("firstname").is("luke")))
* .matching(where("firstname").is("luke"))
* .all();
* </code>
* </pre>
*
* @author Mark Paluch
* @author Christoph Strobl
* @author Juergen Zimmermann
* @since 2.0
*/
public interface ReactiveFindOperation {
@@ -144,6 +146,18 @@ public interface ReactiveFindOperation {
*/
TerminatingFind<T> matching(Query query);

/**
* Set the filter {@link CriteriaDefinition criteria} to be used.
*
* @param criteria must not be {@literal null}.
* @return new instance of {@link TerminatingFind}.
* @throws IllegalArgumentException if criteria is {@literal null}.
* @since 3.0
*/
default TerminatingFind<T> matching(CriteriaDefinition criteria) {
return matching(Query.query(criteria));
}

/**
* Set the filter query for the geoNear execution.
*
@@ -259,9 +273,21 @@ public interface ReactiveFindOperation {
*
* @param query must not be {@literal null}.
* @return new instance of {@link TerminatingDistinct}.
* @throws IllegalArgumentException if resultType is {@literal null}.
* @throws IllegalArgumentException if query is {@literal null}.
*/
TerminatingDistinct<T> matching(Query query);

/**
* Set the filter {@link CriteriaDefinition criteria} to be used.
*
* @param criteria must not be {@literal null}.
* @return new instance of {@link TerminatingDistinct}.
* @throws IllegalArgumentException if criteria is {@literal null}.
* @since 3.0
*/
default TerminatingDistinct<T> matching(CriteriaDefinition criteria) {
return matching(Query.query(criteria));
}
}

/**
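A usage sketch of the `matching(CriteriaDefinition)` shortcut added above, mirroring the updated Javadoc example; `template` is assumed to be a configured `ReactiveMongoTemplate`, and `Human`/`Jedi` are the example types from the interface docs.

import static org.springframework.data.mongodb.core.query.Criteria.where;

import reactor.core.publisher.Flux;

// The criteria overload wraps the CriteriaDefinition into a Query internally,
// so the explicit query(...) wrapper from earlier versions can be dropped.
Flux<Jedi> luke = template.query(Human.class)
        .inCollection("star-wars")
        .as(Jedi.class)
        .matching(where("firstname").is("luke"))
        .all();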
@@ -19,6 +19,7 @@ import reactor.core.publisher.Flux;

import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;

/**
@@ -30,7 +31,7 @@ import org.springframework.data.mongodb.core.query.Query;
* The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
* via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the
* collection name for the execution.
*
*
* <pre>
* <code>
* mapReduce(Human.class)
@@ -146,6 +147,18 @@ public interface ReactiveMapReduceOperation {
* @throws IllegalArgumentException if query is {@literal null}.
*/
TerminatingMapReduce<T> matching(Query query);

/**
* Set the filter {@link CriteriaDefinition criteria} to be used.
*
* @param criteria must not be {@literal null}.
* @return new instance of {@link TerminatingMapReduce}.
* @throws IllegalArgumentException if query is {@literal null}.
* @since 3.0
*/
default TerminatingMapReduce<T> matching(CriteriaDefinition criteria) {
return matching(Query.query(criteria));
}
}

/**

@@ -66,6 +66,7 @@ import com.mongodb.reactivestreams.client.MongoCollection;
*
* @author Mark Paluch
* @author Christoph Strobl
* @author Mathieu Ouellet
* @since 2.0
* @see Flux
* @see Mono
@@ -298,7 +299,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* @param collectionName name of the collection.
* @return an existing collection or one created on first server interaction.
*/
MongoCollection<Document> getCollection(String collectionName);
Mono<MongoCollection<Document>> getCollection(String collectionName);

/**
* Check to see if a collection with a name indicated by the entity class exists.
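Since `getCollection` now returns a `Mono`, collection access participates in the reactive flow instead of resolving eagerly. A hedged sketch, assuming a configured `template`:

import com.mongodb.reactivestreams.client.MongoCollection;

import org.bson.Document;
import reactor.core.publisher.Mono;

// Resolving the collection is deferred until subscription; driver errors are
// propagated through the Mono rather than thrown from the getter.
Mono<Long> count = template.getCollection("star-wars")
        .flatMap(collection -> Mono.from(collection.estimatedDocumentCount()));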
@@ -39,7 +39,6 @@ import org.reactivestreams.Publisher;
import org.reactivestreams.Subscriber;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
@@ -92,17 +91,9 @@ import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent;
import org.springframework.data.mongodb.core.mapping.event.AfterDeleteEvent;
import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeDeleteEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback;
import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback;
import org.springframework.data.mongodb.core.mapping.event.*;
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Meta;
import org.springframework.data.mongodb.core.query.Meta.CursorOption;
@@ -164,6 +155,8 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
*
* @author Mark Paluch
* @author Christoph Strobl
* @author Roman Puchkovskiy
* @author Mathieu Ouellet
* @since 2.0
*/
public class ReactiveMongoTemplate implements ReactiveMongoOperations, ApplicationContextAware {
@@ -266,7 +259,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
this.mappingContext = this.mongoConverter.getMappingContext();
this.operations = new EntityOperations(this.mappingContext);
this.propertyOperations = new PropertyOperations(this.mappingContext);
this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, mongoDatabaseFactory);
this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, propertyOperations,
mongoDatabaseFactory);

// We create indexes based on mapping events
if (this.mappingContext instanceof MongoMappingContext) {
@@ -725,15 +719,11 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#getCollection(java.lang.String)
*/
public MongoCollection<Document> getCollection(String collectionName) {
public Mono<MongoCollection<Document>> getCollection(String collectionName) {

Assert.notNull(collectionName, "Collection name must not be null!");

try {
return this.mongoDatabaseFactory.getMongoDatabase().getCollection(collectionName);
} catch (RuntimeException e) {
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
}
return createMono(db -> Mono.just(db.getCollection(collectionName)));
}

/*
@@ -784,7 +774,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
return createFlux(MongoDatabase::listCollectionNames);
}

public MongoDatabase getMongoDatabase() {
public Mono<MongoDatabase> getMongoDatabase() {
return mongoDatabaseFactory.getMongoDatabase();
}

@@ -1051,7 +1041,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
cursor = cursor.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS);
}

return Flux.from(cursor).map(readCallback::doWith);
return Flux.from(cursor).concatMap(readCallback::doWith);
}

/*
@@ -1094,7 +1084,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());

return aggregate($geoNear, collection, Document.class) //
.map(callback::doWith);
.concatMap(callback::doWith);
}

/*
@@ -1169,28 +1159,33 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
QueryContext queryContext = queryOperations.createQueryContext(query);

Document mappedQuery = queryContext.getMappedQuery(entity);
Document mappedFields = queryContext.getMappedFields(entity);
Document mappedFields = queryContext.getMappedFields(entity, resultType, projectionFactory);
Document mappedSort = queryContext.getMappedSort(entity);

return Mono.just(PersistableEntityModel.of(replacement, collectionName)) //
.doOnNext(it -> maybeEmitEvent(new BeforeConvertEvent<>(it.getSource(), it.getCollection()))) //
.flatMap(it -> maybeCallBeforeConvert(it.getSource(), it.getCollection()).map(it::mutate))
.map(it -> it
.addTargetDocument(operations.forEntity(it.getSource()).toMappedDocument(mongoConverter).getDocument())) //
.doOnNext(it -> maybeEmitEvent(new BeforeSaveEvent(it.getSource(), it.getTarget(), it.getCollection()))) //
.flatMap(it -> {
return Mono.defer(() -> {

PersistableEntityModel<S> flowObject = (PersistableEntityModel<S>) it;
return maybeCallBeforeSave(flowObject.getSource(), flowObject.getTarget(), flowObject.getCollection())
.map(potentiallyModified -> PersistableEntityModel.of(potentiallyModified, flowObject.getTarget(),
flowObject.getCollection()));
}).flatMap(it -> {
PersistableEntityModel<S> pem = PersistableEntityModel.of(replacement, collectionName);

PersistableEntityModel<S> flowObject = (PersistableEntityModel<S>) it;
return doFindAndReplace(flowObject.getCollection(), mappedQuery, mappedFields, mappedSort,
queryContext.getCollation(entityType).orElse(null), entityType, flowObject.getTarget(), options,
resultType);
maybeEmitEvent(new BeforeConvertEvent<>(pem.getSource(), pem.getCollection()));

return maybeCallBeforeConvert(pem.getSource(), pem.getCollection()).map(pem::mutate).flatMap(it -> {
PersistableEntityModel<S> mapped = it
.addTargetDocument(operations.forEntity(it.getSource()).toMappedDocument(mongoConverter).getDocument());
maybeEmitEvent(new BeforeSaveEvent(mapped.getSource(), mapped.getTarget(), mapped.getCollection()));

return maybeCallBeforeSave(it.getSource(), mapped.getTarget(), mapped.getCollection())
.map(potentiallyModified -> PersistableEntityModel.of(potentiallyModified, mapped.getTarget(),
mapped.getCollection()));
}).flatMap(it -> {

Mono<T> afterFindAndReplace = doFindAndReplace(it.getCollection(), mappedQuery, mappedFields, mappedSort,
queryContext.getCollation(entityType).orElse(null), entityType, it.getTarget(), options, resultType);
return afterFindAndReplace.flatMap(saved -> {
maybeEmitEvent(new AfterSaveEvent<>(saved, it.getTarget(), it.getCollection()));
return maybeCallAfterSave(saved, it.getTarget(), it.getCollection());
});
});
});
}
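The reworked pipeline above also maps the fields projection against the `resultType`. A hedged caller-side sketch, assuming the `findAndReplace` overload on `ReactiveMongoOperations`, the `Human`/`Jedi` example types, and a `newLuke` replacement instance:

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.FindAndReplaceOptions;
import reactor.core.publisher.Mono;

// Replaces the matching Human document and projects the returned document
// onto the Jedi result type; lifecycle events and callbacks fire around it.
Mono<Jedi> previous = template.findAndReplace(query(where("firstname").is("luke")),
        newLuke, FindAndReplaceOptions.options(), Human.class, "star-wars", Jedi.class);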
/*
@@ -1337,21 +1332,22 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
AdaptibleEntity<T> entity = operations.forEntity(it.getSource(), mongoConverter.getConversionService());
entity.assertUpdateableIdIfNotSet();

return PersistableEntityModel.of(entity.initializeVersionProperty(),
PersistableEntityModel<T> model = PersistableEntityModel.of(entity.initializeVersionProperty(),
entity.toMappedDocument(writer).getDocument(), it.getCollection());
}).doOnNext(it -> maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), it.getTarget(), it.getCollection()))) //

maybeEmitEvent(new BeforeSaveEvent<>(model.getSource(), model.getTarget(), model.getCollection()));
return model;
})//
.flatMap(it -> {

return maybeCallBeforeSave(it.getSource(), it.getTarget(), it.getCollection()).map(it::mutate);

}).flatMap(it -> {

return insertDocument(it.getCollection(), it.getTarget(), it.getSource().getClass()).map(id -> {
return insertDocument(it.getCollection(), it.getTarget(), it.getSource().getClass()).flatMap(id -> {

T saved = operations.forEntity(it.getSource(), mongoConverter.getConversionService())
.populateIdIfNecessary(id);
maybeEmitEvent(new AfterSaveEvent<>(saved, it.getTarget(), collectionName));
return saved;
return maybeCallAfterSave(saved, it.getTarget(), collectionName);
});
});
}
@@ -1437,13 +1433,14 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
return insertDocumentList(collectionName, documents).thenMany(Flux.fromIterable(tuples));
});

return insertDocuments.map(tuple -> {
return insertDocuments.flatMap(tuple -> {

Object id = MappedDocument.of(tuple.getT2()).getId();
Document document = tuple.getT2();
Object id = MappedDocument.of(document).getId();

T saved = tuple.getT1().populateIdIfNecessary(id);
maybeEmitEvent(new AfterSaveEvent<>(saved, tuple.getT2(), collectionName));
return saved;
maybeEmitEvent(new AfterSaveEvent<>(saved, document, collectionName));
return maybeCallAfterSave(saved, document, collectionName);
});
}

@@ -1523,9 +1520,11 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
maybeEmitEvent(new BeforeSaveEvent<>(toConvert, document, collectionName));
return maybeCallBeforeSave(toConvert, document, collectionName).flatMap(it -> {

return doUpdate(collectionName, query, mapped.updateWithoutId(), it.getClass(), false, false).map(result -> {
return maybeEmitEvent(new AfterSaveEvent<T>(it, document, collectionName)).getSource();
});
return doUpdate(collectionName, query, mapped.updateWithoutId(), it.getClass(), false, false)
.flatMap(result -> {
maybeEmitEvent(new AfterSaveEvent<T>(it, document, collectionName));
return maybeCallAfterSave(it, document, collectionName);
});
});
});
});
@@ -1547,10 +1546,11 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

return maybeCallBeforeSave(toConvert, dbDoc, collectionName).flatMap(it -> {

return saveDocument(collectionName, dbDoc, it.getClass()).map(id -> {
return saveDocument(collectionName, dbDoc, it.getClass()).flatMap(id -> {

T saved = entity.populateIdIfNecessary(id);
return maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName)).getSource();
maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName));
return maybeCallAfterSave(saved, dbDoc, collectionName);
});
});
});
@@ -1638,9 +1638,34 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
? collection //
: collection.withWriteConcern(writeConcernToUse);

Publisher<?> publisher = !mapped.hasId() //
? collectionToUse.insertOne(document) //
: collectionToUse.replaceOne(mapped.getIdFilter(), document, new ReplaceOptions().upsert(true));
Publisher<?> publisher;
if (!mapped.hasId()) {
publisher = collectionToUse.insertOne(document);
} else {

MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
UpdateContext updateContext = queryOperations.replaceSingleContext(mapped, true);
Document filter = updateContext.getMappedQuery(entity);
Document replacement = updateContext.getMappedUpdate(entity);

Mono<Document> deferredFilter;

if (updateContext.requiresShardKey(filter, entity)) {
if (entity.getShardKey().isImmutable()) {
deferredFilter = Mono.just(updateContext.applyShardKey(entity, filter, null));
} else {
deferredFilter = Mono
.from(
collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first())
.defaultIfEmpty(replacement).map(it -> updateContext.applyShardKey(entity, filter, it));
}
} else {
deferredFilter = Mono.just(filter);
}

publisher = deferredFilter.flatMapMany(
it -> collectionToUse.replaceOne(it, replacement, updateContext.getReplaceOptions(entityClass)));
}

return Mono.from(publisher).map(o -> mapped.getId());
});
@@ -1778,8 +1803,23 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

if (!UpdateMapper.isUpdateObject(updateObj)) {

Document filter = new Document(queryObj);
Mono<Document> deferredFilter;

if (updateContext.requiresShardKey(filter, entity)) {
if (entity.getShardKey().isImmutable()) {
deferredFilter = Mono.just(updateContext.applyShardKey(entity, filter, null));
} else {
deferredFilter = Mono.from(
collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first())
.defaultIfEmpty(updateObj).map(it -> updateContext.applyShardKey(entity, filter, it));
}
} else {
deferredFilter = Mono.just(filter);
}

ReplaceOptions replaceOptions = updateContext.getReplaceOptions(entityClass);
return collectionToUse.replaceOne(queryObj, updateObj, replaceOptions);
return deferredFilter.flatMap(it -> Mono.from(collectionToUse.replaceOne(it, updateObj, replaceOptions)));
}

return multi ? collectionToUse.updateMany(queryObj, updateObj, updateOptions)
@@ -2031,24 +2071,25 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
FullDocument fullDocument = ClassUtils.isAssignable(Document.class, targetType) ? FullDocument.DEFAULT
: FullDocument.UPDATE_LOOKUP;

MongoDatabase db = StringUtils.hasText(database) ? mongoDatabaseFactory.getMongoDatabase(database)
: getMongoDatabase();
return ReactiveMongoDatabaseUtils.getDatabase(database, mongoDatabaseFactory) //
.map(db -> {
ChangeStreamPublisher<Document> publisher;
if (StringUtils.hasText(collectionName)) {
publisher = filter.isEmpty() ? db.getCollection(collectionName).watch(Document.class)
: db.getCollection(collectionName).watch(filter, Document.class);

ChangeStreamPublisher<Document> publisher;
if (StringUtils.hasText(collectionName)) {
publisher = filter.isEmpty() ? db.getCollection(collectionName).watch(Document.class)
: db.getCollection(collectionName).watch(filter, Document.class);
} else {
publisher = filter.isEmpty() ? db.watch(Document.class) : db.watch(filter, Document.class);
}

} else {
publisher = filter.isEmpty() ? db.watch(Document.class) : db.watch(filter, Document.class);
}

publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::resumeAfter).orElse(publisher);
publisher = options.getCollation().map(Collation::toMongoCollation).map(publisher::collation).orElse(publisher);
publisher = options.getResumeBsonTimestamp().map(publisher::startAtOperationTime).orElse(publisher);
publisher = publisher.fullDocument(options.getFullDocumentLookup().orElse(fullDocument));

return Flux.from(publisher).map(document -> new ChangeStreamEvent<>(document, targetType, getConverter()));
publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::resumeAfter).orElse(publisher);
publisher = options.getCollation().map(Collation::toMongoCollation).map(publisher::collation)
.orElse(publisher);
publisher = options.getResumeBsonTimestamp().map(publisher::startAtOperationTime).orElse(publisher);
return publisher.fullDocument(options.getFullDocumentLookup().orElse(fullDocument));
}) //
.flatMapMany(publisher -> Flux.from(publisher)
.map(document -> new ChangeStreamEvent<>(document, targetType, getConverter())));
}

List<Document> prepareFilter(ChangeStreamOptions options) {
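A hedged sketch of consuming the reworked change stream; the public `changeStream` template method signature and the `Person` type are assumptions, not part of this diff.

import reactor.core.publisher.Flux;

// Database resolution now happens inside the reactive pipeline via
// ReactiveMongoDatabaseUtils, so subscribing is enough to start listening.
Flux<ChangeStreamEvent<Person>> events = template.changeStream("database", "person",
        ChangeStreamOptions.builder().returnFullDocumentOnUpdate().build(), Person.class);

events.map(ChangeStreamEvent::getBody).subscribe(System.out::println);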
@@ -2110,7 +2151,11 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
MapReducePublisher<Document> publisher = collection.mapReduce(mapFunction, reduceFunction, Document.class);

publisher.filter(mappedQuery);
publisher.sort(getMappedSortObject(filterQuery, domainType));

Document mappedSort = getMappedSortObject(filterQuery, domainType);
if (mappedSort != null && !mappedSort.isEmpty()) {
publisher.sort(mappedSort);
}

if (filterQuery.getMeta().getMaxTimeMsec() != null) {
publisher.maxTime(filterQuery.getMeta().getMaxTimeMsec(), TimeUnit.MILLISECONDS);
@@ -2174,7 +2219,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
publisher = collation.map(Collation::toMongoCollation).map(publisher::collation).orElse(publisher);

return Flux.from(publisher)
.map(new ReadDocumentCallback<>(mongoConverter, resultType, inputCollectionName)::doWith);
.concatMap(new ReadDocumentCallback<>(mongoConverter, resultType, inputCollectionName)::doWith);
});
}

@@ -2290,7 +2335,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
LOGGER.debug("Created collection [{}]", collectionName);
}

}).thenReturn(getCollection(collectionName));
}).then(getCollection(collectionName));
}

/**
@@ -2327,8 +2372,11 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
Class<T> entityClass, FindPublisherPreparer preparer) {

MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
Document mappedQuery = queryMapper.getMappedObject(query, entity);
Document mappedFields = fields == null ? null : queryMapper.getMappedObject(fields, entity);

QueryContext queryContext = queryOperations
.createQueryContext(new BasicQuery(query, fields != null ? fields : new Document()));
Document mappedFields = queryContext.getMappedFields(entity, entityClass, projectionFactory);
Document mappedQuery = queryContext.getMappedQuery(entity);

if (LOGGER.isDebugEnabled()) {
LOGGER.debug(String.format("findOne using query: %s fields: %s for class: %s in collection: %s",
@@ -2378,8 +2426,9 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);

Document mappedFields = queryMapper.getMappedFields(fields, entity);
Document mappedQuery = queryMapper.getMappedObject(query, entity);
QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields));
Document mappedFields = queryContext.getMappedFields(entity, entityClass, projectionFactory);
Document mappedQuery = queryContext.getMappedQuery(entity);

if (LOGGER.isDebugEnabled()) {
LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s",
@@ -2401,8 +2450,9 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(sourceClass);

Document mappedFields = getMappedFieldsObject(fields, entity, targetClass);
Document mappedQuery = queryMapper.getMappedObject(query, entity);
QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields));
Document mappedFields = queryContext.getMappedFields(entity, targetClass, projectionFactory);
Document mappedQuery = queryContext.getMappedQuery(entity);

if (LOGGER.isDebugEnabled()) {
LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}",
@@ -2567,33 +2617,49 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

protected <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {

if (null != eventPublisher) {
if (eventPublisher != null) {
eventPublisher.publishEvent(event);
}

return event;
}

@SuppressWarnings("unchecked")
protected <T> Mono<T> maybeCallBeforeConvert(T object, String collection) {

if (null != entityCallbacks) {
if (entityCallbacks != null) {
return entityCallbacks.callback(ReactiveBeforeConvertCallback.class, object, collection);
}

return Mono.just(object);
}

@SuppressWarnings("unchecked")
protected <T> Mono<T> maybeCallBeforeSave(T object, Document document, String collection) {

if (null != entityCallbacks) {
if (entityCallbacks != null) {
return entityCallbacks.callback(ReactiveBeforeSaveCallback.class, object, document, collection);
}

return Mono.just(object);
}

protected <T> Mono<T> maybeCallAfterSave(T object, Document document, String collection) {

if (entityCallbacks != null) {
return entityCallbacks.callback(ReactiveAfterSaveCallback.class, object, document, collection);
}

return Mono.just(object);
}

protected <T> Mono<T> maybeCallAfterConvert(T object, Document document, String collection) {

if (entityCallbacks != null) {
return entityCallbacks.callback(ReactiveAfterConvertCallback.class, object, document, collection);
}

return Mono.just(object);
}

private MongoCollection<Document> getAndPrepareCollection(MongoDatabase db, String collectionName) {

try {
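The new `maybeCallAfterSave`/`maybeCallAfterConvert` hooks invoke reactive entity callbacks. A sketch of a callback bean they would pick up; the `Person` type and component scanning are assumed, and the callback interface signature is inferred from the class names in this changeset.

import org.bson.Document;
import org.reactivestreams.Publisher;
import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback;
import org.springframework.stereotype.Component;

import reactor.core.publisher.Mono;

// Invoked after a Person was written; the returned Publisher must emit the
// (potentially modified) entity that is handed back to the caller.
@Component
class PersonAfterSaveCallback implements ReactiveAfterSaveCallback<Person> {

    @Override
    public Publisher<Person> onAfterSave(Person entity, Document document, String collection) {
        return Mono.just(entity);
    }
}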
@@ -2681,7 +2747,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
DocumentCallback<T> objectCallback, String collectionName) {

return createMono(collectionName,
collection -> Mono.from(collectionCallback.doInCollection(collection)).map(objectCallback::doWith));
collection -> Mono.from(collectionCallback.doInCollection(collection)).flatMap(objectCallback::doWith));
}

/**
@@ -2707,7 +2773,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

return createFlux(collectionName, collection -> {
return Flux.from(preparer.initiateFind(collection, collectionCallback::doInCollection))
.map(objectCallback::doWith);
.concatMap(objectCallback::doWith);
});
}

@@ -3003,7 +3069,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

interface DocumentCallback<T> {

T doWith(Document object);
Mono<T> doWith(Document object);
}

/**
@@ -3032,6 +3098,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* {@link EntityReader}.
*
* @author Mark Paluch
* @author Roman Puchkovskiy
*/
class ReadDocumentCallback<T> implements DocumentCallback<T> {

@@ -3049,27 +3116,28 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
this.collectionName = collectionName;
}

public T doWith(@Nullable Document object) {
public Mono<T> doWith(Document document) {

if (null != object) {
maybeEmitEvent(new AfterLoadEvent<>(object, type, collectionName));
maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));

T source = reader.read(type, document);
if (source != null) {
maybeEmitEvent(new AfterConvertEvent<>(document, source, collectionName));
return maybeCallAfterConvert(source, document, collectionName);
}
T source = reader.read(type, object);
if (null != source) {
maybeEmitEvent(new AfterConvertEvent<>(object, source, collectionName));
}
return source;

return Mono.empty();
}
}

/**
* {@link MongoTemplate.DocumentCallback} transforming {@link Document} into the given {@code targetType} or
* decorating the {@code sourceType} with a {@literal projection} in case the {@code targetType} is an
* {@litera interface}.
* {@link DocumentCallback} transforming {@link Document} into the given {@code targetType} or decorating the
* {@code sourceType} with a {@literal projection} in case the {@code targetType} is an {@literal interface}.
*
* @param <S>
* @param <T>
* @author Christoph Strobl
* @author Roman Puchkovskiy
* @since 2.0
*/
@RequiredArgsConstructor
@@ -3080,29 +3148,25 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
private final @NonNull Class<T> targetType;
private final @NonNull String collectionName;

@Nullable
@SuppressWarnings("unchecked")
public T doWith(@Nullable Document object) {

if (object == null) {
return null;
}
public Mono<T> doWith(Document document) {

Class<?> typeToRead = targetType.isInterface() || targetType.isAssignableFrom(entityType) //
? entityType //
: targetType;

if (null != object) {
maybeEmitEvent(new AfterLoadEvent<>(object, typeToRead, collectionName));
}
maybeEmitEvent(new AfterLoadEvent<>(document, typeToRead, collectionName));

Object source = reader.read(typeToRead, object);
Object source = reader.read(typeToRead, document);
Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source;

if (null != source) {
maybeEmitEvent(new AfterConvertEvent<>(object, result, collectionName));
T castEntity = (T) result;
if (castEntity != null) {
maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName));
return maybeCallAfterConvert(castEntity, document, collectionName);
}
return (T) result;

return Mono.empty();
}
}

@@ -3112,6 +3176,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*
* @author Mark Paluch
* @author Christoph Strobl
* @author Roman Puchkovskiy
*/
static class GeoNearResultDocumentCallback<T> implements DocumentCallback<GeoResult<T>> {

@@ -3136,16 +3201,20 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
this.metric = metric;
}

public GeoResult<T> doWith(Document object) {
public Mono<GeoResult<T>> doWith(Document object) {

double distance = getDistance(object);

return delegate.doWith(object).map(doWith -> new GeoResult<>(doWith, new Distance(distance, metric)));
}

double getDistance(Document object) {

double distance = Double.NaN;
if (object.containsKey(distanceField)) {
distance = NumberUtils.convertNumberToTargetClass(object.get(distanceField, Number.class), Double.class);
return NumberUtils.convertNumberToTargetClass(object.get(distanceField, Number.class), Double.class);
}

T doWith = delegate.doWith(object);

return new GeoResult<>(doWith, new Distance(distance, metric));
return Double.NaN;
}
}

@@ -3163,7 +3232,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
this.type = type;
}

@SuppressWarnings("deprecation")
public FindPublisher<Document> prepare(FindPublisher<Document> findPublisher) {

FindPublisher<Document> findPublisherToUse = operations.forType(type) //
@@ -3280,7 +3348,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoTemplate#getCollection(java.lang.String)
*/
@Override
public MongoCollection<Document> getCollection(String collectionName) {
public Mono<MongoCollection<Document>> getCollection(String collectionName) {

// native MongoDB objects that offer methods with ClientSession must not be proxied.
return delegate.getCollection(collectionName);
@@ -3291,7 +3359,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoTemplate#getMongoDatabase()
*/
@Override
public MongoDatabase getMongoDatabase() {
public Mono<MongoDatabase> getMongoDatabase() {

// native MongoDB objects that offer methods with ClientSession must not be proxied.
return delegate.getMongoDatabase();

@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.client.result.DeleteResult;
@@ -106,6 +107,18 @@ public interface ReactiveRemoveOperation {
* @throws IllegalArgumentException if query is {@literal null}.
*/
TerminatingRemove<T> matching(Query query);

/**
* Set the filter {@link CriteriaDefinition criteria} to be used.
*
* @param criteria must not be {@literal null}.
* @return new instance of {@link TerminatingRemove}.
* @throws IllegalArgumentException if query is {@literal null}.
* @since 3.0
*/
default TerminatingRemove<T> matching(CriteriaDefinition criteria) {
return matching(Query.query(criteria));
}
}

interface ReactiveRemove<T> extends RemoveWithCollection<T> {}

@@ -15,12 +15,13 @@
*/
package org.springframework.data.mongodb.core;

import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import reactor.core.publisher.Mono;

import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;

import com.mongodb.client.result.UpdateResult;

@@ -171,6 +172,18 @@ public interface ReactiveUpdateOperation {
* @throws IllegalArgumentException if query is {@literal null}.
*/
UpdateWithUpdate<T> matching(Query query);

/**
* Set the filter {@link CriteriaDefinition criteria} to be used.
*
* @param criteria must not be {@literal null}.
* @return new instance of {@link UpdateWithUpdate}.
* @throws IllegalArgumentException if query is {@literal null}.
* @since 3.0
*/
default UpdateWithUpdate<T> matching(CriteriaDefinition criteria) {
return matching(Query.query(criteria));
}
}

/**
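The same criteria shortcut applies to the fluent update API above. A hedged sketch, with `template` and `Person` assumed:

import static org.springframework.data.mongodb.core.query.Criteria.where;

import com.mongodb.client.result.UpdateResult;

import org.springframework.data.mongodb.core.query.Update;
import reactor.core.publisher.Mono;

// matching(CriteriaDefinition) wraps the criteria into a Query before the update runs.
Mono<UpdateResult> result = template.update(Person.class)
        .matching(where("firstname").is("luke"))
        .apply(new Update().set("lastname", "skywalker"))
        .first();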
@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core;
import lombok.Value;
import reactor.core.publisher.Mono;

import org.bson.codecs.configuration.CodecRegistry;
import org.springframework.aop.framework.ProxyFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.dao.DataAccessException;
@@ -41,6 +42,7 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
*
* @author Mark Paluch
* @author Christoph Strobl
* @author Mathieu Ouellet
* @since 2.0
*/
public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, ReactiveMongoDatabaseFactory {
@@ -99,7 +101,7 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
* (non-Javadoc)
* @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase()
*/
public MongoDatabase getMongoDatabase() throws DataAccessException {
public Mono<MongoDatabase> getMongoDatabase() throws DataAccessException {
return getMongoDatabase(databaseName);
}

@@ -107,12 +109,16 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
* (non-Javadoc)
* @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase(java.lang.String)
*/
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
public Mono<MongoDatabase> getMongoDatabase(String dbName) throws DataAccessException {

Assert.hasText(dbName, "Database name must not be empty.");

MongoDatabase db = mongo.getDatabase(dbName);
return writeConcern != null ? db.withWriteConcern(writeConcern) : db;
return Mono.fromSupplier(() -> {

MongoDatabase db = mongo.getDatabase(dbName);

return writeConcern != null ? db.withWriteConcern(writeConcern) : db;
});
}
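With the factory returning `Mono<MongoDatabase>`, database handles are resolved lazily per subscription. A sketch, assuming a configured `factory` instance:

import com.mongodb.reactivestreams.client.MongoDatabase;

import reactor.core.publisher.Mono;

// Nothing talks to the driver until subscription; the write concern is applied
// when the Mono materializes the database handle.
Mono<String> name = factory.getMongoDatabase("star-wars").map(MongoDatabase::getName);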

/**
@@ -135,6 +141,15 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
return this.exceptionTranslator;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getCodecRegistry()
*/
@Override
public CodecRegistry getCodecRegistry() {
return this.mongo.getDatabase(databaseName).getCodecRegistry();
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
@@ -171,8 +186,8 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()
*/
@Override
public MongoDatabase getMongoDatabase() throws DataAccessException {
return decorateDatabase(delegate.getMongoDatabase());
public Mono<MongoDatabase> getMongoDatabase() throws DataAccessException {
return delegate.getMongoDatabase().map(this::decorateDatabase);
}

/*
@@ -180,8 +195,8 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase(java.lang.String)
*/
@Override
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
return decorateDatabase(delegate.getMongoDatabase(dbName));
public Mono<MongoDatabase> getMongoDatabase(String dbName) throws DataAccessException {
return delegate.getMongoDatabase(dbName).map(this::decorateDatabase);
}

/*
@@ -193,6 +208,15 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
return delegate.getExceptionTranslator();
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getCodecRegistry()
*/
@Override
public CodecRegistry getCodecRegistry() {
return delegate.getCodecRegistry();
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getSession(com.mongodb.ClientSessionOptions)

@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.aggregation;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
@@ -100,14 +101,14 @@ abstract class AbstractAggregationExpression implements AggregationExpression {
return value;
}

protected List<Object> append(Object value) {
protected List<Object> append(Object value, Expand expandList) {

if (this.value instanceof List) {

List<Object> clone = new ArrayList<Object>((List) this.value);

if (value instanceof List) {
clone.addAll((List) value);
if (value instanceof Collection && Expand.EXPAND_VALUES.equals(expandList)) {
clone.addAll((Collection<?>) value);
} else {
clone.add(value);
}
@@ -117,6 +118,17 @@ abstract class AbstractAggregationExpression implements AggregationExpression {
return Arrays.asList(this.value, value);
}

/**
* Expand a nested list of values to single entries or keep the list.
*/
protected enum Expand {
EXPAND_VALUES, KEEP_SOURCE
}

protected List<Object> append(Object value) {
return append(value, Expand.EXPAND_VALUES);
}

@SuppressWarnings("unchecked")
protected java.util.Map<String, Object> append(String key, Object value) {


@@ -0,0 +1,200 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.aggregation;

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddFieldsOperationBuilder.ValueAppender;
import org.springframework.lang.Nullable;

/**
* Adds new fields to documents. {@code $addFields} outputs documents that contain all existing fields from the input
* documents and newly added fields.
*
* <pre class="code">
* AddFieldsOperation.addField("totalHomework").withValue("A+").and().addField("totalQuiz").withValue("B-")
* </pre>
*
* @author Christoph Strobl
* @since 3.0
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/addFields/">MongoDB Aggregation
* Framework: $addFields</a>
*/
public class AddFieldsOperation extends DocumentEnhancingOperation {

/**
* Create new instance of {@link AddFieldsOperation} adding map keys as exposed fields.
*
* @param source must not be {@literal null}.
*/
private AddFieldsOperation(Map<Object, Object> source) {
super(source);
}

/**
* Create a new instance of {@link AddFieldsOperation}.
*
|
||||
	 * @param field must not be {@literal null}.
	 * @param value can be {@literal null}.
	 */
	public AddFieldsOperation(Object field, @Nullable Object value) {
		this(Collections.singletonMap(field, value));
	}

	/**
	 * Define the {@link AddFieldsOperation} via {@link AddFieldsOperationBuilder}.
	 *
	 * @return new instance of {@link AddFieldsOperationBuilder}.
	 */
	public static AddFieldsOperationBuilder builder() {
		return new AddFieldsOperationBuilder();
	}

	/**
	 * Concatenate another field to add.
	 *
	 * @param field must not be {@literal null}.
	 * @return new instance of {@link AddFieldsOperationBuilder}.
	 */
	public static ValueAppender addField(String field) {
		return new AddFieldsOperationBuilder().addField(field);
	}

	/**
	 * Append the value for a specific field to the operation.
	 *
	 * @param field the target field to add.
	 * @param value the value to assign.
	 * @return new instance of {@link AddFieldsOperation}.
	 */
	public AddFieldsOperation addField(Object field, Object value) {

		LinkedHashMap<Object, Object> target = new LinkedHashMap<>(getValueMap());
		target.put(field, value);

		return new AddFieldsOperation(target);
	}

	/**
	 * Concatenate additional fields to add.
	 *
	 * @return new instance of {@link AddFieldsOperationBuilder}.
	 */
	public AddFieldsOperationBuilder and() {
		return new AddFieldsOperationBuilder(getValueMap());
	}

	@Override
	protected String mongoOperator() {
		return "$addFields";
	}

	/**
	 * @author Christoph Strobl
	 * @since 3.0
	 */
	public static class AddFieldsOperationBuilder {

		private final Map<Object, Object> valueMap;

		private AddFieldsOperationBuilder() {
			this.valueMap = new LinkedHashMap<>();
		}

		private AddFieldsOperationBuilder(Map<Object, Object> source) {
			this.valueMap = new LinkedHashMap<>(source);
		}

		public AddFieldsOperationBuilder addFieldWithValue(String field, @Nullable Object value) {
			return addField(field).withValue(value);
		}

		public AddFieldsOperationBuilder addFieldWithValueOf(String field, Object value) {
			return addField(field).withValueOf(value);
		}

		/**
		 * Define the field to add.
		 *
		 * @param field must not be {@literal null}.
		 * @return new instance of {@link ValueAppender}.
		 */
		public ValueAppender addField(String field) {

			return new ValueAppender() {

				@Override
				public AddFieldsOperationBuilder withValue(Object value) {

					valueMap.put(field, value);
					return AddFieldsOperationBuilder.this;
				}

				@Override
				public AddFieldsOperationBuilder withValueOf(Object value) {

					valueMap.put(field, value instanceof String ? Fields.fields((String) value) : value);
					return AddFieldsOperationBuilder.this;
				}

				@Override
				public AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values) {

					valueMap.put(field, new ExpressionProjection(operation, values));
					return AddFieldsOperationBuilder.this;
				}
			};
		}

		public AddFieldsOperation build() {
			return new AddFieldsOperation(valueMap);
		}

		/**
		 * @author Christoph Strobl
		 * @since 3.0
		 */
		public interface ValueAppender {

			/**
			 * Define the value to assign as is.
			 *
			 * @param value can be {@literal null}.
			 * @return new instance of {@link AddFieldsOperation}.
			 */
			AddFieldsOperationBuilder withValue(@Nullable Object value);

			/**
			 * Define the value to assign. Plain {@link String} values are treated as {@link Field field references}.
			 *
			 * @param value must not be {@literal null}.
			 * @return new instance of {@link AddFieldsOperation}.
			 */
			AddFieldsOperationBuilder withValueOf(Object value);

			/**
			 * Adds a generic projection for the current field.
			 *
			 * @param operation the operation key, e.g. {@code $add}.
			 * @param values the values to be set for the projection operation.
			 * @return new instance of {@link AddFieldsOperation}.
			 */
			AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values);
		}
	}
}
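Taken together, AddFieldsOperation and its builder give a fluent way to construct an $addFields stage. A minimal, hedged usage sketch in the spirit of the other javadoc examples (the field names and values are illustrative, not part of this change):

AddFieldsOperation addFields = AddFieldsOperation.builder() //
		.addFieldWithValue("shrewd", true) // assigns the literal value true
		.addFieldWithValueOf("score", "totalHomework") // a plain String is treated as a field reference
		.build();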
@@ -23,9 +23,11 @@ import java.util.List;
import org.bson.Document;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddFieldsOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.CountOperation.CountOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.FacetOperation.FacetOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.GraphLookupOperation.StartWithBuilder;
import org.springframework.data.mongodb.core.aggregation.MergeOperation.MergeOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootDocumentOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootOperationBuilder;
import org.springframework.data.mongodb.core.query.Criteria;
@@ -117,7 +119,7 @@ public class Aggregation {

	/**
	 * Creates a new {@link AggregationUpdate} from the given {@link AggregationOperation}s.
	 *
	 * @param operations can be {@literal empty} but must not be {@literal null}.
	 * @return new instance of {@link AggregationUpdate}.
	 * @since 3.0
@@ -200,11 +202,16 @@ public class Aggregation {
		Assert.notNull(aggregationOperations, "AggregationOperations must not be null!");
		Assert.notNull(options, "AggregationOptions must not be null!");

		// check $out is the last operation if it exists
		// check $out/$merge is the last operation if it exists
		for (AggregationOperation aggregationOperation : aggregationOperations) {

			if (aggregationOperation instanceof OutOperation && !isLast(aggregationOperation, aggregationOperations)) {
				throw new IllegalArgumentException("The $out operator must be the last stage in the pipeline.");
			}

			if (aggregationOperation instanceof MergeOperation && !isLast(aggregationOperation, aggregationOperations)) {
				throw new IllegalArgumentException("The $merge operator must be the last stage in the pipeline.");
			}
		}

		this.operations = aggregationOperations;
@@ -234,6 +241,20 @@ public class Aggregation {
		return "_id";
	}

	/**
	 * Obtain an {@link AddFieldsOperationBuilder builder} instance to create a new {@link AddFieldsOperation}.
	 * <p/>
	 * Starting in version 4.2, MongoDB adds a new aggregation pipeline stage {@link AggregationUpdate#set $set} that is
	 * an alias for {@code $addFields}.
	 *
	 * @return new instance of {@link AddFieldsOperationBuilder}.
	 * @see AddFieldsOperation
	 * @since 3.0
	 */
	public static AddFieldsOperationBuilder addFields() {
		return AddFieldsOperation.builder();
	}

	/**
	 * Creates a new {@link ProjectionOperation} including the given fields.
	 *
@@ -493,6 +514,30 @@ public class Aggregation {
		return new MatchOperation(criteria);
	}

	/**
	 * Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}.
	 * The {@code distanceField} defines the output field that contains the calculated distance.
	 *
	 * @param query must not be {@literal null}.
	 * @param distanceField must not be {@literal null} or empty.
	 * @return new instance of {@link GeoNearOperation}.
	 * @since 1.7
	 */
	public static GeoNearOperation geoNear(NearQuery query, String distanceField) {
		return new GeoNearOperation(query, distanceField);
	}

	/**
	 * Obtain a {@link MergeOperationBuilder builder} instance to create a new {@link MergeOperation}.
	 *
	 * @return new instance of {@link MergeOperationBuilder}.
	 * @see MergeOperation
	 * @since 3.0
	 */
	public static MergeOperationBuilder merge() {
		return MergeOperation.builder();
	}

	/**
	 * Creates a new {@link OutOperation} using the given collection name. This operation must be the last operation in
	 * the pipeline.
@@ -612,6 +657,26 @@ public class Aggregation {
		return new CountOperationBuilder();
	}

	/**
	 * Creates a new {@link RedactOperation} that can restrict the content of a document based on information stored
	 * within the document itself.
	 *
	 * <pre class="code">
	 *
	 * Aggregation.redact(ConditionalOperators.when(Criteria.where("level").is(5)) //
	 * 		.then(RedactOperation.PRUNE) //
	 * 		.otherwise(RedactOperation.DESCEND));
	 * </pre>
	 *
	 * @param condition Any {@link AggregationExpression} that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or
	 *          {@literal $$KEEP}. Must not be {@literal null}.
	 * @return new instance of {@link RedactOperation}. Never {@literal null}.
	 * @since 3.0
	 */
	public static RedactOperation redact(AggregationExpression condition) {
		return new RedactOperation(condition);
	}

	/**
	 * Creates a new {@link Fields} instance for the given field names.
	 *
@@ -634,19 +699,6 @@ public class Aggregation {
		return Fields.from(field(name, target));
	}

	/**
	 * Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}.
	 * The {@code distanceField} defines the output field that contains the calculated distance.
	 *
	 * @param query must not be {@literal null}.
	 * @param distanceField must not be {@literal null} or empty.
	 * @return new instance of {@link GeoNearOperation}.
	 * @since 1.7
	 */
	public static GeoNearOperation geoNear(NearQuery query, String distanceField) {
		return new GeoNearOperation(query, distanceField);
	}

	/**
	 * Returns a new {@link AggregationOptions.Builder}.
	 *
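Note how the constructor now validates $merge the same way it already validated $out. A hedged sketch of the observable behavior (collection and field names are made up):

// Throws IllegalArgumentException: "The $merge operator must be the last stage in the pipeline."
Aggregation.newAggregation( //
		Aggregation.merge().intoCollection("monthlyTotals").build(), //
		Aggregation.match(Criteria.where("status").is("active")));

// Valid: $merge is the terminal stage.
Aggregation.newAggregation( //
		Aggregation.match(Criteria.where("status").is("active")), //
		Aggregation.merge().intoCollection("monthlyTotals").build());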
@@ -20,7 +20,6 @@ import java.lang.reflect.Method;
import java.util.Arrays;

import org.bson.Document;

import org.springframework.beans.BeanUtils;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
import org.springframework.lang.Nullable;
@@ -103,4 +102,16 @@ public interface AggregationOperationContext {
				.map(PropertyDescriptor::getName) //
				.toArray(String[]::new));
	}

	/**
	 * This toggle allows the {@link AggregationOperationContext context} to use any given field name without checking
	 * for its existence. Typically, the {@link AggregationOperationContext} fails throughout the pipeline when
	 * referencing unknown fields, i.e. fields that are not present in one of the previous stages or the input source.
	 *
	 * @return a more relaxed {@link AggregationOperationContext}.
	 * @since 3.0
	 */
	default AggregationOperationContext continueOnMissingFieldReference() {
		return this;
	}
}
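Since the default implementation simply returns this, only validating contexts need to override the toggle. A hedged sketch of opting into the relaxed behavior (Person, mappingContext and queryMapper are assumed to exist in the surrounding code):

// A type-checked context would normally raise an error for unknown fields
// when the pipeline is rendered.
AggregationOperationContext strict = new TypeBasedAggregationOperationContext(Person.class, mappingContext,
		queryMapper);

// Tolerates field names that are not part of the Person type model.
AggregationOperationContext relaxed = strict.continueOnMissingFieldReference();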
@@ -79,7 +79,7 @@ import org.springframework.util.Assert;
public class AggregationUpdate extends Aggregation implements UpdateDefinition {

	private boolean isolated = false;
	private Set<String> keysTouched = new HashSet<>();
	private final Set<String> keysTouched = new HashSet<>();

	/**
	 * Create new {@link AggregationUpdate}.

@@ -411,7 +411,7 @@ public class ComparisonOperators {
		public Cmp compareToValue(Object value) {

			Assert.notNull(value, "Value must not be null!");
			return new Cmp(append(value));
			return new Cmp(append(value, Expand.KEEP_SOURCE));
		}
	}

@@ -488,7 +488,7 @@ public class ComparisonOperators {
		public Eq equalToValue(Object value) {

			Assert.notNull(value, "Value must not be null!");
			return new Eq(append(value));
			return new Eq(append(value, Expand.KEEP_SOURCE));
		}
	}

@@ -873,7 +873,7 @@ public class ComparisonOperators {
		public Ne notEqualToValue(Object value) {

			Assert.notNull(value, "Value must not be null!");
			return new Ne(append(value));
			return new Ne(append(value, Expand.KEEP_SOURCE));
		}
	}
}
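The switch to Expand.KEEP_SOURCE matters when the comparison value is itself a collection: the collection is kept as a single operator argument instead of being expanded into the argument list. A hedged sketch (the field name and values are illustrative):

// Compares the "tags" field against the whole array value; expected to render
// roughly as: { $eq : [ "$tags", [ "a", "b" ] ] }
AggregationExpression eq = ComparisonOperators.valueOf("tags").equalToValue(Arrays.asList("a", "b"));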
@@ -0,0 +1,166 @@
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.stream.Collectors;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
import org.springframework.util.Assert;

/**
 * Base class for common tasks required by {@link SetOperation} and {@link AddFieldsOperation}.
 *
 * @author Christoph Strobl
 * @since 3.0
 */
abstract class DocumentEnhancingOperation implements InheritsFieldsAggregationOperation {

	private final Map<Object, Object> valueMap;

	private ExposedFields exposedFields = ExposedFields.empty();

	protected DocumentEnhancingOperation(Map<Object, Object> source) {

		this.valueMap = new LinkedHashMap<>(source);
		for (Object key : source.keySet()) {
			this.exposedFields = add(key);
		}
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
	 */
	@Override
	public Document toDocument(AggregationOperationContext context) {

		InheritingExposedFieldsAggregationOperationContext operationContext = new InheritingExposedFieldsAggregationOperationContext(
				exposedFields, context);

		if (valueMap.size() == 1) {
			return context.getMappedObject(
					new Document(mongoOperator(), toSetEntry(valueMap.entrySet().iterator().next(), operationContext)));
		}

		Document $set = new Document();
		valueMap.entrySet().stream().map(it -> toSetEntry(it, operationContext)).forEach($set::putAll);
		return context.getMappedObject(new Document(mongoOperator(), $set));
	}

	/**
	 * @return the String representation of the native MongoDB operator.
	 */
	protected abstract String mongoOperator();

	/**
	 * @return the raw value map.
	 */
	protected Map<Object, Object> getValueMap() {
		return this.valueMap;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
	 */
	@Override
	public ExposedFields getFields() {
		return exposedFields;
	}

	private ExposedFields add(Object field) {

		if (field instanceof Field) {
			return exposedFields.and(new ExposedField((Field) field, true));
		}
		if (field instanceof String) {
			return exposedFields.and(new ExposedField(Fields.field((String) field), true));
		}

		throw new IllegalArgumentException(String.format("Expected %s to be a field/property.", field));
	}

	private static Document toSetEntry(Entry<Object, Object> entry, AggregationOperationContext context) {

		String field = entry.getKey() instanceof String ? context.getReference((String) entry.getKey()).getRaw()
				: context.getReference((Field) entry.getKey()).getRaw();

		Object value = computeValue(entry.getValue(), context);

		return new Document(field, value);
	}

	private static Object computeValue(Object value, AggregationOperationContext context) {

		if (value instanceof Field) {
			return context.getReference((Field) value).toString();
		}

		if (value instanceof ExpressionProjection) {
			return ((ExpressionProjection) value).toExpression(context);
		}

		if (value instanceof AggregationExpression) {
			return ((AggregationExpression) value).toDocument(context);
		}

		if (value instanceof Collection) {
			return ((Collection<?>) value).stream().map(it -> computeValue(it, context)).collect(Collectors.toList());
		}

		return value;
	}

	/**
	 * An {@link AggregationExpression} based on a SpEL expression.
	 *
	 * @author Mark Paluch
	 */
	static class ExpressionProjection {

		private static final SpelExpressionTransformer TRANSFORMER = new SpelExpressionTransformer();

		private final String expression;
		private final Object[] params;

		/**
		 * Creates a new {@link ExpressionProjection} for the given SpEL expression and parameters.
		 *
		 * @param expression must not be {@literal null} or empty.
		 * @param parameters must not be {@literal null}.
		 */
		ExpressionProjection(String expression, Object[] parameters) {

			Assert.notNull(expression, "Expression must not be null!");
			Assert.notNull(parameters, "Parameters must not be null!");

			this.expression = expression;
			this.params = parameters.clone();
		}

		Object toExpression(AggregationOperationContext context) {
			return TRANSFORMER.transform(expression, context, params);
		}
	}
}
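To illustrate what the shared toDocument implementation renders, here is a hedged sketch using the AddFieldsOperation subclass against the default (untyped) context; the field name is made up:

// A single entry renders as: { "$addFields" : { "totalScore" : 100 } }
// Multiple entries are merged under one operator document.
Document stage = AddFieldsOperation.builder() //
		.addFieldWithValue("totalScore", 100) //
		.build() //
		.toDocument(Aggregation.DEFAULT_CONTEXT);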
@@ -0,0 +1,594 @@
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

/**
 * Encapsulates the {@code $merge}-operation.
 * <p>
 * We recommend using the {@link MergeOperationBuilder builder} via {@link MergeOperation#builder()} instead of
 * creating instances of this class directly.
 *
 * @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/merge/">MongoDB Documentation</a>
 * @author Christoph Strobl
 * @since 3.0
 */
public class MergeOperation implements FieldsExposingAggregationOperation, InheritsFieldsAggregationOperation {

	private final MergeOperationTarget into;
	private final UniqueMergeId on;
	private final @Nullable Let let;
	private final @Nullable WhenDocumentsMatch whenMatched;
	private final @Nullable WhenDocumentsDontMatch whenNotMatched;

	/**
	 * Create new instance of {@link MergeOperation}.
	 *
	 * @param into the target (collection and database).
	 * @param on the unique identifier used to detect matching documents. Must not be {@literal null}; use
	 *          {@link UniqueMergeId#id()} as the default.
	 * @param let exposed variables for {@link WhenDocumentsMatch#updateWith(Aggregation)}. Can be {@literal null}.
	 * @param whenMatched behavior if a result document matches an existing one in the target collection. Can be
	 *          {@literal null}.
	 * @param whenNotMatched behavior if a result document does not match an existing one in the target collection. Can
	 *          be {@literal null}.
	 */
	public MergeOperation(MergeOperationTarget into, UniqueMergeId on, @Nullable Let let,
			@Nullable WhenDocumentsMatch whenMatched, @Nullable WhenDocumentsDontMatch whenNotMatched) {

		Assert.notNull(into, "Into must not be null! Please provide a target collection.");
		Assert.notNull(on, "On must not be null! Use UniqueMergeId.id() instead.");

		this.into = into;
		this.on = on;
		this.let = let;
		this.whenMatched = whenMatched;
		this.whenNotMatched = whenNotMatched;
	}

	/**
	 * Simplified form to apply all default options for {@code $merge} (including writing to a collection in the same
	 * database).
	 *
	 * @param collection the output collection within the same database.
	 * @return new instance of {@link MergeOperation}.
	 */
	public static MergeOperation mergeInto(String collection) {
		return builder().intoCollection(collection).build();
	}

	/**
	 * Access the {@link MergeOperationBuilder builder API} to create a new instance of {@link MergeOperation}.
	 *
	 * @return new instance of {@link MergeOperationBuilder}.
	 */
	public static MergeOperationBuilder builder() {
		return new MergeOperationBuilder();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.Aggregation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
	 */
	@Override
	public Document toDocument(AggregationOperationContext context) {

		if (isJustCollection()) {
			return new Document("$merge", into.collection);
		}

		Document $merge = new Document();
		$merge.putAll(into.toDocument(context));

		if (!on.isJustIdField()) {
			$merge.putAll(on.toDocument(context));
		}

		if (let != null) {
			$merge.append("let", let.toDocument(context).get("$let", Document.class).get("vars"));
		}

		if (whenMatched != null) {
			$merge.putAll(whenMatched.toDocument(context));
		}

		if (whenNotMatched != null) {
			$merge.putAll(whenNotMatched.toDocument(context));
		}

		return new Document("$merge", $merge);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
	 */
	@Override
	public ExposedFields getFields() {

		if (let == null) {
			return ExposedFields.from();
		}

		return ExposedFields.synthetic(Fields.fields(let.getVariableNames()));
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation#inheritsFields()
	 */
	@Override
	public boolean inheritsFields() {
		return true;
	}

	/**
	 * @return true if nothing more than the collection is specified.
	 */
	private boolean isJustCollection() {
		return into.isTargetingSameDatabase() && on.isJustIdField() && let == null && whenMatched == null
				&& whenNotMatched == null;
	}

	/**
	 * Value object representing the unique id used during the merge operation to identify duplicates in the target
	 * collection.
	 *
	 * @author Christoph Strobl
	 */
	public static class UniqueMergeId {

		private static final UniqueMergeId ID = new UniqueMergeId(Collections.emptyList());

		private final Collection<String> uniqueIdentifier;

		private UniqueMergeId(Collection<String> uniqueIdentifier) {
			this.uniqueIdentifier = uniqueIdentifier;
		}

		public static UniqueMergeId ofIdFields(String... fields) {

			Assert.noNullElements(fields, "Fields must not contain null values!");

			if (ObjectUtils.isEmpty(fields)) {
				return id();
			}

			return new UniqueMergeId(Arrays.asList(fields));
		}

		/**
		 * Merge documents by using the MongoDB {@literal _id} field.
		 *
		 * @return the default {@link UniqueMergeId} targeting the {@literal _id} field.
		 */
		public static UniqueMergeId id() {
			return ID;
		}

		boolean isJustIdField() {
			return this.equals(ID);
		}

		Document toDocument(AggregationOperationContext context) {

			List<String> mappedOn = uniqueIdentifier.stream().map(context::getReference).map(FieldReference::getRaw)
					.collect(Collectors.toList());
			return new Document("on", mappedOn.size() == 1 ? mappedOn.iterator().next() : mappedOn);
		}
	}

	/**
	 * Value Object representing the {@code into} field of a {@code $merge} aggregation stage. <br />
	 * If not stated explicitly via {@link MergeOperationTarget#inDatabase(String)} the {@literal collection} is created
	 * in the very same {@literal database}. In this case {@code into} is just a single String holding the collection
	 * name. <br />
	 *
	 * <pre class="code">
	 * into: "target-collection-name"
	 * </pre>
	 *
	 * If the collection needs to be in a different database, {@code into} will be a {@link Document} like the following:
	 *
	 * <pre class="code">
	 * {
	 * 	into: {}
	 * }
	 * </pre>
	 *
	 * @author Christoph Strobl
	 * @since 2.3
	 */
	public static class MergeOperationTarget {

		private final @Nullable String database;
		private final String collection;

		private MergeOperationTarget(@Nullable String database, String collection) {

			Assert.hasText(collection, "Collection must not be null nor empty!");

			this.database = database;
			this.collection = collection;
		}

		/**
		 * @param collection The output collection results will be stored in. Must not be {@literal null}.
		 * @return new instance of {@link MergeOperationTarget}.
		 */
		public static MergeOperationTarget collection(String collection) {
			return new MergeOperationTarget(null, collection);
		}

		/**
		 * Optionally specify the target database if different from the source one.
		 *
		 * @param database must not be {@literal null}.
		 * @return new instance of {@link MergeOperationTarget}.
		 */
		public MergeOperationTarget inDatabase(String database) {
			return new MergeOperationTarget(database, collection);
		}

		boolean isTargetingSameDatabase() {
			return !StringUtils.hasText(database);
		}

		Document toDocument(AggregationOperationContext context) {

			return new Document("into",
					!StringUtils.hasText(database) ? collection : new Document("db", database).append("coll", collection));
		}
	}

	/**
	 * Value Object specifying how to deal with a result document that matches an existing document in the collection
	 * based on the fields of the {@code on} property describing the unique identifier.
	 *
	 * @author Christoph Strobl
	 * @since 2.3
	 */
	public static class WhenDocumentsMatch {

		private final Object value;

		private WhenDocumentsMatch(Object value) {
			this.value = value;
		}

		public static WhenDocumentsMatch whenMatchedOf(String value) {
			return new WhenDocumentsMatch(value);
		}

		/**
		 * Replace the existing document in the output collection with the matching results document.
		 *
		 * @return new instance of {@link WhenDocumentsMatch}.
		 */
		public static WhenDocumentsMatch replaceDocument() {
			return whenMatchedOf("replace");
		}

		/**
		 * Keep the existing document in the output collection.
		 *
		 * @return new instance of {@link WhenDocumentsMatch}.
		 */
		public static WhenDocumentsMatch keepExistingDocument() {
			return whenMatchedOf("keepExisting");
		}

		/**
		 * Merge the matching documents. Please see the MongoDB reference documentation for details.
		 *
		 * @return new instance of {@link WhenDocumentsMatch}.
		 */
		public static WhenDocumentsMatch mergeDocuments() {
			return whenMatchedOf("merge");
		}

		/**
		 * Stop and fail the aggregation operation. Does not revert already performed changes on previous documents.
		 *
		 * @return new instance of {@link WhenDocumentsMatch}.
		 */
		public static WhenDocumentsMatch failOnMatch() {
			return whenMatchedOf("fail");
		}

		/**
		 * Use an {@link Aggregation} to update the document in the collection. Please see the MongoDB reference
		 * documentation for details.
		 *
		 * @param aggregation must not be {@literal null}.
		 * @return new instance of {@link WhenDocumentsMatch}.
		 */
		public static WhenDocumentsMatch updateWith(Aggregation aggregation) {
			return new WhenDocumentsMatch(aggregation);
		}

		/**
		 * Use an aggregation pipeline to update the document in the collection. Please see the MongoDB reference
		 * documentation for details.
		 *
		 * @param aggregationPipeline must not be {@literal null}.
		 * @return new instance of {@link WhenDocumentsMatch}.
		 */
		public static WhenDocumentsMatch updateWith(List<AggregationOperation> aggregationPipeline) {
			return new WhenDocumentsMatch(aggregationPipeline);
		}

		Document toDocument(AggregationOperationContext context) {

			if (value instanceof Aggregation) {
				return new Document("whenMatched", ((Aggregation) value).toPipeline(context));
			}

			return new Document("whenMatched", value);
		}
	}

	/**
	 * Value Object specifying how to deal with result documents that do not match an existing document in the
	 * collection based on the fields of the {@code on} property describing the unique identifier.
	 *
	 * @author Christoph Strobl
	 * @since 2.3
	 */
	public static class WhenDocumentsDontMatch {

		private final String value;

		private WhenDocumentsDontMatch(String value) {

			Assert.notNull(value, "Value must not be null!");

			this.value = value;
		}

		/**
		 * Factory method creating {@link WhenDocumentsDontMatch} from a {@code value} literal.
		 *
		 * @param value must not be {@literal null}.
		 * @return new instance of {@link WhenDocumentsDontMatch}.
		 */
		public static WhenDocumentsDontMatch whenNotMatchedOf(String value) {
			return new WhenDocumentsDontMatch(value);
		}

		/**
		 * Insert the document into the output collection.
		 *
		 * @return new instance of {@link WhenDocumentsDontMatch}.
		 */
		public static WhenDocumentsDontMatch insertNewDocument() {
			return whenNotMatchedOf("insert");
		}

		/**
		 * Discard the document - do not insert the document into the output collection.
		 *
		 * @return new instance of {@link WhenDocumentsDontMatch}.
		 */
		public static WhenDocumentsDontMatch discardDocument() {
			return whenNotMatchedOf("discard");
		}

		/**
		 * Stop and fail the aggregation operation. Does not revert already performed changes on previous documents.
		 *
		 * @return new instance of {@link WhenDocumentsDontMatch}.
		 */
		public static WhenDocumentsDontMatch failWhenNotMatch() {
			return whenNotMatchedOf("fail");
		}

		public Document toDocument(AggregationOperationContext context) {
			return new Document("whenNotMatched", value);
		}
	}

	/**
	 * Builder API to construct a {@link MergeOperation}.
	 *
	 * @author Christoph Strobl
	 * @since 2.3
	 */
	public static class MergeOperationBuilder {

		private String collection;
		private @Nullable String database;
		private UniqueMergeId id = UniqueMergeId.id();
		private @Nullable Let let;
		private @Nullable WhenDocumentsMatch whenMatched;
		private @Nullable WhenDocumentsDontMatch whenNotMatched;

		public MergeOperationBuilder() {}

		/**
		 * Required output collection name to store results to.
		 *
		 * @param collection must not be {@literal null} nor empty.
		 * @return this.
		 */
		public MergeOperationBuilder intoCollection(String collection) {

			Assert.hasText(collection, "Collection must not be null nor empty!");

			this.collection = collection;
			return this;
		}

		/**
		 * Optionally define a target database if different from the current one.
		 *
		 * @param database must not be {@literal null}.
		 * @return this.
		 */
		public MergeOperationBuilder inDatabase(String database) {

			this.database = database;
			return this;
		}

		/**
		 * Define the target to store results in.
		 *
		 * @param into must not be {@literal null}.
		 * @return this.
		 */
		public MergeOperationBuilder into(MergeOperationTarget into) {

			this.database = into.database;
			this.collection = into.collection;
			return this;
		}

		/**
		 * Define the target to store results in.
		 *
		 * @param target must not be {@literal null}.
		 * @return this.
		 */
		public MergeOperationBuilder target(MergeOperationTarget target) {
			return into(target);
		}

		/**
		 * Appends a single field or multiple fields that act as a unique identifier for a document. The identifier
		 * determines if a results document matches an already existing document in the output collection. <br />
		 * The aggregation results documents must contain the field(s) specified via {@code on}, unless it's the
		 * {@code _id} field.
		 *
		 * @param fields must not be {@literal null}.
		 * @return this.
		 */
		public MergeOperationBuilder on(String... fields) {
			return id(UniqueMergeId.ofIdFields(fields));
		}

		/**
		 * Set the identifier that determines if a results document matches an already existing document in the output
		 * collection.
		 *
		 * @param id must not be {@literal null}.
		 * @return this.
		 */
		public MergeOperationBuilder id(UniqueMergeId id) {

			this.id = id;
			return this;
		}

		/**
		 * Expose the variables defined by {@link Let} to the {@link WhenDocumentsMatch#updateWith(Aggregation) update
		 * aggregation}.
		 *
		 * @param let the variable expressions.
		 * @return this.
		 */
		public MergeOperationBuilder let(Let let) {

			this.let = let;
			return this;
		}

		/**
		 * Expose the variables defined by {@link Let} to the {@link WhenDocumentsMatch#updateWith(Aggregation) update
		 * aggregation}.
		 *
		 * @param let the variable expressions.
		 * @return this.
		 */
		public MergeOperationBuilder exposeVariablesOf(Let let) {
			return let(let);
		}

		/**
		 * The action to take place when documents already exist in the target collection.
		 *
		 * @param whenMatched must not be {@literal null}.
		 * @return this.
		 */
		public MergeOperationBuilder whenMatched(WhenDocumentsMatch whenMatched) {

			this.whenMatched = whenMatched;
			return this;
		}

		/**
		 * The action to take place when documents already exist in the target collection.
		 *
		 * @param whenMatched must not be {@literal null}.
		 * @return this.
		 */
		public MergeOperationBuilder whenDocumentsMatch(WhenDocumentsMatch whenMatched) {
			return whenMatched(whenMatched);
		}

		/**
		 * The {@link Aggregation action} to take place when documents already exist in the target collection.
		 *
		 * @param aggregation must not be {@literal null}.
		 * @return this.
		 */
		public MergeOperationBuilder whenDocumentsMatchApply(Aggregation aggregation) {
			return whenMatched(WhenDocumentsMatch.updateWith(aggregation));
		}

		/**
		 * The action to take place when documents do not already exist in the target collection.
		 *
		 * @param whenNotMatched must not be {@literal null}.
		 * @return this.
		 */
		public MergeOperationBuilder whenNotMatched(WhenDocumentsDontMatch whenNotMatched) {

			this.whenNotMatched = whenNotMatched;
			return this;
		}

		/**
		 * The action to take place when documents do not already exist in the target collection.
		 *
		 * @param whenNotMatched must not be {@literal null}.
		 * @return this.
		 */
		public MergeOperationBuilder whenDocumentsDontMatch(WhenDocumentsDontMatch whenNotMatched) {
			return whenNotMatched(whenNotMatched);
		}

		/**
		 * @return new instance of {@link MergeOperation}.
		 */
		public MergeOperation build() {
			return new MergeOperation(new MergeOperationTarget(database, collection), id, let, whenMatched, whenNotMatched);
		}
	}
}
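A hedged usage sketch of the builder API above (database, collection and option choices are illustrative):

// Expected to render roughly as:
// { $merge : { into : { db : "reporting", coll : "budgets" },
//              whenMatched : "replace", whenNotMatched : "insert" } }
// The "on" key is omitted because the default UniqueMergeId.id() is used.
MergeOperation merge = MergeOperation.builder() //
		.intoCollection("budgets") //
		.inDatabase("reporting") //
		.whenMatched(WhenDocumentsMatch.replaceDocument()) //
		.whenNotMatched(WhenDocumentsDontMatch.insertNewDocument()) //
		.build();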
@@ -0,0 +1,243 @@
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.util.Assert;

/**
 * {@link RedactOperation} allows restricting the content of a {@link Document} based on information stored within
 * itself.
 *
 * <pre class="code">
 * RedactOperation.builder() //
 * 		.when(Criteria.where("level").is(5)) //
 * 		.thenPrune() //
 * 		.otherwiseDescend() //
 * 		.build();
 * </pre>
 *
 * @author Christoph Strobl
 * @see <a href=
 *      "https://docs.mongodb.com/manual/reference/operator/aggregation/redact/">https://docs.mongodb.com/manual/reference/operator/aggregation/redact/</a>
 * @since 3.0
 */
public class RedactOperation implements AggregationOperation {

	/**
	 * Return fields at the current document level. Exclude embedded ones.
	 */
	public static final String DESCEND = "$$DESCEND";

	/**
	 * Return/Keep all fields at the current document/embedded level.
	 */
	public static final String KEEP = "$$KEEP";

	/**
	 * Exclude all fields at the current document/embedded level.
	 */
	public static final String PRUNE = "$$PRUNE";

	private final AggregationExpression condition;

	/**
	 * Create new {@link RedactOperation}.
	 *
	 * @param condition Any {@link AggregationExpression} that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or
	 *          {@literal $$KEEP}. Must not be {@literal null}.
	 */
	public RedactOperation(AggregationExpression condition) {

		Assert.notNull(condition, "Condition must not be null!");
		this.condition = condition;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
	 */
	@Override
	public Document toDocument(AggregationOperationContext context) {
		return new Document("$redact", condition.toDocument(context));
	}

	/**
	 * Obtain a new instance of {@link RedactOperationBuilder} to specify condition and outcome of the {@literal $redact}
	 * operation.
	 *
	 * @return new instance of {@link RedactOperationBuilder}.
	 */
	public static RedactOperationBuilder builder() {
		return new RedactOperationBuilder();
	}

	/**
	 * Builder to create new instances of {@link RedactOperation}.
	 *
	 * @author Christoph Strobl
	 */
	public static class RedactOperationBuilder {

		private Object when;
		private Object then;
		private Object otherwise;

		private RedactOperationBuilder() {}

		/**
		 * Specify the evaluation condition.
		 *
		 * @param criteria must not be {@literal null}.
		 * @return this.
		 */
		public RedactOperationBuilder when(CriteriaDefinition criteria) {

			this.when = criteria;
			return this;
		}

		/**
		 * Specify the evaluation condition.
		 *
		 * @param condition must not be {@literal null}.
		 * @return this.
		 */
		public RedactOperationBuilder when(AggregationExpression condition) {

			this.when = condition;
			return this;
		}

		/**
		 * Specify the evaluation condition.
		 *
		 * @param condition must not be {@literal null}.
		 * @return this.
		 */
		public RedactOperationBuilder when(Document condition) {

			this.when = condition;
			return this;
		}

		/**
		 * Return fields at the current document level and exclude embedded ones if the condition is met.
		 *
		 * @return this.
		 */
		public RedactOperationBuilder thenDescend() {
			return then(DESCEND);
		}

		/**
		 * Return/Keep all fields at the current document/embedded level if the condition is met.
		 *
		 * @return this.
		 */
		public RedactOperationBuilder thenKeep() {
			return then(KEEP);
		}

		/**
		 * Exclude all fields at the current document/embedded level if the condition is met.
		 *
		 * @return this.
		 */
		public RedactOperationBuilder thenPrune() {
			return then(PRUNE);
		}

		/**
		 * Define the outcome (anything that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or {@literal $$KEEP})
		 * when the condition is met.
		 *
		 * @param then must not be {@literal null}.
		 * @return this.
		 */
		public RedactOperationBuilder then(Object then) {

			this.then = then;
			return this;
		}

		/**
		 * Return fields at the current document level and exclude embedded ones if the condition is not met.
		 *
		 * @return this.
		 */
		public RedactOperationBuilder otherwiseDescend() {
			return otherwise(DESCEND);
		}

		/**
		 * Return/Keep all fields at the current document/embedded level if the condition is not met.
		 *
		 * @return this.
		 */
		public RedactOperationBuilder otherwiseKeep() {
			return otherwise(KEEP);
		}

		/**
		 * Exclude all fields at the current document/embedded level if the condition is not met.
		 *
		 * @return this.
		 */
		public RedactOperationBuilder otherwisePrune() {
			return otherwise(PRUNE);
		}

		/**
		 * Define the outcome (anything that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or {@literal $$KEEP})
		 * when the condition is not met.
		 *
		 * @param otherwise must not be {@literal null}.
		 * @return this.
		 */
		public RedactOperationBuilder otherwise(Object otherwise) {

			this.otherwise = otherwise;
			return this;
		}

		/**
		 * @return new instance of {@link RedactOperation}.
		 */
		public RedactOperation build() {
			return new RedactOperation(when().then(then).otherwise(otherwise));
		}

		private ThenBuilder when() {

			if (when instanceof CriteriaDefinition) {
				return ConditionalOperators.Cond.when((CriteriaDefinition) when);
			}
			if (when instanceof AggregationExpression) {
				return ConditionalOperators.Cond.when((AggregationExpression) when);
			}
			if (when instanceof Document) {
				return ConditionalOperators.Cond.when((Document) when);
			}

			throw new IllegalArgumentException(String.format(
					"Invalid Condition. Expected CriteriaDefinition, AggregationExpression or Document but was %s.", when));
		}
	}
}
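Used within a pipeline, the operation might look like the following hedged sketch (the accessLevel field is illustrative):

// Keeps documents with accessLevel below 5 and prunes everything else; expected
// to render roughly as:
// { $redact : { $cond : { if : { "accessLevel" : { $lt : 5 } }, then : "$$KEEP", else : "$$PRUNE" } } }
RedactOperation redact = RedactOperation.builder() //
		.when(Criteria.where("accessLevel").lt(5)) //
		.thenKeep() //
		.otherwisePrune() //
		.build();

Aggregation aggregation = Aggregation.newAggregation(redact);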
@@ -15,23 +15,17 @@
 */
package org.springframework.data.mongodb.core.aggregation;

import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.stream.Collectors;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
import org.springframework.data.mongodb.core.aggregation.SetOperation.FieldAppender.ValueAppender;
import org.springframework.lang.Nullable;

/**
 * Adds new fields to documents. {@code $set} outputs documents that contain all existing fields from the input
 * documents and newly added fields.
 *
 * <pre class="code">
 * SetOperation.set("totalHomework").toValue("A+").and().set("totalQuiz").toValue("B-")
 * </pre>
@@ -41,10 +35,7 @@ import org.springframework.lang.Nullable;
 * @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/set/">MongoDB Aggregation Framework:
 *      $set</a>
 */
public class SetOperation implements InheritsFieldsAggregationOperation {

	private Map<Object, Object> valueMap;
	private ExposedFields exposedFields = ExposedFields.empty();
public class SetOperation extends DocumentEnhancingOperation {

	/**
	 * Create new instance of {@link SetOperation} adding map keys as exposed fields.
@@ -52,11 +43,7 @@ public class SetOperation implements InheritsFieldsAggregationOperation {
	 * @param source must not be {@literal null}.
	 */
	private SetOperation(Map<Object, Object> source) {

		this.valueMap = new LinkedHashMap<>(source);
		for (Object key : source.keySet()) {
			this.exposedFields = add(key);
		}
		super(source);
	}

	/**
@@ -97,7 +84,7 @@ public class SetOperation implements InheritsFieldsAggregationOperation {
	 */
	public SetOperation set(Object field, Object value) {

		LinkedHashMap<Object, Object> target = new LinkedHashMap<>(this.valueMap);
		LinkedHashMap<Object, Object> target = new LinkedHashMap<>(getValueMap());
		target.put(field, value);

		return new SetOperation(target);
@@ -109,73 +96,12 @@ public class SetOperation implements InheritsFieldsAggregationOperation {
	 * @return new instance of {@link FieldAppender}.
	 */
	public FieldAppender and() {
		return new FieldAppender(this.valueMap);
		return new FieldAppender(getValueMap());
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
	 */
	@Override
	public Document toDocument(AggregationOperationContext context) {

		InheritingExposedFieldsAggregationOperationContext operationContext = new InheritingExposedFieldsAggregationOperationContext(
				exposedFields, context);

		if (valueMap.size() == 1) {
			return context
					.getMappedObject(new Document("$set", toSetEntry(valueMap.entrySet().iterator().next(), operationContext)));
		}

		Document $set = new Document();
		valueMap.entrySet().stream().map(it -> toSetEntry(it, operationContext)).forEach($set::putAll);
		return context.getMappedObject(new Document("$set", $set));
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
	 */
	@Override
	public ExposedFields getFields() {
		return exposedFields;
	}

	private ExposedFields add(Object field) {

		if (field instanceof Field) {
			return exposedFields.and(new ExposedField((Field) field, true));
		}
		if (field instanceof String) {
			return exposedFields.and(new ExposedField(Fields.field((String) field), true));
		}

		throw new IllegalArgumentException(String.format("Expected %s to be a field/property.", field));
	}

	private static Document toSetEntry(Entry<Object, Object> entry, AggregationOperationContext context) {

		String field = entry.getKey() instanceof String ? context.getReference((String) entry.getKey()).getRaw()
				: context.getReference((Field) entry.getKey()).getRaw();

		Object value = computeValue(entry.getValue(), context);

		return new Document(field, value);
	}

	private static Object computeValue(Object value, AggregationOperationContext context) {

		if (value instanceof Field) {
			return context.getReference((Field) value).toString();
		}
		if (value instanceof AggregationExpression) {
			return ((AggregationExpression) value).toDocument(context);
		}
		if (value instanceof Collection) {
			return ((Collection) value).stream().map(it -> computeValue(it, context)).collect(Collectors.toList());
		}

		return value;
	protected String mongoOperator() {
		return "$set";
	}

	/**
@@ -217,6 +143,13 @@ public class SetOperation implements InheritsFieldsAggregationOperation {
			valueMap.put(field, value instanceof String ? Fields.fields((String) value) : value);
			return FieldAppender.this.build();
		}

		@Override
		public SetOperation withValueOfExpression(String operation, Object... values) {

			valueMap.put(field, new ExpressionProjection(operation, values));
			return FieldAppender.this.build();
		}
	};
}

@@ -226,6 +159,7 @@ public class SetOperation implements InheritsFieldsAggregationOperation {

	/**
	 * @author Christoph Strobl
	 * @author Mark Paluch
	 * @since 3.0
	 */
	public interface ValueAppender {
@@ -245,6 +179,15 @@ public class SetOperation implements InheritsFieldsAggregationOperation {
	 * @return new instance of {@link SetOperation}.
	 */
	SetOperation toValueOf(Object value);

	/**
	 * Adds a generic projection for the current field.
	 *
	 * @param operation the operation key, e.g. {@code $add}.
	 * @param values the values to be set for the projection operation.
	 * @return new instance of {@link SetOperation}.
	 */
	SetOperation withValueOfExpression(String operation, Object... values);
}
}
}
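The refactoring onto DocumentEnhancingOperation does not change the public API; the javadoc example above still applies. For completeness, a sketch of the rendered stage (field names taken from that example):

// Renders roughly as: { $set : { "totalHomework" : "A+", "totalQuiz" : "B-" } }
SetOperation set = SetOperation.set("totalHomework").toValue("A+") //
		.and().set("totalQuiz").toValue("B-");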
@@ -127,6 +127,15 @@ public class TypeBasedAggregationOperationContext implements AggregationOperationContext {
		return Fields.fields(fields.toArray(new String[0]));
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#continueOnMissingFieldReference()
	 */
	@Override
	public AggregationOperationContext continueOnMissingFieldReference() {
		return new RelaxedTypeBasedAggregationOperationContext(type, mappingContext, mapper);
	}

	protected FieldReference getReferenceFor(Field field) {

		PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext

@@ -296,7 +296,7 @@ public class VariableOperators {
			return toLet(ExposedFields.synthetic(Fields.fields(getVariableNames())), context);
		}

		private String[] getVariableNames() {
		String[] getVariableNames() {

			String[] varNames = new String[this.vars.size()];
			for (int i = 0; i < this.vars.size(); i++) {
@@ -37,7 +37,6 @@ import org.bson.json.JsonReader;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
@@ -50,6 +49,7 @@ import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.PreferredConstructor;
|
||||
import org.springframework.data.mapping.PreferredConstructor.Parameter;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
import org.springframework.data.mapping.model.DefaultSpELExpressionEvaluator;
|
||||
@@ -64,6 +64,7 @@ import org.springframework.data.mongodb.CodecRegistryProvider;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
|
||||
@@ -91,6 +92,8 @@ import com.mongodb.DBRef;
|
||||
* @author Christoph Strobl
|
||||
* @author Jordi Llach
|
||||
* @author Mark Paluch
|
||||
* @author Roman Puchkovskiy
|
||||
* @author Heesu Jung
|
||||
*/
|
||||
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
|
||||
|
||||
@@ -110,6 +113,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
protected @Nullable CodecRegistryProvider codecRegistryProvider;
|
||||
|
||||
private SpELContext spELContext;
|
||||
private @Nullable EntityCallbacks entityCallbacks;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link DbRefResolver} and {@link MappingContext}.
|
||||
@@ -212,6 +216,26 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
this.applicationContext = applicationContext;
|
||||
this.spELContext = new SpELContext(this.spELContext, applicationContext);
|
||||
|
||||
if (entityCallbacks == null) {
|
||||
setEntityCallbacks(EntityCallbacks.create(applicationContext));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link EntityCallbacks} instance to use when invoking
|
||||
* {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link AfterConvertCallback}.
|
||||
* <p />
|
||||
* Overrides potentially existing {@link EntityCallbacks}.
|
||||
*
|
||||
* @param entityCallbacks must not be {@literal null}.
|
||||
* @throws IllegalArgumentException if the given instance is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
public void setEntityCallbacks(EntityCallbacks entityCallbacks) {
|
||||
|
||||
Assert.notNull(entityCallbacks, "EntityCallbacks must not be null!");
|
||||
this.entityCallbacks = entityCallbacks;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -228,11 +252,9 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App

	@Nullable
	@SuppressWarnings("unchecked")
	private <S extends Object> S read(TypeInformation<S> type, @Nullable Bson bson, ObjectPath path) {
	private <S extends Object> S read(TypeInformation<S> type, Bson bson, ObjectPath path) {

		if (null == bson) {
			return null;
		}
		Assert.notNull(bson, "Bson must not be null!");

		TypeInformation<? extends S> typeToUse = typeMapper.readType(bson, type);
		Class<? extends S> rawType = typeToUse.getType();
@@ -1259,9 +1281,16 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
		}

		if (conversions.isSimpleType(obj.getClass())) {
			// Doesn't need conversion
			return getPotentiallyConvertedSimpleWrite(obj,
					typeInformation != null ? typeInformation.getType() : Object.class);

			Class<?> conversionTargetType;

			if (typeInformation != null && conversions.isSimpleType(typeInformation.getType())) {
				conversionTargetType = typeInformation.getType();
			} else {
				conversionTargetType = Object.class;
			}

			return getPotentiallyConvertedSimpleWrite(obj, conversionTargetType);
		}

		if (obj instanceof List) {
@@ -1571,7 +1600,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App

	@Nullable
	private <T> T readAndConvertDBRef(@Nullable DBRef dbref, TypeInformation<?> type, ObjectPath path,
			final Class<?> rawType) {
			@Nullable Class<?> rawType) {

		List<T> result = bulkReadAndConvertDBRefs(Collections.singletonList(dbref), type, path, rawType);
		return CollectionUtils.isEmpty(result) ? null : result.iterator().next();
@@ -1594,7 +1623,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App

	@SuppressWarnings("unchecked")
	private <T> List<T> bulkReadAndConvertDBRefs(List<DBRef> dbrefs, TypeInformation<?> type, ObjectPath path,
			final Class<?> rawType) {
			@Nullable Class<?> rawType) {

		if (CollectionUtils.isEmpty(dbrefs)) {
			return Collections.emptyList();
@@ -1605,23 +1634,27 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
				: bulkReadRefs(dbrefs);
		String collectionName = dbrefs.iterator().next().getCollectionName();

		List<T> targeList = new ArrayList<>(dbrefs.size());
		List<T> targetList = new ArrayList<>(dbrefs.size());

		for (Document document : referencedRawDocuments) {

			T target = null;
			if (document != null) {
				maybeEmitEvent(new AfterLoadEvent<>(document, (Class<T>) rawType, collectionName));
			}

			final T target = (T) read(type, document, path);
			targeList.add(target);
				maybeEmitEvent(
						new AfterLoadEvent<>(document, (Class<T>) (rawType != null ? rawType : Object.class), collectionName));
				target = (T) read(type, document, path);
			}

			if (target != null) {
				maybeEmitEvent(new AfterConvertEvent<>(document, target, collectionName));
				target = maybeCallAfterConvert(target, document, collectionName);
			}

			targetList.add(target);
		}

		return targeList;
		return targetList;
	}

	private void maybeEmitEvent(MongoMappingEvent<?> event) {
@@ -1635,6 +1668,15 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
		return this.applicationContext != null;
	}

	protected <T> T maybeCallAfterConvert(T object, Document document, String collection) {

		if (null != entityCallbacks) {
			return entityCallbacks.callback(AfterConvertCallback.class, object, document, collection);
		}

		return object;
	}

	/**
	 * Performs the fetch operation for the given {@link DBRef}.
	 *
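For reviewers trying the new hook: once an EntityCallbacks instance is set, maybeCallAfterConvert runs a registered AfterConvertCallback for every object the converter materializes, including bulk-resolved DBRefs. A minimal sketch, assuming a hypothetical Person domain type and markAsLoaded helper (neither is part of this changeset):

	// Register a callback that post-processes every Person read through this converter.
	AfterConvertCallback<Person> auditReads = (entity, document, collection) -> {
		entity.markAsLoaded(collection); // hypothetical helper on the domain type
		return entity;
	};

	MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
	converter.setEntityCallbacks(EntityCallbacks.create(auditReads));

When the converter is created through an ApplicationContext instead, callback beans are discovered automatically via EntityCallbacks.create(applicationContext), as the diff above shows.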
@@ -24,7 +24,6 @@ import org.bson.BsonValue;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.bson.types.ObjectId;

import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.domain.Example;
@@ -175,7 +174,7 @@ public class QueryMapper {
		}

		Document mappedSort = new Document();
		for(Map.Entry<String,Object> entry : BsonUtils.asMap(sortObject).entrySet()) {
		for (Map.Entry<String, Object> entry : BsonUtils.asMap(sortObject).entrySet()) {

			Field field = createPropertyField(entity, entry.getKey(), mappingContext);
			mappedSort.put(field.getMappedKey(), entry.getValue());
@@ -420,7 +419,7 @@ public class QueryMapper {
			return false;
		}

		Class<? extends Object> type = value.getClass();
		Class<?> type = value.getClass();
		MongoPersistentProperty property = documentField.getProperty();

		if (property.getActualType().isAssignableFrom(type)) {
@@ -444,7 +443,7 @@ public class QueryMapper {
	protected Object convertSimpleOrDocument(Object source, @Nullable MongoPersistentEntity<?> entity) {

		if (source instanceof Example) {
			return exampleMapper.getMappedExample((Example) source, entity);
			return exampleMapper.getMappedExample((Example<?>) source, entity);
		}

		if (source instanceof List) {
@@ -923,6 +922,8 @@ public class QueryMapper {
	 */
	protected static class MetadataBackedField extends Field {

		private static final Pattern POSITIONAL_PARAMETER_PATTERN = Pattern.compile("\\.\\$(\\[.*?\\])?|\\.\\d+");
		private static final Pattern DOT_POSITIONAL_PATTERN = Pattern.compile("\\.\\d+");
		private static final String INVALID_ASSOCIATION_REFERENCE = "Invalid path reference %s! Associations can only be pointed to directly or via their id property!";

		private final MongoPersistentEntity<?> entity;
@@ -964,7 +965,7 @@ public class QueryMapper {
		this.entity = entity;
		this.mappingContext = context;

		this.path = getPath(name);
		this.path = getPath(removePlaceholders(POSITIONAL_PARAMETER_PATTERN, name));
		this.property = path == null ? property : path.getLeafProperty();
		this.association = findAssociation();
	}
@@ -1072,7 +1073,7 @@ public class QueryMapper {
	}

	/**
	 * Returns the {@link PersistentPropertyPath} for the given <code>pathExpression</code>.
	 * Returns the {@link PersistentPropertyPath} for the given {@code pathExpression}.
	 *
	 * @param pathExpression
	 * @return
@@ -1080,8 +1081,8 @@ public class QueryMapper {
	@Nullable
	private PersistentPropertyPath<MongoPersistentProperty> getPath(String pathExpression) {

		String rawPath = pathExpression.replaceAll("\\.\\d+", "") //
				.replaceAll(POSITIONAL_OPERATOR.pattern(), "");
		String rawPath = removePlaceholders(POSITIONAL_OPERATOR,
				removePlaceholders(DOT_POSITIONAL_PATTERN, pathExpression));

		PropertyPath path = forName(rawPath);
		if (path == null || isPathToJavaLangClassProperty(path)) {
@@ -1158,7 +1159,7 @@ public class QueryMapper {
	 * @return
	 */
	protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
		return new PositionParameterRetainingPropertyKeyConverter(name);
		return new PositionParameterRetainingPropertyKeyConverter(name, mappingContext);
	}

	/**
@@ -1169,7 +1170,15 @@ public class QueryMapper {
	 * @since 1.7
	 */
	protected Converter<MongoPersistentProperty, String> getAssociationConverter() {
		return new AssociationConverter(getAssociation());
		return new AssociationConverter(name, getAssociation());
	}

	protected MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> getMappingContext() {
		return mappingContext;
	}

	private static String removePlaceholders(Pattern pattern, String raw) {
		return pattern.matcher(raw).replaceAll("");
	}

	/**
@@ -1180,8 +1189,9 @@ public class QueryMapper {

		private final KeyMapper keyMapper;

		public PositionParameterRetainingPropertyKeyConverter(String rawKey) {
			this.keyMapper = new KeyMapper(rawKey);
		public PositionParameterRetainingPropertyKeyConverter(String rawKey,
				MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> ctx) {
			this.keyMapper = new KeyMapper(rawKey, ctx);
		}

		/*
@@ -1223,7 +1233,8 @@ public class QueryMapper {

		private final Iterator<String> iterator;

		public KeyMapper(String key) {
		public KeyMapper(String key,
				MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

			this.iterator = Arrays.asList(key.split("\\.")).iterator();
			this.iterator.next();
@@ -1243,6 +1254,7 @@ public class QueryMapper {
			while (inspect) {

				String partial = iterator.next();

				boolean isPositional = (isPositionalParameter(partial) && (property.isMap() || property.isCollectionLike()));

				if (isPositional) {
@@ -1255,7 +1267,7 @@ public class QueryMapper {
			return mappedName.toString();
		}

		private static boolean isPositionalParameter(String partial) {
		static boolean isPositionalParameter(String partial) {

			if ("$".equals(partial)) {
				return true;
@@ -1283,6 +1295,7 @@ public class QueryMapper {
	 */
	protected static class AssociationConverter implements Converter<MongoPersistentProperty, String> {

		private final String name;
		private final MongoPersistentProperty property;
		private boolean associationFound;

@@ -1291,10 +1304,11 @@ public class QueryMapper {
		 *
		 * @param association must not be {@literal null}.
		 */
		public AssociationConverter(Association<MongoPersistentProperty> association) {
		public AssociationConverter(String name, Association<MongoPersistentProperty> association) {

			Assert.notNull(association, "Association must not be null!");
			this.property = association.getInverse();
			this.name = name;
		}

		/*
@@ -1312,6 +1326,12 @@ public class QueryMapper {
			associationFound = true;
		}

		if (associationFound) {
			if (name.endsWith("$") && property.isCollectionLike()) {
				return source.getFieldName() + ".$";
			}
		}

		return source.getFieldName();
	}
}
@@ -272,6 +272,7 @@ public class UpdateMapper extends QueryMapper {
	 *
	 * @author Thomas Darimont
	 * @author Oliver Gierke
	 * @author Christoph Strobl
	 */
	private static class MetadataBackedUpdateField extends MetadataBackedField {

@@ -289,7 +290,7 @@ public class UpdateMapper extends QueryMapper {
		public MetadataBackedUpdateField(MongoPersistentEntity<?> entity, String key,
				MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

			super(key.replaceAll("\\.\\$(\\[.*\\])?", ""), entity, mappingContext);
			super(key, entity, mappingContext);
			this.key = key;
		}

@@ -308,7 +309,7 @@ public class UpdateMapper extends QueryMapper {
		 */
		@Override
		protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
			return new PositionParameterRetainingPropertyKeyConverter(key);
			return new PositionParameterRetainingPropertyKeyConverter(key, getMappingContext());
		}

		/*
@@ -317,7 +318,7 @@ public class UpdateMapper extends QueryMapper {
		 */
		@Override
		protected Converter<MongoPersistentProperty, String> getAssociationConverter() {
			return new UpdateAssociationConverter(getAssociation(), key);
			return new UpdateAssociationConverter(getMappingContext(), getAssociation(), key);
		}

		/**
@@ -334,10 +335,12 @@ public class UpdateMapper extends QueryMapper {
		 *
		 * @param association must not be {@literal null}.
		 */
		public UpdateAssociationConverter(Association<MongoPersistentProperty> association, String key) {
		public UpdateAssociationConverter(
				MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
				Association<MongoPersistentProperty> association, String key) {

			super(association);
			this.mapper = new KeyMapper(key);
			super(key, association);
			this.mapper = new KeyMapper(key, mappingContext);
		}

		/*
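The retained positional placeholders matter mostly for update keys. A minimal sketch of a query/update pair that exercises the reworked key mapping (the Order type and its items list are hypothetical):

	// "$" in the update key refers to the array element matched by the query.
	Query query = new Query(Criteria.where("items.name").is("widget"));
	Update update = new Update().set("items.$.price", 9.99);
	mongoTemplate.updateFirst(query, update, Order.class);

The "$" segment now survives the property-to-field translation instead of being stripped up front, so the mapped update still targets the matched array element.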
@@ -1,84 +0,0 @@
/*
 * Copyright 2019-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.index;

import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListSet;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @author Christoph Strobl
 * @since 2.2
 */
class JustOnceLogger {

	private static final Map<String, Set<String>> KNOWN_LOGS = new ConcurrentHashMap<>();
	private static final String AUTO_INDEX_CREATION_CONFIG_CHANGE;

	static {
		AUTO_INDEX_CREATION_CONFIG_CHANGE = "Automatic index creation will be disabled by default as of Spring Data MongoDB 3.x."
				+ System.lineSeparator()
				+ "\tPlease use 'MongoMappingContext#setAutoIndexCreation(boolean)' or override 'MongoConfigurationSupport#autoIndexCreation()' to be explicit."
				+ System.lineSeparator()
				+ "\tHowever, we recommend setting up indices manually in an application ready block. You may use index derivation there as well."
				+ System.lineSeparator() + System.lineSeparator() //
				+ "\t> -----------------------------------------------------------------------------------------"
				+ System.lineSeparator() //
				+ "\t> @EventListener(ApplicationReadyEvent.class)" + System.lineSeparator() //
				+ "\t> public void initIndicesAfterStartup() {" + System.lineSeparator() //
				+ "\t>" + System.lineSeparator() //
				+ "\t> IndexOperations indexOps = mongoTemplate.indexOps(DomainType.class);" + System.lineSeparator()//
				+ "\t>" + System.lineSeparator() //
				+ "\t> IndexResolver resolver = new MongoPersistentEntityIndexResolver(mongoMappingContext);"
				+ System.lineSeparator() //
				+ "\t> resolver.resolveIndexFor(DomainType.class).forEach(indexOps::ensureIndex);" + System.lineSeparator() //
				+ "\t> }" + System.lineSeparator() //
				+ "\t> -----------------------------------------------------------------------------------------"
				+ System.lineSeparator();
	}

	static void logWarnIndexCreationConfigurationChange(String loggerName) {
		warnOnce(loggerName, AUTO_INDEX_CREATION_CONFIG_CHANGE);
	}

	static void warnOnce(String loggerName, String message) {

		Logger logger = LoggerFactory.getLogger(loggerName);
		if (!logger.isWarnEnabled()) {
			return;
		}

		if (!KNOWN_LOGS.containsKey(loggerName)) {

			KNOWN_LOGS.put(loggerName, new ConcurrentSkipListSet<>(Collections.singleton(message)));
			logger.warn(message);
		} else {

			Set<String> messages = KNOWN_LOGS.get(loggerName);
			if (messages.contains(message)) {
				return;
			}

			messages.add(message);
			logger.warn(message);
		}
	}
}
@@ -139,8 +139,6 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma

		for (IndexDefinition indexDefinition : indexResolver.resolveIndexFor(entity.getTypeInformation())) {

			JustOnceLogger.logWarnIndexCreationConfigurationChange(this.getClass().getName());

			IndexDefinitionHolder indexToCreate = indexDefinition instanceof IndexDefinitionHolder
					? (IndexDefinitionHolder) indexDefinition
					: new IndexDefinitionHolder("", indexDefinition, collection);

@@ -142,8 +142,6 @@ public class ReactiveMongoPersistentEntityIndexCreator {

	Mono<String> createIndex(IndexDefinitionHolder indexDefinition) {

		JustOnceLogger.logWarnIndexCreationConfigurationChange(this.getClass().getName());

		return operationsProvider.indexOps(indexDefinition.getCollection()).ensureIndex(indexDefinition) //
				.onErrorResume(ReactiveMongoPersistentEntityIndexCreator::isDataIntegrityViolation,
						e -> translateException(e, indexDefinition));
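With the one-time warning gone and automatic index creation becoming opt-in, the setup the removed logger used to print is worth restating as code. A sketch of the recommended application-ready block, taken directly from that message (DomainType and the injected mongoTemplate/mongoMappingContext are placeholders):

	@EventListener(ApplicationReadyEvent.class)
	public void initIndicesAfterStartup() {

		// derive the index definitions from the mapping metadata and create them explicitly
		IndexOperations indexOps = mongoTemplate.indexOps(DomainType.class);

		IndexResolver resolver = new MongoPersistentEntityIndexResolver(mongoMappingContext);
		resolver.resolveIndexFor(DomainType.class).forEach(indexOps::ensureIndex);
	}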
@@ -37,6 +37,7 @@ import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

/**
@@ -63,6 +64,8 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
	private final @Nullable String collation;
	private final @Nullable Expression collationExpression;

	private final ShardKey shardKey;

	/**
	 * Creates a new {@link BasicMongoPersistentEntity} with the given {@link TypeInformation}. Will default the
	 * collection name to the entity's simple type name.
@@ -92,6 +95,27 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
			this.collation = null;
			this.collationExpression = null;
		}

		this.shardKey = detectShardKey();
	}

	private ShardKey detectShardKey() {

		if (!isAnnotationPresent(Sharded.class)) {
			return ShardKey.none();
		}

		Sharded sharded = getRequiredAnnotation(Sharded.class);

		String[] keyProperties = sharded.shardKey();
		if (ObjectUtils.isEmpty(keyProperties)) {
			keyProperties = new String[] { "_id" };
		}

		ShardKey shardKey = ShardingStrategy.HASH.equals(sharded.shardingStrategy()) ? ShardKey.hash(keyProperties)
				: ShardKey.range(keyProperties);

		return sharded.immutableKey() ? ShardKey.immutable(shardKey) : shardKey;
	}

	/*
@@ -160,6 +184,11 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
				: null;
	}

	@Override
	public ShardKey getShardKey() {
		return shardKey;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mapping.model.BasicPersistentEntity#verify()
@@ -42,8 +42,7 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
	private static final FieldNamingStrategy DEFAULT_NAMING_STRATEGY = PropertyNameFieldNamingStrategy.INSTANCE;

	private FieldNamingStrategy fieldNamingStrategy = DEFAULT_NAMING_STRATEGY;
	private @Nullable ApplicationContext context;
	private boolean autoIndexCreation = true;
	private boolean autoIndexCreation = false;

	/**
	 * Creates a new {@link MongoMappingContext}.
@@ -99,8 +98,6 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
	public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {

		super.setApplicationContext(applicationContext);

		this.context = applicationContext;
	}

	/**
@@ -108,7 +105,8 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
	 * <strong>NOTE:</strong> Index creation should happen at a well-defined time that is ideally controlled by the
	 * application itself.
	 *
	 * @return {@literal true} when auto-index creation is enabled; {@literal false} otherwise.
	 * @return {@literal true} when auto-index creation is enabled; {@literal false} otherwise. <br />
	 *         <strong>INFO</strong>: As of 3.x the default is set to {@literal false}; it was {@literal true} in 2.x.
	 * @since 2.2
	 * @see org.springframework.data.mongodb.core.index.Indexed
	 */
@@ -121,7 +119,7 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
	 * <strong>NOTE:</strong> Index creation should happen at a well-defined time that is ideally controlled by the
	 * application itself.
	 *
	 * @param autoCreateIndexes set to {@literal false} to disable auto-index creation.
	 * @param autoCreateIndexes set to {@literal true} to enable auto-index creation.
	 * @since 2.2
	 * @see org.springframework.data.mongodb.core.index.Indexed
	 */
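A sketch of how an application opts back in, assuming the usual Java-config setup (the configuration class and database name are placeholders); MongoConfigurationSupport#autoIndexCreation() is the hook the former warning pointed at:

	@Configuration
	class MongoConfig extends AbstractMongoClientConfiguration {

		@Override
		protected String getDatabaseName() {
			return "example"; // placeholder database name
		}

		@Override
		protected boolean autoIndexCreation() {
			return true; // restores the 2.x behavior for @Indexed / @CompoundIndex
		}
	}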
@@ -77,4 +77,20 @@ public interface MongoPersistentEntity<T> extends PersistentEntity<T, MongoPersi
		return getCollation() != null;
	}

	/**
	 * Get the entity's shard key if defined.
	 *
	 * @return {@link ShardKey#none()} if not set.
	 * @since 3.0
	 */
	ShardKey getShardKey();

	/**
	 * @return {@literal true} if the {@link #getShardKey() shard key} is sharded.
	 * @since 3.0
	 */
	default boolean isSharded() {
		return getShardKey().isSharded();
	}

}
@@ -0,0 +1,148 @@
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.mapping;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import org.bson.Document;
import org.springframework.lang.Nullable;
import org.springframework.util.ObjectUtils;

/**
 * Value object representing an entity's <a href="https://docs.mongodb.com/manual/core/sharding-shard-key/">Shard
 * Key</a> used to distribute documents across a sharded MongoDB cluster.
 * <p />
 * {@link ShardKey#isImmutable() Immutable} shard keys indicate a fixed value that is not updated (see
 * <a href="https://docs.mongodb.com/manual/core/sharding-shard-key/#change-a-document-s-shard-key-value">MongoDB
 * Reference: Change a Document's Shard Key Value</a>), which allows skipping server round trips in cases where a
 * potential shard key change might have occurred.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 3.0
 */
public class ShardKey {

	private static final ShardKey NONE = new ShardKey(Collections.emptyList(), null, true);

	private final List<String> propertyNames;
	private final @Nullable ShardingStrategy shardingStrategy;
	private final boolean immutable;

	private ShardKey(List<String> propertyNames, @Nullable ShardingStrategy shardingStrategy, boolean immutable) {

		this.propertyNames = propertyNames;
		this.shardingStrategy = shardingStrategy;
		this.immutable = immutable;
	}

	/**
	 * @return the number of properties used to form the shard key.
	 */
	public int size() {
		return propertyNames.size();
	}

	/**
	 * @return the unmodifiable collection of property names forming the shard key.
	 */
	public Collection<String> getPropertyNames() {
		return propertyNames;
	}

	/**
	 * @return {@literal true} if the shard key of a document does not change.
	 * @see <a href="https://docs.mongodb.com/manual/core/sharding-shard-key/#change-a-document-s-shard-key-value">MongoDB
	 *      Reference: Change a Document's Shard Key Value</a>
	 */
	public boolean isImmutable() {
		return immutable;
	}

	/**
	 * Return whether the shard key represents a sharded key. Return {@literal false} if the key is not sharded.
	 *
	 * @return {@literal true} if the key is sharded; {@literal false} otherwise.
	 */
	public boolean isSharded() {
		return !propertyNames.isEmpty();
	}

	/**
	 * Get the raw MongoDB representation of the {@link ShardKey}.
	 *
	 * @return never {@literal null}.
	 */
	public Document getDocument() {

		Document doc = new Document();
		for (String field : propertyNames) {
			doc.append(field, shardingValue());
		}
		return doc;
	}

	private Object shardingValue() {
		return ObjectUtils.nullSafeEquals(ShardingStrategy.HASH, shardingStrategy) ? "hash" : 1;
	}

	/**
	 * {@link ShardKey} indicating no shard key has been defined.
	 *
	 * @return {@link #NONE}
	 */
	public static ShardKey none() {
		return NONE;
	}

	/**
	 * Create a new {@link ShardingStrategy#RANGE} shard key.
	 *
	 * @param propertyNames must not be {@literal null}.
	 * @return new instance of {@link ShardKey}.
	 */
	public static ShardKey range(String... propertyNames) {
		return new ShardKey(Arrays.asList(propertyNames), ShardingStrategy.RANGE, false);
	}

	/**
	 * Create a new {@link ShardingStrategy#HASH} shard key.
	 *
	 * @param propertyNames must not be {@literal null}.
	 * @return new instance of {@link ShardKey}.
	 */
	public static ShardKey hash(String... propertyNames) {
		return new ShardKey(Arrays.asList(propertyNames), ShardingStrategy.HASH, false);
	}

	/**
	 * Turn the given {@link ShardKey} into an {@link #isImmutable() immutable} one.
	 *
	 * @param shardKey must not be {@literal null}.
	 * @return new instance of {@link ShardKey} if the given shard key is not already immutable.
	 */
	public static ShardKey immutable(ShardKey shardKey) {

		if (shardKey.isImmutable()) {
			return shardKey;
		}

		return new ShardKey(shardKey.propertyNames, shardKey.shardingStrategy, true);
	}
}
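A short sketch of the raw key documents the value object produces, following getDocument() and shardingValue() above (note the code as shown uses the literal "hash" for hashed keys):

	ShardKey range = ShardKey.range("country", "userId");
	// range.getDocument()  -> { "country" : 1, "userId" : 1 }

	ShardKey hashed = ShardKey.hash("userId");
	// hashed.getDocument() -> { "userId" : "hash" }

	ShardKey frozen = ShardKey.immutable(hashed);
	// frozen.isImmutable() -> true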
@@ -0,0 +1,95 @@
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.mapping;

import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.core.annotation.AliasFor;
import org.springframework.data.annotation.Persistent;

/**
 * The {@link Sharded} annotation provides meta information about the actual distribution of data. The
 * {@link #shardKey()} is used to distribute documents across shards. <br />
 * Please see the <a href="https://docs.mongodb.com/manual/sharding/">MongoDB Documentation</a> for more information
 * about requirements and limitations of sharding.
 * <p/>
 * Spring Data adds the shard key to filter queries used for
 * {@link com.mongodb.client.MongoCollection#replaceOne(org.bson.conversions.Bson, Object)} operations triggered by
 * {@code save} operations on {@link org.springframework.data.mongodb.core.MongoOperations} and
 * {@link org.springframework.data.mongodb.core.ReactiveMongoOperations} as well as {@code update/upsert} operations
 * replacing/upserting a single existing document as long as the given
 * {@link org.springframework.data.mongodb.core.query.UpdateDefinition} holds a full copy of the entity.
 * <p/>
 * All other operations that require the presence of the {@literal shard key} in the filter query need to provide the
 * information via the {@link org.springframework.data.mongodb.core.query.Query} parameter when invoking the method.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 3.0
 */
@Persistent
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE, ElementType.ANNOTATION_TYPE })
public @interface Sharded {

	/**
	 * Alias for {@link #shardKey()}.
	 *
	 * @return {@literal _id} by default.
	 * @see #shardKey()
	 */
	@AliasFor("shardKey")
	String[] value() default {};

	/**
	 * The shard key determines the distribution of the collection's documents among the cluster's shards. The shard key
	 * is either a single or multiple indexed properties that exist in every document in the collection.
	 * <p/>
	 * By default the {@literal id} property is used for sharding. <br />
	 * <strong>NOTE</strong>: Required indexes are not created automatically. Create these either externally, via
	 * {@link org.springframework.data.mongodb.core.index.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition)}
	 * or by annotating your domain model with {@link org.springframework.data.mongodb.core.index.Indexed}/
	 * {@link org.springframework.data.mongodb.core.index.CompoundIndex} along with enabled
	 * {@link org.springframework.data.mongodb.config.MongoConfigurationSupport#autoIndexCreation() auto index creation}.
	 *
	 * @return an empty key by default, which indicates use of the entity's {@literal id} property.
	 */
	@AliasFor("value")
	String[] shardKey() default {};

	/**
	 * The sharding strategy to use for distributing data across sharded clusters.
	 *
	 * @return {@link ShardingStrategy#RANGE} by default.
	 */
	ShardingStrategy shardingStrategy() default ShardingStrategy.RANGE;

	/**
	 * As of MongoDB 4.2 it is possible to change the shard key using an update. Using immutable shard keys avoids server
	 * round trips to obtain an entity's actual shard key from the database.
	 *
	 * @return {@literal false} by default.
	 * @see <a href="https://docs.mongodb.com/manual/core/sharding-shard-key/#change-a-document-s-shard-key-value">MongoDB
	 *      Reference: Change a Document's Shard Key Value</a>
	 */
	boolean immutableKey() default false;

}
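A minimal sketch of the annotation in use (the Customer type is hypothetical); the detected key feeds the replaceOne filter enrichment described in the javadoc above:

	@Document
	@Sharded(shardKey = { "country", "userId" }, shardingStrategy = ShardingStrategy.RANGE, immutableKey = true)
	class Customer {

		@Id String id;
		String country; // part of the shard key; back it with an index
		String userId;
	}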
@@ -0,0 +1,35 @@
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.mapping;

/**
 * @author Christoph Strobl
 * @since 3.0
 */
public enum ShardingStrategy {

	/**
	 * Ranged sharding involves dividing data into ranges based on the shard key values. Each chunk is then assigned a
	 * range based on the shard key values.
	 */
	RANGE,

	/**
	 * Hashed sharding involves computing a hash of the shard key field's value. Each chunk is then assigned a range based
	 * on the hashed shard key values.
	 */
	HASH
}
@@ -26,9 +26,11 @@ import org.springframework.data.annotation.ReadOnlyProperty;
/**
 * {@link TextScore} marks the property to be considered as the on server calculated {@literal textScore} when doing
 * full text search. <br />
 * <b>NOTE</b> Property will not be written when saving entity.
 * <b>NOTE</b> Property will not be written when saving the entity and may be {@literal null} if the document is retrieved
 * by a regular (i.e. non-{@literal $text}) query.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 1.6
 */
@ReadOnlyProperty
@@ -0,0 +1,42 @@
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.mapping.event;

import org.bson.Document;
import org.springframework.data.mapping.callback.EntityCallback;

/**
 * Callback being invoked after a domain object is materialized from a {@link Document} when reading results.
 *
 * @author Roman Puchkovskiy
 * @author Mark Paluch
 * @since 3.0
 * @see org.springframework.data.mapping.callback.EntityCallbacks
 */
@FunctionalInterface
public interface AfterConvertCallback<T> extends EntityCallback<T> {

	/**
	 * Entity callback method invoked after a domain object is materialized from a {@link Document}. Can return either the
	 * same or a modified instance of the domain object.
	 *
	 * @param entity the domain object (the result of the conversion).
	 * @param document must not be {@literal null}.
	 * @param collection name of the collection.
	 * @return the domain object that is the result of reading it from the {@link Document}.
	 */
	T onAfterConvert(T entity, Document document, String collection);
}
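A minimal sketch of an imperative callback bean (Person and its lastLoaded property are hypothetical); beans implementing the interface are discovered through the EntityCallbacks instance created from the ApplicationContext:

	@Component
	class PersonLoadedCallback implements AfterConvertCallback<Person> {

		@Override
		public Person onAfterConvert(Person entity, Document document, String collection) {

			// enrich the freshly materialized object before it is handed to the caller
			entity.setLastLoaded(Instant.now());
			return entity;
		}
	}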
@@ -0,0 +1,40 @@
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.mapping.event;

import org.bson.Document;
import org.springframework.data.mapping.callback.EntityCallback;

/**
 * Entity callback triggered after save of a {@link Document}.
 *
 * @author Roman Puchkovskiy
 * @since 3.0
 */
@FunctionalInterface
public interface AfterSaveCallback<T> extends EntityCallback<T> {

	/**
	 * Entity callback method invoked after a domain object is saved. Can return either the same or a modified instance of
	 * the domain object.
	 *
	 * @param entity the domain object that was saved.
	 * @param document {@link Document} representing the {@code entity}.
	 * @param collection name of the collection.
	 * @return the domain object that was persisted.
	 */
	T onAfterSave(T entity, Document document, String collection);
}
@@ -22,6 +22,7 @@ import org.springframework.data.mapping.callback.EntityCallback;
 *
 * @author Mark Paluch
 * @since 2.2
 * @see org.springframework.data.mapping.callback.EntityCallbacks
 */
@FunctionalInterface
public interface BeforeConvertCallback<T> extends EntityCallback<T> {

@@ -24,6 +24,7 @@ import org.springframework.data.mapping.callback.EntityCallback;
 * @author Mark Paluch
 * @author Michael J. Simons
 * @since 2.2
 * @see org.springframework.data.mapping.callback.EntityCallbacks
 */
@FunctionalInterface
public interface BeforeSaveCallback<T> extends EntityCallback<T> {
@@ -0,0 +1,44 @@
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.mapping.event;

import org.bson.Document;
import org.reactivestreams.Publisher;

import org.springframework.data.mapping.callback.EntityCallback;

/**
 * Callback being invoked after a domain object is materialized from a {@link Document} when reading results.
 *
 * @author Roman Puchkovskiy
 * @author Mark Paluch
 * @since 3.0
 * @see org.springframework.data.mapping.callback.ReactiveEntityCallbacks
 */
@FunctionalInterface
public interface ReactiveAfterConvertCallback<T> extends EntityCallback<T> {

	/**
	 * Entity callback method invoked after a domain object is materialized from a {@link Document}. Can return either the
	 * same or a modified instance of the domain object.
	 *
	 * @param entity the domain object (the result of the conversion).
	 * @param document must not be {@literal null}.
	 * @param collection name of the collection.
	 * @return a {@link Publisher} emitting the domain object that is the result of reading it from the {@link Document}.
	 */
	Publisher<T> onAfterConvert(T entity, Document document, String collection);
}
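The reactive counterpart returns a Publisher, so post-processing may itself be asynchronous. A sketch under the same hypothetical Person type as above:

	@Component
	class ReactivePersonLoadedCallback implements ReactiveAfterConvertCallback<Person> {

		@Override
		public Publisher<Person> onAfterConvert(Person entity, Document document, String collection) {

			// synchronous enrichment wrapped in a Mono; any async source would work here too
			entity.setLastLoaded(Instant.now());
			return Mono.just(entity);
		}
	}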
@@ -0,0 +1,44 @@
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.mapping.event;

import org.bson.Document;
import org.reactivestreams.Publisher;

import org.springframework.data.mapping.callback.EntityCallback;
import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;

/**
 * Entity callback triggered after save of a {@link Document}.
 *
 * @author Roman Puchkovskiy
 * @since 3.0
 * @see ReactiveEntityCallbacks
 */
@FunctionalInterface
public interface ReactiveAfterSaveCallback<T> extends EntityCallback<T> {

	/**
	 * Entity callback method invoked after a domain object is saved. Can return either the same or a modified instance of
	 * the domain object.
	 *
	 * @param entity the domain object that was saved.
	 * @param document {@link Document} representing the {@code entity}.
	 * @param collection name of the collection.
	 * @return a {@link Publisher} emitting the domain object to be returned to the caller.
	 */
	Publisher<T> onAfterSave(T entity, Document document, String collection);
}
@@ -18,14 +18,13 @@ package org.springframework.data.mongodb.core.mapping.event;
import org.reactivestreams.Publisher;

import org.springframework.data.mapping.callback.EntityCallback;
import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;

/**
 * Callback being invoked before a domain object is converted to be persisted.
 *
 * @author Mark Paluch
 * @since 2.2
 * @see ReactiveEntityCallbacks
 * @see org.springframework.data.mapping.callback.ReactiveEntityCallbacks
 */
@FunctionalInterface
public interface ReactiveBeforeConvertCallback<T> extends EntityCallback<T> {

@@ -19,14 +19,13 @@ import org.bson.Document;
import org.reactivestreams.Publisher;

import org.springframework.data.mapping.callback.EntityCallback;
import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;

/**
 * Entity callback triggered before save of a document.
 *
 * @author Mark Paluch
 * @since 2.2
 * @see ReactiveEntityCallbacks
 * @see org.springframework.data.mapping.callback.ReactiveEntityCallbacks
 */
@FunctionalInterface
public interface ReactiveBeforeSaveCallback<T> extends EntityCallback<T> {
@@ -15,6 +15,7 @@
 */
package org.springframework.data.mongodb.core.messaging;

import java.time.Duration;
import java.time.Instant;

import org.bson.BsonValue;
@@ -167,21 +168,38 @@ public class ChangeStreamRequest<T>

		private final @Nullable String databaseName;
		private final @Nullable String collectionName;
		private final @Nullable Duration maxAwaitTime;
		private final ChangeStreamOptions options;

		/**
		 * Create new {@link ChangeStreamRequestOptions}.
		 *
		 * @param databaseName can be {@literal null}.
		 * @param collectionName can be {@literal null}.
		 * @param options must not be {@literal null}.
		 */
		public ChangeStreamRequestOptions(@Nullable String databaseName, @Nullable String collectionName,
				ChangeStreamOptions options) {
			this(databaseName, collectionName, null, options);
		}

		/**
		 * Create new {@link ChangeStreamRequestOptions}.
		 *
		 * @param databaseName can be {@literal null}.
		 * @param collectionName can be {@literal null}.
		 * @param maxAwaitTime can be {@literal null}.
		 * @param options must not be {@literal null}.
		 * @since 3.0
		 */
		public ChangeStreamRequestOptions(@Nullable String databaseName, @Nullable String collectionName,
				@Nullable Duration maxAwaitTime, ChangeStreamOptions options) {

			Assert.notNull(options, "Options must not be null!");

			this.collectionName = collectionName;
			this.databaseName = databaseName;
			this.maxAwaitTime = maxAwaitTime;
			this.options = options;
		}

@@ -219,6 +237,15 @@ public class ChangeStreamRequest<T>
		public String getDatabaseName() {
			return databaseName;
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.monitor.SubscriptionRequest.RequestOptions#maxAwaitTime()
		 */
		@Override
		public Duration maxAwaitTime() {
			return maxAwaitTime != null ? maxAwaitTime : RequestOptions.super.maxAwaitTime();
		}
	}

	/**
@@ -232,8 +259,9 @@ public class ChangeStreamRequest<T>

		private @Nullable String databaseName;
		private @Nullable String collectionName;
		private @Nullable Duration maxAwaitTime;
		private @Nullable MessageListener<ChangeStreamDocument<Document>, ? super T> listener;
		private ChangeStreamOptionsBuilder delegate = ChangeStreamOptions.builder();
		private final ChangeStreamOptionsBuilder delegate = ChangeStreamOptions.builder();

		private ChangeStreamRequestBuilder() {}

@@ -330,7 +358,7 @@ public class ChangeStreamRequest<T>
		 * @see ChangeStreamOptions#getCollation()
		 * @see ChangeStreamOptionsBuilder#collation(Collation)
		 */
		public ChangeStreamRequestBuilder collation(Collation collation) {
		public ChangeStreamRequestBuilder<T> collation(Collation collation) {

			Assert.notNull(collation, "Collation must not be null!");

@@ -417,6 +445,20 @@ public class ChangeStreamRequest<T>
			return this;
		}

		/**
		 * Set the cursor's maximum wait time on the server (for a new Document to be emitted).
		 *
		 * @param timeout must not be {@literal null}.
		 * @since 3.0
		 */
		public ChangeStreamRequestBuilder<T> maxAwaitTime(Duration timeout) {

			Assert.notNull(timeout, "Timeout must not be null!");

			this.maxAwaitTime = timeout;
			return this;
		}

		/**
		 * @return the built {@link ChangeStreamRequest}.
		 */
@@ -425,7 +467,7 @@ public class ChangeStreamRequest<T>
			Assert.notNull(listener, "MessageListener must not be null!");

			return new ChangeStreamRequest<>(listener,
					new ChangeStreamRequestOptions(databaseName, collectionName, delegate.build()));
					new ChangeStreamRequestOptions(databaseName, collectionName, maxAwaitTime, delegate.build()));
		}
	}
}
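A minimal sketch of the new builder option (the Person type and the previously defined listener are assumed): the server now waits up to two seconds for a new event before returning control, instead of falling back to the driver default:

	ChangeStreamRequest<Person> request = ChangeStreamRequest.builder(listener)
			.collection("person")
			.maxAwaitTime(Duration.ofSeconds(2))
			.build();

The value travels through ChangeStreamRequestOptions into the cursor-reading tasks shown in the following hunks.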
@@ -23,6 +23,7 @@ import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import org.bson.BsonDocument;
import org.bson.BsonTimestamp;
@@ -135,6 +136,10 @@ class ChangeStreamTask extends CursorReadingTask<ChangeStreamDocument<Document>,
			iterable = filter.isEmpty() ? db.watch(Document.class) : db.watch(filter, Document.class);
		}

		if (!options.maxAwaitTime().isZero()) {
			iterable = iterable.maxAwaitTime(options.maxAwaitTime().toMillis(), TimeUnit.MILLISECONDS);
		}

		if (!resumeToken.isEmpty()) {

			if (resumeAfter) {
@@ -15,6 +15,8 @@
 */
package org.springframework.data.mongodb.core.messaging;

import java.time.Duration;

import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions;
import org.springframework.lang.Nullable;
@@ -72,6 +74,16 @@ public interface SubscriptionRequest<S, T, O extends RequestOptions> {
	@Nullable
	String getCollectionName();

	/**
	 * Get the maximum wait time (the time till the next Document is emitted) to apply when reading from the collection.
	 *
	 * @return never {@literal null}. {@link Duration#ZERO} by default.
	 * @since 3.0
	 */
	default Duration maxAwaitTime() {
		return Duration.ZERO;
	}

	/**
	 * Create empty options.
	 *
@@ -15,6 +15,8 @@
 */
package org.springframework.data.mongodb.core.messaging;

import java.util.concurrent.TimeUnit;

import org.bson.Document;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.convert.QueryMapper;
@@ -75,6 +77,10 @@ class TailableCursorTask extends CursorReadingTask<Document, Object> {
			iterable = iterable.collation(collation);
		}

		if (!options.maxAwaitTime().isZero()) {
			iterable = iterable.maxAwaitTime(options.maxAwaitTime().toMillis(), TimeUnit.MILLISECONDS);
		}

		return iterable.iterator();
	}
}
@@ -95,6 +95,8 @@ public class Query {
	 */
	public Query addCriteria(CriteriaDefinition criteriaDefinition) {

		Assert.notNull(criteriaDefinition, "CriteriaDefinition must not be null!");

		CriteriaDefinition existing = this.criteria.get(criteriaDefinition.getKey());
		String key = criteriaDefinition.getKey();

@@ -517,13 +519,11 @@ public class Query {
		}
	};

	target.criteria.putAll(source.criteria);
	target.skip = source.skip;
	target.limit = source.limit;
	target.sort = Sort.unsorted().and(source.sort);
	target.hint = source.hint;
	target.collation = source.collation;
	target.restrictedTypes.addAll(source.restrictedTypes);
	target.skip = source.getSkip();
	target.limit = source.getLimit();
	target.hint = source.getHint();
	target.collation = source.getCollation();
	target.restrictedTypes.addAll(source.getRestrictedTypes());

	if (source.getMeta().hasValues()) {
		target.setMeta(new Meta(source.getMeta()));
@@ -15,7 +15,10 @@
 */
package org.springframework.data.mongodb.core.query;

import static org.springframework.util.ObjectUtils.*;

import org.springframework.lang.Nullable;
import org.springframework.util.ObjectUtils;

/**
 * A {@link Term} defines one or multiple words {@link Type#WORD} or phrases {@link Type#PHRASE} to be used in the
@@ -90,6 +93,47 @@ public class Term {
		return negated ? negateRaw(formatted) : formatted;
	}

	/*
	 * (non-Javadoc)
	 * @see java.lang.Object#equals(java.lang.Object)
	 */
	@Override
	public boolean equals(Object o) {

		if (this == o) {
			return true;
		}

		if (!(o instanceof Term)) {
			return false;
		}

		Term term = (Term) o;

		return ObjectUtils.nullSafeEquals(negated, term.negated) && ObjectUtils.nullSafeEquals(type, term.type)
				&& ObjectUtils.nullSafeEquals(raw, term.raw);
	}

	/*
	 * (non-Javadoc)
	 * @see java.lang.Object#hashCode()
	 */
	@Override
	public int hashCode() {

		int result = 17;

		result += ObjectUtils.nullSafeHashCode(type);
		result += ObjectUtils.nullSafeHashCode(raw);
		result += ObjectUtils.nullSafeHashCode(negated);

		return result;
	}

	/*
	 * (non-Javadoc)
	 * @see java.lang.Object#toString()
	 */
	@Override
	public String toString() {
		return getFormatted();
|
||||
import org.bson.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
@@ -34,7 +35,7 @@ import org.springframework.util.StringUtils;
|
||||
public class TextCriteria implements CriteriaDefinition {
|
||||
|
||||
private final List<Term> terms;
|
||||
private @Nullable String language;
|
||||
private final @Nullable String language;
|
||||
private @Nullable Boolean caseSensitive;
|
||||
private @Nullable Boolean diacriticSensitive;
|
||||
|
||||
@@ -51,7 +52,7 @@ public class TextCriteria implements CriteriaDefinition {
|
||||
private TextCriteria(@Nullable String language) {
|
||||
|
||||
this.language = language;
|
||||
this.terms = new ArrayList<Term>();
|
||||
this.terms = new ArrayList<>();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -231,9 +232,47 @@ public class TextCriteria implements CriteriaDefinition {
|
||||
return new Document("$text", document);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (!(o instanceof TextCriteria)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
TextCriteria that = (TextCriteria) o;
|
||||
|
||||
return ObjectUtils.nullSafeEquals(terms, that.terms) && ObjectUtils.nullSafeEquals(language, that.language)
|
||||
&& ObjectUtils.nullSafeEquals(caseSensitive, that.caseSensitive)
|
||||
&& ObjectUtils.nullSafeEquals(diacriticSensitive, that.diacriticSensitive);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
int result = 17;
|
||||
|
||||
result += ObjectUtils.nullSafeHashCode(terms);
|
||||
result += ObjectUtils.nullSafeHashCode(language);
|
||||
result += ObjectUtils.nullSafeHashCode(caseSensitive);
|
||||
result += ObjectUtils.nullSafeHashCode(diacriticSensitive);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private String join(Iterable<Term> terms) {
|
||||
|
||||
List<String> result = new ArrayList<String>();
|
||||
List<String> result = new ArrayList<>();
|
||||
|
||||
for (Term term : terms) {
|
||||
if (term != null) {
|
||||
|
||||
@@ -0,0 +1,158 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.gridfs;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.client.gridfs.model.GridFSFile;
|
||||
|
||||
/**
|
||||
* A common interface when dealing with GridFs items using Spring Data.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public interface GridFsObject<ID, CONTENT> {
|
||||
|
||||
/**
|
||||
* The {@link GridFSFile#getId()} value converted into its simple java type. <br />
|
||||
* A {@link org.bson.BsonString} will be converted to plain {@link String}.
|
||||
*
|
||||
* @return can be {@literal null} depending on the implementation.
|
||||
*/
|
||||
@Nullable
|
||||
ID getFileId();
|
||||
|
||||
/**
|
||||
* The filename.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String getFilename();
|
||||
|
||||
/**
|
||||
* The actual file content.
|
||||
*
|
||||
* @return
|
||||
* @throws IllegalStateException if the content cannot be obtained.
|
||||
*/
|
||||
CONTENT getContent();
|
||||
|
||||
/**
|
||||
* Additional information like file metadata (eg. contentType).
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Options getOptions();
|
||||
|
||||
/**
|
||||
* Additional, context relevant information.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class Options {
|
||||
|
||||
private final Document metadata;
|
||||
private final int chunkSize;
|
||||
|
||||
private Options(Document metadata, int chunkSize) {
|
||||
|
||||
this.metadata = metadata;
|
||||
this.chunkSize = chunkSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory to create empty options.
|
||||
*
|
||||
* @return new instance of {@link Options}.
|
||||
*/
|
||||
public static Options none() {
|
||||
return new Options(new Document(), -1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method to create {@link Options} with given content type.
|
||||
*
|
||||
* @param contentType
|
||||
* @return new instance of {@link Options}.
|
||||
*/
|
||||
public static Options typed(String contentType) {
|
||||
return new Options(new Document("_contentType", contentType), -1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method to create {@link Options} by extracting information from the given {@link GridFSFile}.
|
||||
*
|
||||
* @param gridFSFile can be {@literal null}, returns {@link #none()} in that case.
|
||||
* @return new instance of {@link Options}.
|
||||
*/
|
||||
public static Options from(@Nullable GridFSFile gridFSFile) {
|
||||
return gridFSFile != null ? new Options(gridFSFile.getMetadata(), gridFSFile.getChunkSize()) : none();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the associated content type.
|
||||
*
|
||||
* @param contentType must not be {@literal null}.
|
||||
* @return new instance of {@link Options}.
|
||||
*/
|
||||
public Options contentType(String contentType) {
|
||||
|
||||
Options target = new Options(new Document(metadata), chunkSize);
|
||||
target.metadata.put("_contentType", contentType);
|
||||
return target;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param metadata
|
||||
* @return new instance of {@link Options}.
|
||||
*/
|
||||
public Options metadata(Document metadata) {
|
||||
return new Options(metadata, chunkSize);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param chunkSize the file chunk size to use.
|
||||
* @return new instance of {@link Options}.
|
||||
*/
|
||||
public Options chunkSize(int chunkSize) {
|
||||
return new Options(metadata, chunkSize);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public Document getMetadata() {
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the chunk size to use.
|
||||
*/
|
||||
public int getChunkSize() {
|
||||
return chunkSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal null} if not set.
|
||||
*/
|
||||
@Nullable
|
||||
String getContentType() {
|
||||
return (String) metadata.get("_contentType");
|
||||
}
|
||||
}
|
||||
}
|
||||
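The `Options` value object above is immutable; every mutator returns a fresh instance. A minimal usage sketch (editor's illustration, not part of the commit; the metadata values are made up). Note the ordering: `metadata(...)` swaps in a whole new document, while `contentType(...)` copies the current one and adds the `_contentType` entry, so content type should be set after metadata:

```java
import org.bson.Document;
import org.springframework.data.mongodb.gridfs.GridFsObject.Options;

// metadata(...) replaces the whole document, contentType(...) augments a copy.
Options options = Options.none()
		.metadata(new Document("source", "import-job")) // hypothetical metadata
		.contentType("application/pdf")
		.chunkSize(512 * 1024); // 512 kB chunks; a negative value means server default
```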
@@ -22,7 +22,10 @@ import org.bson.types.ObjectId;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.gridfs.GridFsUpload.GridFsUploadBuilder;
import org.springframework.lang.Nullable;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

import com.mongodb.client.gridfs.GridFSFindIterable;

@@ -45,7 +48,9 @@ public interface GridFsOperations extends ResourcePatternResolver {
	 * @param filename must not be {@literal null} or empty.
	 * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
	 */
	ObjectId store(InputStream content, String filename);
	default ObjectId store(InputStream content, String filename) {
		return store(content, filename, null, null);
	}

	/**
	 * Stores the given content into a file with the given name.
@@ -54,7 +59,9 @@ public interface GridFsOperations extends ResourcePatternResolver {
	 * @param metadata can be {@literal null}.
	 * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
	 */
	ObjectId store(InputStream content, @Nullable Object metadata);
	default ObjectId store(InputStream content, @Nullable Object metadata) {
		return store(content, null, metadata);
	}

	/**
	 * Stores the given content into a file with the given name.
@@ -63,7 +70,9 @@ public interface GridFsOperations extends ResourcePatternResolver {
	 * @param metadata can be {@literal null}.
	 * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
	 */
	ObjectId store(InputStream content, @Nullable Document metadata);
	default ObjectId store(InputStream content, @Nullable Document metadata) {
		return store(content, null, metadata);
	}

	/**
	 * Stores the given content into a file with the given name and content type.
@@ -73,7 +82,9 @@ public interface GridFsOperations extends ResourcePatternResolver {
	 * @param contentType can be {@literal null}.
	 * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
	 */
	ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType);
	default ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType) {
		return store(content, filename, contentType, null);
	}

	/**
	 * Stores the given content into a file with the given name using the given metadata. The metadata object will be
@@ -84,7 +95,9 @@ public interface GridFsOperations extends ResourcePatternResolver {
	 * @param metadata can be {@literal null}.
	 * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
	 */
	ObjectId store(InputStream content, @Nullable String filename, @Nullable Object metadata);
	default ObjectId store(InputStream content, @Nullable String filename, @Nullable Object metadata) {
		return store(content, filename, null, metadata);
	}

	/**
	 * Stores the given content into a file with the given name and content type using the given metadata. The metadata
@@ -107,7 +120,9 @@ public interface GridFsOperations extends ResourcePatternResolver {
	 * @param metadata can be {@literal null}.
	 * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
	 */
	ObjectId store(InputStream content, @Nullable String filename, @Nullable Document metadata);
	default ObjectId store(InputStream content, @Nullable String filename, @Nullable Document metadata) {
		return store(content, filename, null, metadata);
	}

	/**
	 * Stores the given content into a file with the given name and content type using the given metadata.
@@ -118,8 +133,35 @@ public interface GridFsOperations extends ResourcePatternResolver {
	 * @param metadata can be {@literal null}.
	 * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
	 */
	ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType,
			@Nullable Document metadata);
	default ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType,
			@Nullable Document metadata) {

		GridFsUploadBuilder<ObjectId> uploadBuilder = GridFsUpload.fromStream(content);
		if (StringUtils.hasText(filename)) {
			uploadBuilder.filename(filename);
		}
		if (StringUtils.hasText(contentType)) {
			uploadBuilder.contentType(contentType);
		}
		if (!ObjectUtils.isEmpty(metadata)) {
			uploadBuilder.metadata(metadata);
		}

		return store(uploadBuilder.build());
	}

	/**
	 * Stores the given {@link GridFsObject}, likely a {@link GridFsUpload}, into a file with the given
	 * {@link GridFsObject#getFilename() name}. If the {@link GridFsObject#getFileId()} is set, the file will be stored
	 * with that id, otherwise the server auto creates a new id. <br />
	 *
	 * @param upload the {@link GridFsObject} (most likely a {@link GridFsUpload}) to be stored.
	 * @param <T> id type of the underlying {@link com.mongodb.client.gridfs.model.GridFSFile}
	 * @return the id of the stored file. Either an auto created value or {@link GridFsObject#getFileId()}, but never
	 *         {@literal null}.
	 * @since 3.0
	 */
	<T> T store(GridFsObject<T, InputStream> upload);

	/**
	 * Returns all files matching the given query. Note that currently {@link Sort} criteria defined at the
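All of the `store(InputStream, ...)` variants above are now default methods that funnel through the builder into the single `store(GridFsObject)` entry point. A hedged caller sketch (`operations` is assumed to be an injected `GridFsOperations`; file name and metadata are illustrative):

```java
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.bson.Document;
import org.bson.types.ObjectId;

try (InputStream in = Files.newInputStream(Paths.get("report.pdf"))) {
	// Internally delegates to store(GridFsUpload.fromStream(in)...build()).
	ObjectId id = operations.store(in, "report.pdf", "application/pdf",
			new Document("source", "import-job"));
}
```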
@@ -23,6 +23,7 @@ import java.util.Optional;

import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

@@ -37,7 +38,7 @@ import com.mongodb.client.gridfs.model.GridFSFile;
 * @author Hartmut Lang
 * @author Mark Paluch
 */
public class GridFsResource extends InputStreamResource {
public class GridFsResource extends InputStreamResource implements GridFsObject<Object, InputStream> {

	static final String CONTENT_TYPE_FIELD = "_contentType";
	private static final ByteArrayInputStream EMPTY_INPUT_STREAM = new ByteArrayInputStream(new byte[0]);
@@ -169,6 +170,17 @@ public class GridFsResource extends InputStreamResource {
		return getGridFSFile().getId();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsObject#getFileId()
	 */
	@Override
	public Object getFileId() {

		Assert.state(exists(), () -> String.format("%s does not exist.", getDescription()));
		return BsonUtils.toJavaType(getGridFSFile().getId());
	}

	/**
	 * @return the underlying {@link GridFSFile}. Can be {@literal null} if absent.
	 * @since 2.2
@@ -195,6 +207,29 @@ public class GridFsResource extends InputStreamResource {
				.orElseThrow(() -> new MongoGridFSException("No contentType data for this GridFS file"));
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsObject#getContent()
	 */
	@Override
	public InputStream getContent() {

		try {
			return getInputStream();
		} catch (IOException e) {
			throw new IllegalStateException("Failed to obtain input stream for " + filename, e);
		}
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsObject#getOptions()
	 */
	@Override
	public Options getOptions() {
		return Options.from(getGridFSFile());
	}

	private void verifyExists() throws FileNotFoundException {

		if (!exists()) {
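Since `GridFsResource` now implements `GridFsObject<Object, InputStream>`, a downloaded resource exposes the same id/content/options contract as an upload. A sketch (assumes `operations.getResource(...)` resolves the file; reading the content type through the `_contentType` metadata key is an assumption based on the template's convention shown above):

```java
GridFsResource resource = operations.getResource("report.pdf");

Object id = resource.getFileId();            // id unwrapped to its simple Java type
InputStream content = resource.getContent(); // IllegalStateException if the stream is unobtainable
String contentType = (String) resource.getOptions().getMetadata().get("_contentType");
```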
@@ -29,6 +29,7 @@ import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
@@ -38,6 +39,7 @@ import com.mongodb.client.gridfs.GridFSBucket;
import com.mongodb.client.gridfs.GridFSBuckets;
import com.mongodb.client.gridfs.GridFSFindIterable;
import com.mongodb.client.gridfs.model.GridFSFile;
import com.mongodb.client.gridfs.model.GridFSUploadOptions;

/**
 * {@link GridFsOperations} implementation to store content into MongoDB GridFS.
@@ -85,48 +87,6 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
		this.bucket = bucket;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String)
	 */
	public ObjectId store(InputStream content, String filename) {
		return store(content, filename, (Object) null);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.Object)
	 */
	@Override
	public ObjectId store(InputStream content, @Nullable Object metadata) {
		return store(content, null, metadata);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, com.mongodb.Document)
	 */
	@Override
	public ObjectId store(InputStream content, @Nullable Document metadata) {
		return store(content, null, metadata);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, java.lang.String)
	 */
	public ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType) {
		return store(content, filename, contentType, (Object) null);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, java.lang.Object)
	 */
	public ObjectId store(InputStream content, @Nullable String filename, @Nullable Object metadata) {
		return store(content, filename, null, metadata);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, java.lang.String, java.lang.Object)
@@ -138,21 +98,24 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, com.mongodb.Document)
	 * @see org.springframework.data.mongodb.gridfs.GridFsOperations#save(org.springframework.data.mongodb.gridfs.GridFsObject)
	 */
	public ObjectId store(InputStream content, @Nullable String filename, @Nullable Document metadata) {
		return this.store(content, filename, null, metadata);
	}
	public <T> T store(GridFsObject<T, InputStream> upload) {

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, com.mongodb.Document)
	 */
	public ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType,
			@Nullable Document metadata) {
		GridFSUploadOptions uploadOptions = computeUploadOptionsFor(upload.getOptions().getContentType(),
				upload.getOptions().getMetadata());

		Assert.notNull(content, "InputStream must not be null!");
		return getGridFs().uploadFromStream(filename, content, computeUploadOptionsFor(contentType, metadata));
		if (upload.getOptions().getChunkSize() > 0) {
			uploadOptions.chunkSizeBytes(upload.getOptions().getChunkSize());
		}

		if (upload.getFileId() == null) {
			return (T) getGridFs().uploadFromStream(upload.getFilename(), upload.getContent(), uploadOptions);
		}

		getGridFs().uploadFromStream(BsonUtils.simpleToBsonValue(upload.getFileId()), upload.getFilename(),
				upload.getContent(), uploadOptions);
		return upload.getFileId();
	}

	/*

@@ -0,0 +1,245 @@
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.gridfs;

import java.io.InputStream;
import java.util.function.Supplier;

import org.bson.Document;
import org.bson.types.ObjectId;

import org.springframework.data.util.Lazy;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.StreamUtils;

import com.mongodb.client.gridfs.model.GridFSFile;

/**
 * Upload descriptor for a GridFS file upload.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 3.0
 */
public class GridFsUpload<ID> implements GridFsObject<ID, InputStream> {

	private final @Nullable ID id;
	private final Lazy<InputStream> dataStream;
	private final String filename;
	private final Options options;

	private GridFsUpload(@Nullable ID id, Lazy<InputStream> dataStream, String filename, Options options) {

		Assert.notNull(dataStream, "Data Stream must not be null");
		Assert.notNull(filename, "Filename must not be null");
		Assert.notNull(options, "Options must not be null");

		this.id = id;
		this.dataStream = dataStream;
		this.filename = filename;
		this.options = options;
	}

	/**
	 * The {@link GridFSFile#getId()} value converted into its simple java type. <br />
	 * A {@link org.bson.BsonString} will be converted to plain {@link String}.
	 *
	 * @return can be {@literal null}.
	 * @see org.springframework.data.mongodb.gridfs.GridFsObject#getFileId()
	 */
	@Override
	@Nullable
	public ID getFileId() {
		return id;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsObject#getFilename()
	 */
	@Override
	public String getFilename() {
		return filename;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsObject#getContent()
	 */
	@Override
	public InputStream getContent() {
		return dataStream.orElse(StreamUtils.emptyInput());
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsObject#getOptions()
	 */
	@Override
	public Options getOptions() {
		return options;
	}

	/**
	 * Create a new instance of {@link GridFsUpload} for the given {@link InputStream}.
	 *
	 * @param stream must not be {@literal null}.
	 * @return new instance of {@link GridFsUpload}.
	 */
	public static GridFsUploadBuilder<ObjectId> fromStream(InputStream stream) {
		return new GridFsUploadBuilder<ObjectId>().content(stream);
	}

	/**
	 * Builder to create {@link GridFsUpload} in a fluent way.
	 *
	 * @param <T> the target id type.
	 */
	public static class GridFsUploadBuilder<T> {

		private Object id;
		private Lazy<InputStream> dataStream;
		private String filename;
		private Options options = Options.none();

		private GridFsUploadBuilder() {}

		/**
		 * Define the content of the file to upload.
		 *
		 * @param stream the upload content.
		 * @return this.
		 */
		public GridFsUploadBuilder<T> content(InputStream stream) {

			Assert.notNull(stream, "InputStream must not be null");

			return content(() -> stream);
		}

		/**
		 * Define the content of the file to upload.
		 *
		 * @param stream the upload content.
		 * @return this.
		 */
		public GridFsUploadBuilder<T> content(Supplier<InputStream> stream) {

			Assert.notNull(stream, "InputStream Supplier must not be null");

			this.dataStream = Lazy.of(stream);
			return this;
		}

		/**
		 * Set the id to use.
		 *
		 * @param id the id to save the content to.
		 * @param <T1>
		 * @return this.
		 */
		public <T1> GridFsUploadBuilder<T1> id(T1 id) {

			this.id = id;
			return (GridFsUploadBuilder<T1>) this;
		}

		/**
		 * Set the filename.
		 *
		 * @param filename the filename to use.
		 * @return this.
		 */
		public GridFsUploadBuilder<T> filename(String filename) {

			this.filename = filename;
			return this;
		}

		/**
		 * Set additional file information.
		 *
		 * @param options must not be {@literal null}.
		 * @return this.
		 */
		public GridFsUploadBuilder<T> options(Options options) {

			Assert.notNull(options, "Options must not be null");

			this.options = options;
			return this;
		}

		/**
		 * Set the file metadata.
		 *
		 * @param metadata must not be {@literal null}.
		 * @return this.
		 */
		public GridFsUploadBuilder<T> metadata(Document metadata) {

			this.options = this.options.metadata(metadata);
			return this;
		}

		/**
		 * Set the upload chunk size in bytes.
		 *
		 * @param chunkSize use negative number for default.
		 * @return this.
		 */
		public GridFsUploadBuilder<T> chunkSize(int chunkSize) {

			this.options = this.options.chunkSize(chunkSize);
			return this;
		}

		/**
		 * Set id, filename, metadata and chunk size from given file.
		 *
		 * @param gridFSFile must not be {@literal null}.
		 * @return this.
		 */
		public GridFsUploadBuilder<T> gridFsFile(GridFSFile gridFSFile) {

			Assert.notNull(gridFSFile, "GridFSFile must not be null");

			this.id = gridFSFile.getId();
			this.filename = gridFSFile.getFilename();
			this.options = this.options.metadata(gridFSFile.getMetadata());
			this.options = this.options.chunkSize(gridFSFile.getChunkSize());

			return this;
		}

		/**
		 * Set the content type.
		 *
		 * @param contentType must not be {@literal null}.
		 * @return this.
		 */
		public GridFsUploadBuilder<T> contentType(String contentType) {

			this.options = this.options.contentType(contentType);
			return this;
		}

		public GridFsUpload<T> build() {
			return new GridFsUpload(id, dataStream, filename, options);
		}
	}
}
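A sketch of the builder with a caller-supplied id (the `String` id and names are illustrative, not from the commit). `store(GridFsObject)` in `GridFsTemplate` then uploads under that id via `BsonUtils.simpleToBsonValue(...)` and returns the very same value:

```java
GridFsUpload<String> upload = GridFsUpload.fromStream(in) // in: some InputStream
		.id("invoice-2020-01")          // switches the builder to a String id
		.filename("invoice.pdf")
		.contentType("application/pdf")
		.build();

String id = operations.store(upload); // returns "invoice-2020-01"
```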
@@ -24,7 +24,10 @@ import org.reactivestreams.Publisher;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.gridfs.ReactiveGridFsUpload.ReactiveGridFsUploadBuilder;
import org.springframework.lang.Nullable;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

import com.mongodb.client.gridfs.model.GridFSFile;

@@ -137,8 +140,36 @@ public interface ReactiveGridFsOperations {
	 * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just
	 *         created.
	 */
	Mono<ObjectId> store(Publisher<DataBuffer> content, @Nullable String filename, @Nullable String contentType,
			@Nullable Document metadata);
	default Mono<ObjectId> store(Publisher<DataBuffer> content, @Nullable String filename, @Nullable String contentType,
			@Nullable Document metadata) {

		ReactiveGridFsUploadBuilder<ObjectId> uploadBuilder = ReactiveGridFsUpload.fromPublisher(content);

		if (StringUtils.hasText(filename)) {
			uploadBuilder.filename(filename);
		}
		if (StringUtils.hasText(contentType)) {
			uploadBuilder.contentType(contentType);
		}
		if (!ObjectUtils.isEmpty(metadata)) {
			uploadBuilder.metadata(metadata);
		}

		return store(uploadBuilder.build());
	}

	/**
	 * Stores the given {@link GridFsObject}, likely a {@link GridFsUpload}, into a file with the given
	 * {@link GridFsObject#getFilename() name}. If the {@link GridFsObject#getFileId()} is set, the file will be stored
	 * with that id, otherwise the server auto creates a new id. <br />
	 *
	 * @param upload the {@link GridFsObject} (most likely a {@link GridFsUpload}) to be stored.
	 * @param <T> id type of the underlying {@link com.mongodb.client.gridfs.model.GridFSFile}
	 * @return {@link Mono} emitting the id of the stored file which is either an auto created value or
	 *         {@link GridFsObject#getFileId()}.
	 * @since 3.0
	 */
	<T> Mono<T> store(GridFsObject<T, Publisher<DataBuffer>> upload);

	/**
	 * Returns a {@link Flux} emitting all files matching the given query. <br />
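The reactive default method mirrors the imperative one: it assembles a `ReactiveGridFsUpload` and delegates to `store(GridFsObject)`. A sketch (`reactiveOperations` is an assumed, injected `ReactiveGridFsOperations`; reading a local file is just one way to obtain the `DataBuffer` publisher):

```java
import java.nio.file.Paths;

import org.bson.types.ObjectId;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferUtils;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

Flux<DataBuffer> content = DataBufferUtils.read(Paths.get("report.pdf"),
		new DefaultDataBufferFactory(), 4096);

// null metadata is simply skipped by the builder above.
Mono<ObjectId> id = reactiveOperations.store(content, "report.pdf", "application/pdf", null);
```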
@@ -21,12 +21,14 @@ import reactor.core.publisher.Mono;
import java.io.InputStream;
import java.util.concurrent.atomic.AtomicBoolean;

import org.bson.BsonValue;
import org.reactivestreams.Publisher;
import org.springframework.core.io.Resource;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferFactory;
import org.springframework.core.io.buffer.DataBufferUtils;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

@@ -41,10 +43,12 @@ import com.mongodb.reactivestreams.client.gridfs.GridFSDownloadPublisher;
 * @author Christoph Strobl
 * @since 2.2
 */
public class ReactiveGridFsResource {
public class ReactiveGridFsResource implements GridFsObject<Object, Publisher<DataBuffer>> {

	private final AtomicBoolean consumed = new AtomicBoolean(false);

	private final @Nullable Object id;
	private final Options options;
	private final String filename;
	private final @Nullable GridFSDownloadPublisher downloadPublisher;
	private final DataBufferFactory dataBufferFactory;
@@ -56,21 +60,43 @@ public class ReactiveGridFsResource {
	 * @param downloadPublisher
	 */
	public ReactiveGridFsResource(String filename, @Nullable GridFSDownloadPublisher downloadPublisher) {
		this(filename, downloadPublisher, new DefaultDataBufferFactory());
		this(null, filename, Options.none(), downloadPublisher);
	}

	/**
	 * Creates a new, absent {@link ReactiveGridFsResource}.
	 *
	 * @param id
	 * @param filename filename of the absent resource.
	 * @param options
	 * @param downloadPublisher
	 * @since 3.0
	 */
	public ReactiveGridFsResource(@Nullable Object id, String filename, Options options,
			@Nullable GridFSDownloadPublisher downloadPublisher) {
		this(id, filename, options, downloadPublisher, new DefaultDataBufferFactory());
	}

	ReactiveGridFsResource(GridFSFile file, @Nullable GridFSDownloadPublisher downloadPublisher, DataBufferFactory dataBufferFactory) {
		this(file.getId(), file.getFilename(), Options.from(file), downloadPublisher, dataBufferFactory);
	}

	/**
	 * Creates a new, absent {@link ReactiveGridFsResource}.
	 *
	 * @param id
	 * @param filename filename of the absent resource.
	 * @param options
	 * @param downloadPublisher
	 * @param dataBufferFactory
	 * @since 3.0
	 */
	ReactiveGridFsResource(String filename, @Nullable GridFSDownloadPublisher downloadPublisher,
			DataBufferFactory dataBufferFactory) {
	ReactiveGridFsResource(@Nullable Object id, String filename, Options options,
			@Nullable GridFSDownloadPublisher downloadPublisher, DataBufferFactory dataBufferFactory) {

		this.id = id;
		this.filename = filename;
		this.options = options;
		this.downloadPublisher = downloadPublisher;
		this.dataBufferFactory = dataBufferFactory;
	}
@@ -88,6 +114,15 @@ public class ReactiveGridFsResource {
		return new ReactiveGridFsResource(filename, null);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsObject#getFileId()
	 */
	@Override
	public Object getFileId() {
		return id instanceof BsonValue ? BsonUtils.toJavaType((BsonValue) id) : id;
	}

	/**
	 * @see org.springframework.core.io.AbstractResource#getFilename()
	 */
@@ -140,6 +175,24 @@ public class ReactiveGridFsResource {
		return createDownloadStream(downloadPublisher);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsObject#getContent()
	 */
	@Override
	public Flux<DataBuffer> getContent() {
		return getDownloadStream();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsObject#getOptions()
	 */
	@Override
	public Options getOptions() {
		return options;
	}

	/**
	 * Obtain the download stream emitting chunks of data with given {@code chunkSize} as they come in.
	 *

@@ -23,9 +23,11 @@ import reactor.core.publisher.Mono;

import java.nio.ByteBuffer;

import org.bson.BsonValue;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.reactivestreams.Publisher;

import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferFactory;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;
@@ -34,13 +36,13 @@ import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.SerializationUtils;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;

import com.mongodb.client.gridfs.model.GridFSFile;
import com.mongodb.client.gridfs.model.GridFSUploadOptions;
import com.mongodb.reactivestreams.client.MongoDatabase;
import com.mongodb.reactivestreams.client.gridfs.GridFSBucket;
import com.mongodb.reactivestreams.client.gridfs.GridFSBuckets;
import com.mongodb.reactivestreams.client.gridfs.GridFSFindPublisher;
@@ -54,6 +56,7 @@ import com.mongodb.reactivestreams.client.gridfs.GridFSUploadPublisher;
 * @author Nick Stolwijk
 * @author Denis Zavedeev
 * @author Christoph Strobl
 * @author Mathieu Ouellet
 * @since 2.2
 */
public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements ReactiveGridFsOperations {
@@ -120,20 +123,27 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.ReactiveGridFsOperations#store(org.reactivestreams.Publisher, java.lang.String, java.lang.String, org.bson.Document)
	 * @see org.springframework.data.mongodb.gridfs.ReactiveGridFsOperations#save(org.springframework.data.mongodb.gridfs.GridFsObject)
	 */
	@Override
	public Mono<ObjectId> store(Publisher<DataBuffer> content, @Nullable String filename, @Nullable String contentType,
			@Nullable Document metadata) {
	public <T> Mono<T> store(GridFsObject<T, Publisher<DataBuffer>> upload) {

		Assert.notNull(content, "Content must not be null!");
		GridFSUploadOptions uploadOptions = computeUploadOptionsFor(upload.getOptions().getContentType(),
				upload.getOptions().getMetadata());

		GridFSUploadOptions uploadOptions = new GridFSUploadOptions();
		uploadOptions.metadata(metadata);
		if (upload.getOptions().getChunkSize() > 0) {
			uploadOptions.chunkSizeBytes(upload.getOptions().getChunkSize());
		}

		GridFSUploadPublisher<ObjectId> publisher = getGridFs().uploadFromPublisher(filename,
				Flux.from(content).map(DataBuffer::asByteBuffer), uploadOptions);
		return Mono.from(publisher);
		String filename = upload.getFilename();
		Flux<ByteBuffer> source = Flux.from(upload.getContent()).map(DataBuffer::asByteBuffer);
		T fileId = upload.getFileId();

		if (fileId == null) {
			return (Mono<T>) createMono(new AutoIdCreatingUploadCallback(filename, source, uploadOptions));
		}

		UploadCallback callback = new UploadCallback(BsonUtils.simpleToBsonValue(fileId), filename, source, uploadOptions);
		return createMono(callback).thenReturn(fileId);
	}

	/*
@@ -142,7 +152,11 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
	 */
	@Override
	public Flux<GridFSFile> find(Query query) {
		return Flux.from(prepareQuery(query));

		Document queryObject = getMappedQuery(query.getQueryObject());
		Document sortObject = getMappedQuery(query.getSortObject());

		return createFlux(new FindCallback(query, queryObject, sortObject));
	}

	/*
@@ -152,19 +166,22 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
	@Override
	public Mono<GridFSFile> findOne(Query query) {

		return Flux.from(prepareQuery(query).limit(2)) //
		Document queryObject = getMappedQuery(query.getQueryObject());
		Document sortObject = getMappedQuery(query.getSortObject());

		return createFlux(new FindLimitCallback(query, queryObject, sortObject, 2)) //
				.collectList() //
				.flatMap(it -> {
					if (it.isEmpty()) {
						return Mono.empty();
				.handle((files, sink) -> {

					if (files.size() == 1) {
						sink.next(files.get(0));
						return;
					}

					if (it.size() > 1) {
						return Mono.error(new IncorrectResultSizeDataAccessException(
					if (files.size() > 1) {
						sink.error(new IncorrectResultSizeDataAccessException(
								"Query " + SerializationUtils.serializeToJsonSafely(query) + " returned non unique result.", 1));
					}

					return Mono.just(it.get(0));
				});
	}

@@ -174,7 +191,11 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
	 */
	@Override
	public Mono<GridFSFile> findFirst(Query query) {
		return Flux.from(prepareQuery(query).limit(1)).next();

		Document queryObject = getMappedQuery(query.getQueryObject());
		Document sortObject = getMappedQuery(query.getSortObject());

		return createFlux(new FindLimitCallback(query, queryObject, sortObject, 1)).next();
	}

	/*
@@ -183,7 +204,7 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
	 */
	@Override
	public Mono<Void> delete(Query query) {
		return find(query).flatMap(it -> getGridFs().delete(it.getId())).then();
		return find(query).flatMap(it -> createMono(new DeleteCallback(it.getId()))).then();
	}

	/*
@@ -208,9 +229,8 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R

		Assert.notNull(file, "GridFSFile must not be null!");

		return Mono.fromSupplier(() -> {
			return new ReactiveGridFsResource(file.getFilename(), getGridFs().downloadToPublisher(file.getId()), dataBufferFactory);
		});
		return doGetBucket()
				.map(it -> new ReactiveGridFsResource(file, it.downloadToPublisher(file.getId()), dataBufferFactory));
	}

	/*
@@ -235,34 +255,152 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
		return getResource(locationPattern).flux();
	}

	protected GridFSFindPublisher prepareQuery(Query query) {
	/**
	 * Create a reusable Mono for a {@link ReactiveBucketCallback}. It's up to the developer to choose to obtain a new
	 * {@link Mono} or to reuse the {@link Mono}.
	 *
	 * @param callback must not be {@literal null}
	 * @return a {@link Mono} wrapping the {@link ReactiveBucketCallback}.
	 */
	public <T> Mono<T> createMono(ReactiveBucketCallback<T> callback) {

		Assert.notNull(query, "Query must not be null!");
		Assert.notNull(callback, "ReactiveBucketCallback must not be null!");

		Document queryObject = getMappedQuery(query.getQueryObject());
		Document sortObject = getMappedQuery(query.getSortObject());

		GridFSFindPublisher publisherToUse = getGridFs().find(queryObject).sort(sortObject);

		if (query.getLimit() > 0) {
			publisherToUse = publisherToUse.limit(query.getLimit());
		}

		if (query.getSkip() > 0) {
			publisherToUse = publisherToUse.skip(Math.toIntExact(query.getSkip()));
		}

		Integer cursorBatchSize = query.getMeta().getCursorBatchSize();
		if (cursorBatchSize != null) {
			publisherToUse = publisherToUse.batchSize(cursorBatchSize);
		}

		return publisherToUse;
		return doGetBucket().flatMap(bucket -> Mono.from(callback.doInBucket(bucket)));
	}

	protected GridFSBucket getGridFs() {
	/**
	 * Create a reusable Flux for a {@link ReactiveBucketCallback}. It's up to the developer to choose to obtain a new
	 * {@link Flux} or to reuse the {@link Flux}.
	 *
	 * @param callback must not be {@literal null}
	 * @return a {@link Flux} wrapping the {@link ReactiveBucketCallback}.
	 */
	public <T> Flux<T> createFlux(ReactiveBucketCallback<T> callback) {

		MongoDatabase db = dbFactory.getMongoDatabase();
		return bucket == null ? GridFSBuckets.create(db) : GridFSBuckets.create(db, bucket);
		Assert.notNull(callback, "ReactiveBucketCallback must not be null!");

		return doGetBucket().flatMapMany(callback::doInBucket);
	}

	protected Mono<GridFSBucket> doGetBucket() {
		return dbFactory.getMongoDatabase()
				.map(db -> bucket == null ? GridFSBuckets.create(db) : GridFSBuckets.create(db, bucket));
	}

	/**
	 * @param <T>
	 * @author Mathieu Ouellet
	 * @since 3.0
	 */
	interface ReactiveBucketCallback<T> {
		Publisher<T> doInBucket(GridFSBucket bucket);
	}

	private static class FindCallback implements ReactiveBucketCallback<GridFSFile> {

		private final Query query;
		private final Document queryObject;
		private final Document sortObject;

		public FindCallback(Query query, Document queryObject, Document sortObject) {

			this.query = query;
			this.queryObject = queryObject;
			this.sortObject = sortObject;
		}

		public GridFSFindPublisher doInBucket(GridFSBucket bucket) {

			GridFSFindPublisher findPublisher = bucket.find(queryObject).sort(sortObject);

			if (query.getLimit() > 0) {
				findPublisher = findPublisher.limit(query.getLimit());
			}

			if (query.getSkip() > 0) {
				findPublisher = findPublisher.skip(Math.toIntExact(query.getSkip()));
			}

			Integer cursorBatchSize = query.getMeta().getCursorBatchSize();
			if (cursorBatchSize != null) {
				findPublisher = findPublisher.batchSize(cursorBatchSize);
			}

			return findPublisher;
		}
	}

	private static class FindLimitCallback extends FindCallback {

		private final int limit;

		public FindLimitCallback(Query query, Document queryObject, Document sortObject, int limit) {

			super(query, queryObject, sortObject);
			this.limit = limit;
		}

		@Override
		public GridFSFindPublisher doInBucket(GridFSBucket bucket) {
			return super.doInBucket(bucket).limit(limit);
		}
	}

	private static class UploadCallback implements ReactiveBucketCallback<Void> {

		private final BsonValue fileId;
		private final String filename;
		private final Publisher<ByteBuffer> source;
		private final GridFSUploadOptions uploadOptions;

		public UploadCallback(BsonValue fileId, String filename, Publisher<ByteBuffer> source,
				GridFSUploadOptions uploadOptions) {

			this.fileId = fileId;
			this.filename = filename;
			this.source = source;
			this.uploadOptions = uploadOptions;
		}

		@Override
		public GridFSUploadPublisher<Void> doInBucket(GridFSBucket bucket) {
			return bucket.uploadFromPublisher(fileId, filename, source, uploadOptions);
		}
	}

	private static class AutoIdCreatingUploadCallback implements ReactiveBucketCallback<ObjectId> {

		private final String filename;
		private final Publisher<ByteBuffer> source;
		private final GridFSUploadOptions uploadOptions;

		public AutoIdCreatingUploadCallback(String filename, Publisher<ByteBuffer> source,
				GridFSUploadOptions uploadOptions) {

			this.filename = filename;
			this.source = source;
			this.uploadOptions = uploadOptions;
		}

		@Override
		public GridFSUploadPublisher<ObjectId> doInBucket(GridFSBucket bucket) {
			return bucket.uploadFromPublisher(filename, source, uploadOptions);
		}
	}

	private static class DeleteCallback implements ReactiveBucketCallback<Void> {

		private final BsonValue id;

		public DeleteCallback(BsonValue id) {
			this.id = id;
		}

		@Override
		public Publisher<Void> doInBucket(GridFSBucket bucket) {
			return bucket.delete(id);
		}
	}

}

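`createMono` and `createFlux` are public, so the same bucket-callback mechanism the template uses internally (`FindCallback`, `UploadCallback`, `DeleteCallback`) is available to callers. A hedged sketch running a raw bucket query; the metadata filter is illustrative:

```java
import org.bson.Document;

import com.mongodb.client.gridfs.model.GridFSFile;

import reactor.core.publisher.Flux;

// ReactiveBucketCallback has a single method, so a lambda suffices.
Flux<GridFSFile> files = template.createFlux(
		bucket -> bucket.find(new Document("metadata.source", "import-job")));
```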
@@ -0,0 +1,225 @@
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.gridfs;

import org.bson.Document;
import org.bson.types.ObjectId;
import org.reactivestreams.Publisher;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

import com.mongodb.client.gridfs.model.GridFSFile;

/**
 * Upload descriptor for a GridFS file upload.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 3.0
 */
public class ReactiveGridFsUpload<ID> implements GridFsObject<ID, Publisher<DataBuffer>> {

	private final @Nullable ID id;
	private final Publisher<DataBuffer> dataStream;
	private final String filename;
	private final Options options;

	private ReactiveGridFsUpload(@Nullable ID id, Publisher<DataBuffer> dataStream, String filename, Options options) {

		Assert.notNull(dataStream, "Data Stream must not be null");
		Assert.notNull(filename, "Filename must not be null");
		Assert.notNull(options, "Options must not be null");

		this.id = id;
		this.dataStream = dataStream;
		this.filename = filename;
		this.options = options;
	}

	/**
	 * The {@link GridFSFile#getId()} value converted into its simple java type. <br />
	 * A {@link org.bson.BsonString} will be converted to plain {@link String}.
	 *
	 * @return can be {@literal null}.
	 * @see org.springframework.data.mongodb.gridfs.GridFsObject#getFileId()
	 */
	@Override
	@Nullable
	public ID getFileId() {
		return id;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsObject#getFilename()
	 */
	@Override
	public String getFilename() {
		return filename;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsObject#getContent()
	 */
	@Override
	public Publisher<DataBuffer> getContent() {
		return dataStream;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.gridfs.GridFsObject#getOptions()
	 */
	@Override
	public Options getOptions() {
		return options;
	}

	/**
	 * Create a new instance of {@link ReactiveGridFsUpload} for the given {@link Publisher}.
	 *
	 * @param source must not be {@literal null}.
	 * @return new instance of {@link ReactiveGridFsUpload}.
	 */
	public static ReactiveGridFsUploadBuilder<ObjectId> fromPublisher(Publisher<DataBuffer> source) {
		return new ReactiveGridFsUploadBuilder<ObjectId>().content(source);
	}

	/**
	 * Builder to create {@link ReactiveGridFsUpload} in a fluent way.
	 *
	 * @param <T> the target id type.
	 */
	public static class ReactiveGridFsUploadBuilder<T> {

		private @Nullable Object id;
		private Publisher<DataBuffer> dataStream;
		private String filename;
		private Options options = Options.none();

		private ReactiveGridFsUploadBuilder() {}

		/**
		 * Define the content of the file to upload.
		 *
		 * @param source the upload content.
		 * @return this.
		 */
		public ReactiveGridFsUploadBuilder<T> content(Publisher<DataBuffer> source) {
			this.dataStream = source;
			return this;
		}

		/**
		 * Set the id to use.
		 *
		 * @param id the id to save the content to.
		 * @param <T1>
		 * @return this.
		 */
		public <T1> ReactiveGridFsUploadBuilder<T1> id(T1 id) {

			this.id = id;
			return (ReactiveGridFsUploadBuilder<T1>) this;
		}

		/**
		 * Set the filename.
		 *
		 * @param filename the filename to use.
		 * @return this.
		 */
		public ReactiveGridFsUploadBuilder<T> filename(String filename) {

			this.filename = filename;
			return this;
		}

		/**
		 * Set additional file information.
		 *
		 * @param options must not be {@literal null}.
		 * @return this.
		 */
		public ReactiveGridFsUploadBuilder<T> options(Options options) {

			Assert.notNull(options, "Options must not be null");

			this.options = options;
			return this;
		}

		/**
		 * Set the file metadata.
		 *
		 * @param metadata must not be {@literal null}.
		 * @return this.
		 */
		public ReactiveGridFsUploadBuilder<T> metadata(Document metadata) {

			this.options = this.options.metadata(metadata);
			return this;
		}

		/**
		 * Set the upload chunk size in bytes.
		 *
		 * @param chunkSize use negative number for default.
		 * @return this.
		 */
		public ReactiveGridFsUploadBuilder<T> chunkSize(int chunkSize) {

			this.options = this.options.chunkSize(chunkSize);
			return this;
		}

		/**
		 * Set id, filename, metadata and chunk size from given file.
		 *
		 * @param gridFSFile must not be {@literal null}.
		 * @return this.
		 */
		public ReactiveGridFsUploadBuilder<T> gridFsFile(GridFSFile gridFSFile) {

			Assert.notNull(gridFSFile, "GridFSFile must not be null");

			this.id = gridFSFile.getId();
			this.filename = gridFSFile.getFilename();
			this.options = this.options.metadata(gridFSFile.getMetadata());
			this.options = this.options.chunkSize(gridFSFile.getChunkSize());

			return this;
		}

		/**
		 * Set the content type.
		 *
		 * @param contentType must not be {@literal null}.
		 * @return this.
		 */
		public ReactiveGridFsUploadBuilder<T> contentType(String contentType) {

			this.options = this.options.contentType(contentType);
			return this;
		}

		public ReactiveGridFsUpload<T> build() {
			return new ReactiveGridFsUpload(id, dataStream, filename, options);
		}
	}
}
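The reactive builder mirrors `GridFsUpload`. With a caller-supplied id the template uploads under that id and the returned `Mono` completes with the same value (sketch; the id and names are illustrative):

```java
ReactiveGridFsUpload<String> upload = ReactiveGridFsUpload.fromPublisher(content)
		.id("invoice-2020-01")
		.filename("invoice.pdf")
		.contentType("application/pdf")
		.build();

Mono<String> id = template.store(upload); // completes with "invoice-2020-01"
```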
@@ -20,6 +20,7 @@ import java.util.stream.Collectors;

import org.bson.Document;

import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
@@ -71,6 +72,10 @@ public class StringBasedAggregation extends AbstractMongoQuery {
	protected Object doExecute(MongoQueryMethod method, ResultProcessor resultProcessor,
			ConvertingParameterAccessor accessor, Class<?> typeToRead) {

		if (method.isPageQuery() || method.isSliceQuery()) {
			throw new InvalidMongoDbApiUsageException(
					String.format("Repository aggregation method '%s' does not support '%s' return type. Please use eg. 'List' instead.",
							method.getName(), method.getReturnType().getType().getSimpleName()));
		}

		Class<?> sourceType = method.getDomainClass();
		Class<?> targetType = typeToRead;


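The new guard rejects `Page` and `Slice` return types for string-based aggregation methods at execution time. An illustrative repository (the `Person` domain type and pipeline are hypothetical, not from the commit):

```java
public interface PersonRepository extends Repository<Person, String> {

	@Aggregation("{ $group : { _id : '$department' } }")
	Page<Person> byDepartmentPaged(Pageable page); // now throws InvalidMongoDbApiUsageException

	@Aggregation("{ $group : { _id : '$department' } }")
	List<Person> byDepartment();                   // supported alternative
}
```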
@@ -54,9 +54,9 @@ public class MongoAnnotationProcessor extends AbstractQuerydslProcessor {

		processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, "Running " + getClass().getSimpleName());

		DefaultConfiguration configuration = new DefaultConfiguration(roundEnv, processingEnv.getOptions(),
				Collections.emptySet(), QueryEntities.class, Document.class, QuerySupertype.class,
				QueryEmbeddable.class, QueryEmbedded.class, QueryTransient.class);
		DefaultConfiguration configuration = new DefaultConfiguration(processingEnv, roundEnv, Collections.emptySet(),
				QueryEntities.class, Document.class, QuerySupertype.class, QueryEmbeddable.class, QueryEmbedded.class,
				QueryTransient.class);
		configuration.setUnknownAsEmbedded(true);

		return configuration;

@@ -18,8 +18,11 @@ package org.springframework.data.mongodb.repository.support;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.regex.Pattern;

import org.bson.BsonJavaScript;
@@ -49,6 +52,7 @@ import com.querydsl.mongodb.MongodbOps;
 * @author Mark Paluch
 * @author Christoph Strobl
 * @author Mikhail Kaduchka
 * @author Enrique Leon Molina
 * @since 2.1
 */
abstract class MongodbDocumentSerializer implements Visitor<Object, Void> {
@@ -181,22 +185,30 @@ abstract class MongodbDocumentSerializer implements Visitor<Object, Void> {
			return asDocument(asDBKey(expr, 0), "");
		} else if (op == Ops.AND) {

			Map<Object, Object> lhs = (Map<Object, Object>) handle(expr.getArg(0));
			Map<Object, Object> rhs = (Map<Object, Object>) handle(expr.getArg(1));
			Queue<Map<Object, Object>> pendingDocuments = collectConnectorArgs("$and", expr);
			List<Map<Object, Object>> unmergeableDocuments = new ArrayList<>();
			List<Map<Object, Object>> generatedDocuments = new ArrayList<>();

			LinkedHashSet<Object> lhs2 = new LinkedHashSet<>(lhs.keySet());
			lhs2.retainAll(rhs.keySet());
			while (!pendingDocuments.isEmpty()) {

			if (lhs2.isEmpty()) {
				lhs.putAll(rhs);
				return lhs;
			} else {
				List<Object> list = new ArrayList<>(2);
				list.add(handle(expr.getArg(0)));
				list.add(handle(expr.getArg(1)));
				return asDocument("$and", list);
				Map<Object, Object> lhs = pendingDocuments.poll();

				for (Map<Object, Object> rhs : pendingDocuments) {
					Set<Object> lhs2 = new LinkedHashSet<>(lhs.keySet());
					lhs2.retainAll(rhs.keySet());
					if (lhs2.isEmpty()) {
						lhs.putAll(rhs);
					} else {
						unmergeableDocuments.add(rhs);
					}
				}

				generatedDocuments.add(lhs);
				pendingDocuments = new LinkedList<>(unmergeableDocuments);
				unmergeableDocuments = new LinkedList<>();
			}

			return generatedDocuments.size() == 1 ? generatedDocuments.get(0) : asDocument("$and", generatedDocuments);
		} else if (op == Ops.NOT) {
			// Handle the not's child
			Operation<?> subOperation = (Operation<?>) expr.getArg(0);
@@ -211,12 +223,7 @@ abstract class MongodbDocumentSerializer implements Visitor<Object, Void> {
		}

		} else if (op == Ops.OR) {

			List<Object> list = new ArrayList<>(2);
			list.add(handle(expr.getArg(0)));
			list.add(handle(expr.getArg(1)));
			return asDocument("$or", list);

			return asDocument("$or", collectConnectorArgs("$or", expr));
		} else if (op == Ops.NE) {

			Path<?> path = (Path<?>) expr.getArg(0);
@@ -434,4 +441,19 @@ abstract class MongodbDocumentSerializer implements Visitor<Object, Void> {
	public Object visit(ParamExpression<?> expr, Void context) {
		throw new UnsupportedOperationException();
	}

	private LinkedList<Map<Object, Object>> collectConnectorArgs(String operator, Operation<?> operation) {

		LinkedList<Map<Object, Object>> pendingDocuments = new LinkedList<>();
		for (Expression<?> exp : operation.getArgs()) {
			Map<Object, Object> document = (Map<Object, Object>) handle(exp);
			if (document.keySet().size() == 1 && document.containsKey(operator)) {
				pendingDocuments.addAll((Collection<Map<Object, Object>>) document.get(operator));
			} else {
				pendingDocuments.add(document);
			}
		}

		return pendingDocuments;
	}
}

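Instead of nesting each `.and(...)` pair, the serializer now flattens all `$and` arguments and merges predicates whose keys do not collide into a single document; only genuinely conflicting keys keep an explicit `$and`. An illustration (the `person` Querydsl root is hypothetical):

```java
// person.firstname.eq("Walter").and(person.lastname.eq("White")).and(person.age.gt(40))
// used to render as nested $and documents; with disjoint keys it now collapses to:
Document merged = new Document("firstname", "Walter")
		.append("lastname", "White")
		.append("age", new Document("$gt", 40));

// A key conflict such as person.age.gt(40).and(person.age.lt(65)) cannot be merged
// and stays wrapped: { "$and" : [ { "age" : { "$gt" : 40 } }, { "age" : { "$lt" : 65 } } ] }
```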
@@ -23,11 +23,20 @@ import java.util.StringJoiner;
 import java.util.function.Function;
 import java.util.stream.StreamSupport;

+import org.bson.BsonBinary;
+import org.bson.BsonBoolean;
+import org.bson.BsonDouble;
+import org.bson.BsonInt32;
+import org.bson.BsonInt64;
+import org.bson.BsonObjectId;
+import org.bson.BsonString;
+import org.bson.BsonValue;
 import org.bson.Document;
 import org.bson.codecs.DocumentCodec;
 import org.bson.conversions.Bson;
 import org.bson.json.JsonParseException;
+import org.bson.types.ObjectId;

 import org.springframework.core.convert.converter.Converter;
 import org.springframework.data.mongodb.CodecRegistryProvider;
 import org.springframework.lang.Nullable;
@@ -54,13 +63,15 @@ public class BsonUtils {
 	}

 	public static Map<String, Object> asMap(Bson bson) {

 		if (bson instanceof Document) {
 			return (Document) bson;
 		}
 		if (bson instanceof BasicDBObject) {
 			return ((BasicDBObject) bson);
 		}
-		throw new IllegalArgumentException("o_O what's that? Cannot read values from " + bson.getClass());
+
+		return (Map) bson.toBsonDocument(Document.class, MongoClientSettings.getDefaultCodecRegistry());
 	}

 	public static void addToMap(Bson bson, String key, @Nullable Object value) {
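For reference, a small usage sketch of the rewritten asMap: a Bson implementation other than Document or BasicDBObject is now converted through the driver's default codec registry instead of being rejected. This assumes BsonUtils from org.springframework.data.mongodb.util and the MongoDB driver on the classpath.

import java.util.Map;

import org.bson.BsonDocument;
import org.bson.BsonInt32;

import org.springframework.data.mongodb.util.BsonUtils;

public class AsMapExample {

	public static void main(String[] args) {

		BsonDocument filter = new BsonDocument("age", new BsonInt32(30));

		// Previously an IllegalArgumentException for BsonDocument input;
		// now a Map view obtained via the driver codec registry.
		Map<String, Object> map = BsonUtils.asMap(filter);
		System.out.println(map); // {"age": 30}
	}
}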
@@ -118,6 +129,56 @@ public class BsonUtils {
 		}
 	}

+	/**
+	 * Convert a given simple value (eg. {@link String}, {@link Long}) to its corresponding {@link BsonValue}.
+	 *
+	 * @param source must not be {@literal null}.
+	 * @return the corresponding {@link BsonValue} representation.
+	 * @throws IllegalArgumentException if {@literal source} does not correspond to a {@link BsonValue} type.
+	 * @since 3.0
+	 */
+	public static BsonValue simpleToBsonValue(Object source) {
+
+		if (source instanceof BsonValue) {
+			return (BsonValue) source;
+		}
+
+		if (source instanceof ObjectId) {
+			return new BsonObjectId((ObjectId) source);
+		}
+
+		if (source instanceof String) {
+			return new BsonString((String) source);
+		}
+
+		if (source instanceof Double) {
+			return new BsonDouble((Double) source);
+		}
+
+		if (source instanceof Integer) {
+			return new BsonInt32((Integer) source);
+		}
+
+		if (source instanceof Long) {
+			return new BsonInt64((Long) source);
+		}
+
+		if (source instanceof byte[]) {
+			return new BsonBinary((byte[]) source);
+		}
+
+		if (source instanceof Boolean) {
+			return new BsonBoolean((Boolean) source);
+		}
+
+		if (source instanceof Float) {
+			return new BsonDouble((Float) source);
+		}
+
+		throw new IllegalArgumentException(String.format("Unable to convert %s (%s) to BsonValue.", source,
+				source != null ? source.getClass().getName() : "null"));
+	}
+	/**
+	 * Merge the given {@link Document documents} into one in the given order. Keys contained within multiple documents
+	 * are overwritten by their follow-ups.
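A short usage sketch for the simpleToBsonValue method added above; package and behavior as shown in the hunk, with illustrative values.

import org.bson.BsonInt64;
import org.bson.BsonValue;
import org.bson.types.ObjectId;

import org.springframework.data.mongodb.util.BsonUtils;

public class SimpleToBsonValueExample {

	public static void main(String[] args) {

		BsonValue id = BsonUtils.simpleToBsonValue(new ObjectId());
		System.out.println(id.getBsonType()); // OBJECT_ID

		BsonValue count = BsonUtils.simpleToBsonValue(42L);
		System.out.println(count instanceof BsonInt64); // true

		// Float is widened to BsonDouble, since BSON has no 32-bit float type.
		System.out.println(BsonUtils.simpleToBsonValue(1.5f)); // BsonDouble{value=1.5}
	}
}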
@@ -219,7 +219,7 @@ class JsonScanner {
 		boolean isExpression = false;
 		int parenthesisCount = 0;

-		while (c == '$' || c == '_' || Character.isLetterOrDigit(c) || c == '#' || c == '{' || c == '[' || c == ']'
+		while (c == '$' || c == '_' || Character.isLetterOrDigit(c) || c == '#' || c == '{' || c == '['
 				|| (isExpression && isExpressionAllowedChar(c))) {

 			if (charCount == 0 && c == '#') {
@@ -231,7 +231,7 @@ class JsonScanner {

 				parenthesisCount--;
 				if (parenthesisCount == 0) {
-					buffer.read();
+					c = buffer.read();
 					break;
 				}
 			}
 		}
@@ -224,6 +224,16 @@ The base package in which to scan for entities annotated with @Document
 						</xsd:documentation>
 					</xsd:annotation>
 				</xsd:attribute>
+				<xsd:attribute name="auto-index-creation" use="optional">
+					<xsd:annotation>
+						<xsd:documentation>
+							Enable/Disable index creation for annotated properties/entities.
+						</xsd:documentation>
+					</xsd:annotation>
+					<xsd:simpleType>
+						<xsd:union memberTypes="xsd:boolean xsd:string"/>
+					</xsd:simpleType>
+				</xsd:attribute>
 			</xsd:complexType>
 		</xsd:element>
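The attribute accepts a boolean or a property placeholder string, hence the xsd:union of xsd:boolean and xsd:string. The JavaConfig counterpart visible later in this diff is overriding autoIndexCreation(); a sketch following the configuration classes used elsewhere in these hunks, with illustrative connection details:

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

@Configuration
public class AutoIndexConfig extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "database";
	}

	@Override
	public MongoClient mongoClient() {
		return MongoClients.create();
	}

	@Override
	protected boolean autoIndexCreation() {
		return true; // opt in to index creation for annotated properties/entities
	}
}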
@@ -15,6 +15,9 @@
  * limitations under the License.
  */

+import java.util.Collections;
+import java.util.Set;
+
 import org.springframework.context.annotation.Configuration;
 import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;

@@ -46,4 +49,9 @@ public class ConfigClassInDefaultPackage extends AbstractMongoClientConfiguration {
 	public MongoClient mongoClient() {
 		return MongoClients.create();
 	}
+
+	@Override
+	protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException {
+		return Collections.emptySet();
+	}
 }
@@ -13,7 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-import org.junit.Test;
+
+import org.junit.jupiter.api.Test;
+
 import org.springframework.context.annotation.AnnotationConfigApplicationContext;

 /**
@@ -16,11 +16,11 @@
 package org.springframework.data.mongodb;

 import static de.schauderhaft.degraph.check.JCheck.*;
-import static org.junit.Assert.*;
+import static org.hamcrest.MatcherAssert.*;

 import de.schauderhaft.degraph.configuration.NamedPattern;

-import org.junit.Test;
+import org.junit.jupiter.api.Test;

 /**
  * Tests package dependency constraints.
@@ -28,10 +28,10 @@ import org.junit.Test;
  * @author Jens Schauder
  * @author Oliver Gierke
  */
-public class DependencyTests {
+class DependencyTests {

 	@Test
-	public void noInternalPackageCycles() {
+	void noInternalPackageCycles() {

 		assertThat(classpath() //
 				.noJars() //
@@ -43,7 +43,7 @@ public class DependencyTests {
 	}

 	@Test
-	public void onlyConfigMayUseRepository() {
+	void onlyConfigMayUseRepository() {

 		assertThat(classpath() //
 				.including("org.springframework.data.**") //
@@ -60,7 +60,7 @@ public class DependencyTests {
 	}

 	@Test
-	public void commonsInternaly() {
+	void commonsInternaly() {

 		assertThat(classpath() //
 				.noJars() //
@@ -22,12 +22,11 @@ import static org.mockito.Mockito.*;
 import javax.transaction.Status;
 import javax.transaction.UserTransaction;

-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;

 import org.springframework.transaction.TransactionStatus;
 import org.springframework.transaction.jta.JtaTransactionManager;
@@ -42,8 +41,8 @@ import com.mongodb.session.ServerSession;
 /**
  * @author Christoph Strobl
  */
-@RunWith(MockitoJUnitRunner.class)
-public class MongoDatabaseUtilsUnitTests {
+@ExtendWith(MockitoExtension.class)
+class MongoDatabaseUtilsUnitTests {

 	@Mock ClientSession session;
 	@Mock ServerSession serverSession;
@@ -52,23 +51,8 @@ public class MongoDatabaseUtilsUnitTests {

 	@Mock UserTransaction userTransaction;

-	@Before
-	public void setUp() {
-
-		when(dbFactory.getSession(any())).thenReturn(session);
-		when(dbFactory.withSession(session)).thenReturn(dbFactory);
-		when(dbFactory.getMongoDatabase()).thenReturn(db);
-
-		when(session.getServerSession()).thenReturn(serverSession);
-		when(session.hasActiveTransaction()).thenReturn(true);
-
-		when(serverSession.isClosed()).thenReturn(false);
-	}
-
-	@After
-	public void verifyTransactionSynchronizationManagerState() {
+	@AfterEach
+	void verifyTransactionSynchronizationManagerState() {

 		assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty()).isTrue();
 		assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isFalse();
@@ -79,7 +63,7 @@ public class MongoDatabaseUtilsUnitTests {
 	}

 	@Test // DATAMONGO-2130
-	public void isTransactionActiveShouldDetectTxViaFactory() {
+	void isTransactionActiveShouldDetectTxViaFactory() {

 		when(dbFactory.isTransactionActive()).thenReturn(true);

@@ -87,7 +71,7 @@ public class MongoDatabaseUtilsUnitTests {
 	}

 	@Test // DATAMONGO-2130
-	public void isTransactionActiveShouldReturnFalseIfNoTxActive() {
+	void isTransactionActiveShouldReturnFalseIfNoTxActive() {

 		when(dbFactory.isTransactionActive()).thenReturn(false);

@@ -95,7 +79,12 @@ public class MongoDatabaseUtilsUnitTests {
 	}

 	@Test // DATAMONGO-2130
-	public void isTransactionActiveShouldLookupTxForActiveTransactionSynchronizationViaTxManager() {
+	void isTransactionActiveShouldLookupTxForActiveTransactionSynchronizationViaTxManager() {
+
+		when(dbFactory.getSession(any())).thenReturn(session);
+		when(session.getServerSession()).thenReturn(serverSession);
+		when(session.hasActiveTransaction()).thenReturn(true);
+		when(serverSession.isClosed()).thenReturn(false);

 		when(dbFactory.isTransactionActive()).thenReturn(false);

@@ -112,7 +101,7 @@ public class MongoDatabaseUtilsUnitTests {
 	}

 	@Test // DATAMONGO-1920
-	public void shouldNotStartSessionWhenNoTransactionOngoing() {
+	void shouldNotStartSessionWhenNoTransactionOngoing() {

 		MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.ON_ACTUAL_TRANSACTION);

@@ -121,7 +110,14 @@ public class MongoDatabaseUtilsUnitTests {
 	}

 	@Test // DATAMONGO-1920
-	public void shouldParticipateInOngoingJtaTransactionWithCommitWhenSessionSychronizationIsAny() throws Exception {
+	void shouldParticipateInOngoingJtaTransactionWithCommitWhenSessionSychronizationIsAny() throws Exception {
+
+		when(dbFactory.getSession(any())).thenReturn(session);
+		when(dbFactory.withSession(session)).thenReturn(dbFactory);
+		when(dbFactory.getMongoDatabase()).thenReturn(db);
+		when(session.getServerSession()).thenReturn(serverSession);
+		when(session.hasActiveTransaction()).thenReturn(true);
+		when(serverSession.isClosed()).thenReturn(false);

 		when(userTransaction.getStatus()).thenReturn(Status.STATUS_NO_TRANSACTION, Status.STATUS_ACTIVE,
 				Status.STATUS_ACTIVE);
@@ -152,7 +148,14 @@ public class MongoDatabaseUtilsUnitTests {
 	}

 	@Test // DATAMONGO-1920
-	public void shouldParticipateInOngoingJtaTransactionWithRollbackWhenSessionSychronizationIsAny() throws Exception {
+	void shouldParticipateInOngoingJtaTransactionWithRollbackWhenSessionSychronizationIsAny() throws Exception {
+
+		when(dbFactory.getSession(any())).thenReturn(session);
+		when(dbFactory.withSession(session)).thenReturn(dbFactory);
+		when(dbFactory.getMongoDatabase()).thenReturn(db);
+		when(session.getServerSession()).thenReturn(serverSession);
+		when(session.hasActiveTransaction()).thenReturn(true);
+		when(serverSession.isClosed()).thenReturn(false);

 		when(userTransaction.getStatus()).thenReturn(Status.STATUS_NO_TRANSACTION, Status.STATUS_ACTIVE,
 				Status.STATUS_ACTIVE);
@@ -185,8 +188,7 @@ public class MongoDatabaseUtilsUnitTests {
 	}

 	@Test // DATAMONGO-1920
-	public void shouldNotParticipateInOngoingJtaTransactionWithRollbackWhenSessionSychronizationIsNative()
-			throws Exception {
+	void shouldNotParticipateInOngoingJtaTransactionWithRollbackWhenSessionSychronizationIsNative() throws Exception {

 		when(userTransaction.getStatus()).thenReturn(Status.STATUS_NO_TRANSACTION, Status.STATUS_ACTIVE,
 				Status.STATUS_ACTIVE);
@@ -219,7 +221,13 @@ public class MongoDatabaseUtilsUnitTests {
 	}

 	@Test // DATAMONGO-1920
-	public void shouldParticipateInOngoingMongoTransactionWhenSessionSychronizationIsNative() {
+	void shouldParticipateInOngoingMongoTransactionWhenSessionSychronizationIsNative() {
+
+		when(dbFactory.getSession(any())).thenReturn(session);
+		when(dbFactory.withSession(session)).thenReturn(dbFactory);
+		when(dbFactory.getMongoDatabase()).thenReturn(db);
+		when(session.getServerSession()).thenReturn(serverSession);
+		when(serverSession.isClosed()).thenReturn(false);

 		MongoTransactionManager txManager = new MongoTransactionManager(dbFactory);
 		TransactionTemplate txTemplate = new TransactionTemplate(txManager);
@@ -245,7 +253,13 @@ public class MongoDatabaseUtilsUnitTests {
 	}

 	@Test // DATAMONGO-1920
-	public void shouldParticipateInOngoingMongoTransactionWhenSessionSychronizationIsAny() {
+	void shouldParticipateInOngoingMongoTransactionWhenSessionSynchronizationIsAny() {
+
+		when(dbFactory.getSession(any())).thenReturn(session);
+		when(dbFactory.withSession(session)).thenReturn(dbFactory);
+		when(dbFactory.getMongoDatabase()).thenReturn(db);
+		when(session.getServerSession()).thenReturn(serverSession);
+		when(serverSession.isClosed()).thenReturn(false);

 		MongoTransactionManager txManager = new MongoTransactionManager(dbFactory);
 		TransactionTemplate txTemplate = new TransactionTemplate(txManager);
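A note on the recurring pattern in the test hunks above: stubbings move out of the shared @Before method into the individual tests. MockitoExtension applies strict stubbing by default, so a shared stub that a given test never consumes would likely fail with UnnecessaryStubbingException; declaring stubs per test keeps each one self-contained. A minimal sketch of the resulting style, with illustrative types and names:

import static org.assertj.core.api.Assertions.*;
import static org.mockito.Mockito.*;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

// Illustrative only: shows the per-test stubbing style used after the migration.
@ExtendWith(MockitoExtension.class)
class StrictStubbingStyleTests {

	interface SessionProvider {
		String currentSession();
	}

	@Mock SessionProvider provider;

	@Test // each test declares exactly the stubbings it consumes
	void usesOnlyWhatItStubs() {

		when(provider.currentSession()).thenReturn("session-1");

		assertThat(provider.currentSession()).isEqualTo("session-1");
	}
}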
@@ -18,12 +18,12 @@ package org.springframework.data.mongodb;
 import static org.assertj.core.api.Assertions.*;
 import static org.mockito.Mockito.*;

-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;

 import org.springframework.data.mongodb.core.MongoTemplate;
 import org.springframework.transaction.TransactionDefinition;
@@ -41,7 +41,7 @@ import com.mongodb.session.ServerSession;
 /**
  * @author Christoph Strobl
 */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class MongoTransactionManagerUnitTests {

 	@Mock ClientSession session;
@@ -52,24 +52,16 @@ public class MongoTransactionManagerUnitTests {
 	@Mock MongoDatabase db;
 	@Mock MongoDatabase db2;

-	@Before
+	@BeforeEach
 	public void setUp() {

 		when(dbFactory.getSession(any())).thenReturn(session, session2);
-
 		when(dbFactory.withSession(session)).thenReturn(dbFactory);
-		when(dbFactory.withSession(session2)).thenReturn(dbFactory2);
-
 		when(dbFactory.getMongoDatabase()).thenReturn(db);
-		when(dbFactory2.getMongoDatabase()).thenReturn(db2);
-
 		when(session.getServerSession()).thenReturn(serverSession);
-		when(session2.getServerSession()).thenReturn(serverSession);
-
 		when(serverSession.isClosed()).thenReturn(false);
 	}

-	@After
+	@AfterEach
 	public void verifyTransactionSynchronizationManager() {

 		assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty()).isTrue();
@@ -238,6 +230,11 @@ public class MongoTransactionManagerUnitTests {
 	@Test // DATAMONGO-1920
 	public void suspendTransactionWhilePropagationRequiresNew() {

+		when(dbFactory.withSession(session2)).thenReturn(dbFactory2);
+		when(dbFactory2.getMongoDatabase()).thenReturn(db2);
+		when(session2.getServerSession()).thenReturn(serverSession);
+		when(serverSession.isClosed()).thenReturn(false);
+
 		MongoTransactionManager txManager = new MongoTransactionManager(dbFactory);
 		TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition());
@@ -22,11 +22,10 @@ import static org.mockito.Mockito.*;
 import reactor.core.publisher.Mono;
 import reactor.test.StepVerifier;

-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;

 import org.springframework.transaction.reactive.TransactionSynchronizationManager;
 import org.springframework.transaction.reactive.TransactionalOperator;
@@ -41,27 +40,18 @@ import com.mongodb.session.ServerSession;
 *
 * @author Mark Paluch
 * @author Christoph Strobl
+ * @author Mathieu Ouellet
 */
-@RunWith(MockitoJUnitRunner.class)
-public class ReactiveMongoDatabaseUtilsUnitTests {
+@ExtendWith(MockitoExtension.class)
+class ReactiveMongoDatabaseUtilsUnitTests {

 	@Mock ClientSession session;
 	@Mock ServerSession serverSession;
 	@Mock ReactiveMongoDatabaseFactory databaseFactory;
 	@Mock MongoDatabase db;

-	@Before
-	public void setUp() {
-
-		when(databaseFactory.getSession(any())).thenReturn(Mono.just(session));
-		when(databaseFactory.getMongoDatabase()).thenReturn(db);
-
-		when(session.getServerSession()).thenReturn(serverSession);
-		when(session.hasActiveTransaction()).thenReturn(true);
-	}
-
 	@Test // DATAMONGO-2265
-	public void isTransactionActiveShouldDetectTxViaFactory() {
+	void isTransactionActiveShouldDetectTxViaFactory() {

 		when(databaseFactory.isTransactionActive()).thenReturn(true);

@@ -71,7 +61,7 @@ public class ReactiveMongoDatabaseUtilsUnitTests {
 	}

 	@Test // DATAMONGO-2265
-	public void isTransactionActiveShouldReturnFalseIfNoTxActive() {
+	void isTransactionActiveShouldReturnFalseIfNoTxActive() {

 		when(databaseFactory.isTransactionActive()).thenReturn(false);

@@ -81,8 +71,11 @@ public class ReactiveMongoDatabaseUtilsUnitTests {
 	}

 	@Test // DATAMONGO-2265
-	public void isTransactionActiveShouldLookupTxForActiveTransactionSynchronizationViaTxManager() {
+	void isTransactionActiveShouldLookupTxForActiveTransactionSynchronizationViaTxManager() {
+
+		when(session.getServerSession()).thenReturn(serverSession);
+		when(session.hasActiveTransaction()).thenReturn(true);
+		when(databaseFactory.getSession(any())).thenReturn(Mono.just(session));
 		when(databaseFactory.isTransactionActive()).thenReturn(false);
 		when(session.commitTransaction()).thenReturn(Mono.empty());

@@ -96,7 +89,9 @@ public class ReactiveMongoDatabaseUtilsUnitTests {
 	}

 	@Test // DATAMONGO-2265
-	public void shouldNotStartSessionWhenNoTransactionOngoing() {
+	void shouldNotStartSessionWhenNoTransactionOngoing() {
+
+		when(databaseFactory.getMongoDatabase()).thenReturn(Mono.just(db));

 		ReactiveMongoDatabaseUtils.getDatabase(databaseFactory, SessionSynchronization.ON_ACTUAL_TRANSACTION) //
 				.as(StepVerifier::create) //
@@ -108,7 +103,10 @@ public class ReactiveMongoDatabaseUtilsUnitTests {
 	}

 	@Test // DATAMONGO-2265
-	public void shouldParticipateInOngoingMongoTransactionWhenSessionSychronizationIsNative() {
+	void shouldParticipateInOngoingMongoTransactionWhenSessionSychronizationIsNative() {
+
+		when(session.getServerSession()).thenReturn(serverSession);
+		when(databaseFactory.getSession(any())).thenReturn(Mono.just(session));

 		ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory);
 		when(session.abortTransaction()).thenReturn(Mono.empty());
@@ -15,24 +15,21 @@
 */
 package org.springframework.data.mongodb;

 import static org.assertj.core.api.Assertions.*;
 import static org.mockito.Mockito.*;

 import reactor.core.publisher.Mono;
 import reactor.test.StepVerifier;

-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;

 import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
 import org.springframework.transaction.TransactionDefinition;
 import org.springframework.transaction.reactive.TransactionalOperator;
 import org.springframework.transaction.support.DefaultTransactionDefinition;
 import org.springframework.transaction.support.TransactionSynchronizationManager;

 import com.mongodb.reactivestreams.client.ClientSession;
 import com.mongodb.reactivestreams.client.MongoDatabase;
@@ -43,9 +40,10 @@ import com.mongodb.session.ServerSession;
 *
 * @author Mark Paluch
 * @author Christoph Strobl
+ * @author Mathieu Ouellet
 */
-@RunWith(MockitoJUnitRunner.class)
-public class ReactiveMongoTransactionManagerUnitTests {
+@ExtendWith(MockitoExtension.class)
+class ReactiveMongoTransactionManagerUnitTests {

 	@Mock ClientSession session;
 	@Mock ClientSession session2;
@@ -55,30 +53,16 @@ public class ReactiveMongoTransactionManagerUnitTests {
 	@Mock MongoDatabase db;
 	@Mock MongoDatabase db2;

-	@Before
-	public void setUp() {
-
+	@BeforeEach
+	void setUp() {
 		when(databaseFactory.getSession(any())).thenReturn(Mono.just(session), Mono.just(session2));
-
 		when(databaseFactory.withSession(session)).thenReturn(databaseFactory);
-		when(databaseFactory.withSession(session2)).thenReturn(databaseFactory2);
-
-		when(databaseFactory.getMongoDatabase()).thenReturn(db);
-		when(databaseFactory2.getMongoDatabase()).thenReturn(db2);
-
+		when(databaseFactory.getMongoDatabase()).thenReturn(Mono.just(db));
 		when(session.getServerSession()).thenReturn(serverSession);
 		when(session2.getServerSession()).thenReturn(serverSession);
 	}

-	@After
+	@AfterEach
 	public void verifyTransactionSynchronizationManager() {

 		assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty()).isTrue();
 		assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isFalse();
 	}

 	@Test // DATAMONGO-2265
-	public void triggerCommitCorrectly() {
+	void triggerCommitCorrectly() {

 		ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory);
 		ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory);
@@ -103,7 +87,7 @@ public class ReactiveMongoTransactionManagerUnitTests {
 	}

 	@Test // DATAMONGO-2265
-	public void participateInOnGoingTransactionWithCommit() {
+	void participateInOnGoingTransactionWithCommit() {

 		ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory);
 		ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory);
@@ -131,7 +115,7 @@ public class ReactiveMongoTransactionManagerUnitTests {
 	}

 	@Test // DATAMONGO-2265
-	public void participateInOnGoingTransactionWithRollbackOnly() {
+	void participateInOnGoingTransactionWithRollbackOnly() {

 		ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory);
 		ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory);
@@ -156,7 +140,7 @@ public class ReactiveMongoTransactionManagerUnitTests {
 	}

 	@Test // DATAMONGO-2265
-	public void suspendTransactionWhilePropagationNotSupported() {
+	void suspendTransactionWhilePropagationNotSupported() {

 		ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory);
 		ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory);
@@ -195,7 +179,11 @@ public class ReactiveMongoTransactionManagerUnitTests {
 	}

 	@Test // DATAMONGO-2265
-	public void suspendTransactionWhilePropagationRequiresNew() {
+	void suspendTransactionWhilePropagationRequiresNew() {
+
+		when(databaseFactory.withSession(session2)).thenReturn(databaseFactory2);
+		when(databaseFactory2.getMongoDatabase()).thenReturn(Mono.just(db2));
+		when(session2.getServerSession()).thenReturn(serverSession);

 		ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory);
 		ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory);
@@ -237,7 +225,7 @@ public class ReactiveMongoTransactionManagerUnitTests {
 	}

 	@Test // DATAMONGO-2265
-	public void readonlyShouldInitiateASessionStartAndCommitTransaction() {
+	void readonlyShouldInitiateASessionStartAndCommitTransaction() {

 		ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory);
 		ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory);
@@ -23,6 +23,8 @@ import reactor.core.publisher.Mono;
 import reactor.test.StepVerifier;

 import java.time.Duration;
+import java.util.Collections;
+import java.util.Set;

 import org.bson.types.ObjectId;
 import org.junit.jupiter.api.AfterAll;
@@ -238,6 +240,11 @@ public class ReactiveTransactionIntegrationTests {
 		public ReactiveMongoTransactionManager transactionManager(ReactiveMongoDatabaseFactory factory) {
 			return new ReactiveMongoTransactionManager(factory);
 		}
+
+		@Override
+		protected Set<Class<?>> getInitialEntitySet() {
+			return Collections.singleton(Person.class);
+		}
 	}

 	@RequiredArgsConstructor
@@ -24,11 +24,12 @@ import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;

 import org.bson.Document;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;

 import org.springframework.aop.framework.ProxyFactory;
 import org.springframework.data.mongodb.SessionAwareMethodInterceptor.MethodCache;
 import org.springframework.test.util.ReflectionTestUtils;
@@ -44,7 +45,7 @@ import com.mongodb.client.MongoDatabase;
 *
 * @author Christoph Strobl
 */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class SessionAwareMethodInterceptorUnitTests {

 	@Mock ClientSession session;
@@ -54,7 +55,7 @@ public class SessionAwareMethodInterceptorUnitTests {
 	MongoCollection collection;
 	MongoDatabase database;

-	@Before
+	@BeforeEach
 	public void setUp() {

 		collection = createProxyInstance(session, targetCollection, MongoCollection.class);
@@ -17,6 +17,9 @@ package org.springframework.data.mongodb.config;

 import static org.springframework.data.mongodb.test.util.Assertions.*;

+import java.util.Collections;
+import java.util.Set;
+
 import org.bson.Document;
 import org.junit.After;
 import org.junit.Before;
@@ -54,6 +57,16 @@ public abstract class AbstractIntegrationTests {
 		public MongoClient mongoClient() {
 			return MongoTestUtils.client();
 		}
+
+		@Override
+		protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException {
+			return Collections.emptySet();
+		}
+
+		@Override
+		protected boolean autoIndexCreation() {
+			return true;
+		}
 	}

 	@Autowired MongoOperations operations;
@@ -25,7 +25,8 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.Set;

-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+
 import org.springframework.beans.factory.NoSuchBeanDefinitionException;
 import org.springframework.context.annotation.AnnotationConfigApplicationContext;
 import org.springframework.context.annotation.Bean;
@@ -33,6 +34,7 @@ import org.springframework.context.annotation.Configuration;
 import org.springframework.context.support.AbstractApplicationContext;
 import org.springframework.data.mongodb.MongoDatabaseFactory;
 import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
+import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
 import org.springframework.data.mongodb.core.convert.MongoTypeMapper;
 import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity;
 import org.springframework.data.mongodb.core.mapping.Document;
@@ -89,7 +91,7 @@ public class AbstractMongoConfigurationUnitTests {
 	public void returnsUninitializedMappingContext() throws Exception {

 		SampleMongoConfiguration configuration = new SampleMongoConfiguration();
-		MongoMappingContext context = configuration.mongoMappingContext();
+		MongoMappingContext context = configuration.mongoMappingContext(configuration.customConversions());

 		assertThat(context.getPersistentEntities()).isEmpty();
 		context.initialize();
@@ -158,11 +160,10 @@ public class AbstractMongoConfigurationUnitTests {
 			return MongoClients.create();
 		}

 		@Bean
 		@Override
-		public MappingMongoConverter mappingMongoConverter() throws Exception {
-
-			MappingMongoConverter converter = super.mappingMongoConverter();
+		public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory,
+				MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
+
+			MappingMongoConverter converter = super.mappingMongoConverter(databaseFactory, customConversions, mappingContext);
 			converter.setTypeMapper(typeMapper());

 			return converter;
@@ -172,6 +173,7 @@ public class AbstractMongoConfigurationUnitTests {
 		public MongoTypeMapper typeMapper() {
 			return new CustomMongoTypeMapper();
 		}
+
 	}

 	static class ConfigurationWithMultipleBasePackages extends AbstractMongoClientConfiguration {
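The configuration hunks above migrate to the Spring Data MongoDB 3.0 style in which mappingMongoConverter and mongoMappingContext receive their collaborators as method parameters instead of calling sibling bean methods. A sketch of a configuration class overriding the new signature, with the customization itself purely illustrative:

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

@Configuration
public class ConverterCustomizingConfig extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "database";
	}

	@Override
	public MongoClient mongoClient() {
		return MongoClients.create();
	}

	@Bean
	@Override
	public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory,
			MongoCustomConversions customConversions, MongoMappingContext mappingContext) {

		// Collaborators arrive as parameters; no super bean-method lookups needed.
		MappingMongoConverter converter = super.mappingMongoConverter(databaseFactory, customConversions,
				mappingContext);
		converter.setMapKeyDotReplacement("_"); // illustrative customization
		return converter;
	}
}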
@@ -25,15 +25,17 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.Set;

-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
+
 import org.springframework.beans.factory.NoSuchBeanDefinitionException;
 import org.springframework.context.annotation.AnnotationConfigApplicationContext;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.support.AbstractApplicationContext;
 import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
 import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
 import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
+import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
 import org.springframework.data.mongodb.core.convert.MongoTypeMapper;
 import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity;
 import org.springframework.data.mongodb.core.mapping.Document;
@@ -89,7 +91,7 @@ public class AbstractReactiveMongoConfigurationUnitTests {
 	public void returnsUninitializedMappingContext() throws Exception {

 		SampleMongoConfiguration configuration = new SampleMongoConfiguration();
-		MongoMappingContext context = configuration.mongoMappingContext();
+		MongoMappingContext context = configuration.mongoMappingContext(configuration.customConversions());

 		assertThat(context.getPersistentEntities()).isEmpty();
 		context.initialize();
@@ -158,11 +160,11 @@ public class AbstractReactiveMongoConfigurationUnitTests {
 			return MongoTestUtils.reactiveClient();
 		}

 		@Bean
 		@Override
-		public MappingMongoConverter mappingMongoConverter() throws Exception {
-
-			MappingMongoConverter converter = super.mappingMongoConverter();
+		public MappingMongoConverter mappingMongoConverter(ReactiveMongoDatabaseFactory databaseFactory,
+				MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
+
+			MappingMongoConverter converter = super.mappingMongoConverter(databaseFactory, customConversions, mappingContext);
 			converter.setTypeMapper(typeMapper());

 			return converter;
@@ -18,7 +18,7 @@ package org.springframework.data.mongodb.config;
 import static org.assertj.core.api.Assertions.*;

 import org.joda.time.DateTime;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;

 import org.springframework.context.support.AbstractApplicationContext;
 import org.springframework.context.support.ClassPathXmlApplicationContext;
@@ -18,8 +18,11 @@ package org.springframework.data.mongodb.config;
 import static org.assertj.core.api.Assertions.*;
 import static org.mockito.Mockito.*;

+import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.Optional;
+import java.util.Set;
 import java.util.function.Function;

 import org.junit.jupiter.api.BeforeEach;
@@ -28,7 +31,9 @@ import org.junit.jupiter.api.extension.ExtendWith;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.AnnotationConfigApplicationContext;
 import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.ComponentScan.Filter;
 import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.FilterType;
 import org.springframework.data.annotation.Version;
 import org.springframework.data.domain.AuditorAware;
 import org.springframework.data.mongodb.core.AuditablePerson;
@@ -68,7 +73,8 @@ public class AuditingViaJavaConfigRepositoriesTests {

 	@Configuration
 	@EnableMongoAuditing(auditorAwareRef = "auditorProvider")
-	@EnableMongoRepositories(basePackageClasses = AuditablePersonRepository.class, considerNestedRepositories = true)
+	@EnableMongoRepositories(basePackageClasses = AuditablePersonRepository.class, considerNestedRepositories = true,
+			includeFilters = @Filter(type = FilterType.ASSIGNABLE_TYPE, classes = AuditablePersonRepository.class))
 	static class Config extends AbstractMongoClientConfiguration {

 		@Override
@@ -86,6 +92,12 @@ public class AuditingViaJavaConfigRepositoriesTests {
 		public AuditorAware<AuditablePerson> auditorProvider() {
 			return mock(AuditorAware.class);
 		}
+
+		@Override
+		protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException {
+			return new HashSet<>(
+					Arrays.asList(AuditablePerson.class, VersionedAuditablePerson.class, SimpleVersionedAuditablePerson.class));
+		}
 	}

 	@BeforeEach
@@ -212,6 +224,11 @@ public class AuditingViaJavaConfigRepositoriesTests {
 		protected String getDatabaseName() {
 			return "database";
 		}
+
+		@Override
+		protected Set<Class<?>> getInitialEntitySet() throws ClassNotFoundException {
+			return Collections.emptySet();
+		}
 	}

 	static class VersionedAuditablePerson extends AuditablePerson {
@@ -21,7 +21,7 @@ import java.util.Collections;
 import java.util.Set;

 import org.bson.Document;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;

 import org.springframework.beans.factory.config.BeanDefinition;
 import org.springframework.beans.factory.config.BeanReference;
@@ -17,10 +17,10 @@ package org.springframework.data.mongodb.config;

 import static org.assertj.core.api.Assertions.*;

-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;

 import org.springframework.beans.factory.support.BeanDefinitionRegistry;
 import org.springframework.core.type.AnnotationMetadata;
@@ -30,21 +30,21 @@ import org.springframework.core.type.AnnotationMetadata;
 *
 * @author Oliver Gierke
 */
-@RunWith(MockitoJUnitRunner.class)
-public class MongoAuditingRegistrarUnitTests {
+@ExtendWith(MockitoExtension.class)
+class MongoAuditingRegistrarUnitTests {

-	MongoAuditingRegistrar registrar = new MongoAuditingRegistrar();
+	private MongoAuditingRegistrar registrar = new MongoAuditingRegistrar();

 	@Mock AnnotationMetadata metadata;
 	@Mock BeanDefinitionRegistry registry;

 	@Test // DATAMONGO-792
-	public void rejectsNullAnnotationMetadata() {
+	void rejectsNullAnnotationMetadata() {
 		assertThatIllegalArgumentException().isThrownBy(() -> registrar.registerBeanDefinitions(null, registry));
 	}

 	@Test // DATAMONGO-792
-	public void rejectsNullBeanDefinitionRegistry() {
+	void rejectsNullBeanDefinitionRegistry() {
 		assertThatIllegalArgumentException().isThrownBy(() -> registrar.registerBeanDefinitions(metadata, null));
 	}
 }
@@ -19,8 +19,8 @@ import static org.assertj.core.api.Assertions.*;

 import java.util.concurrent.TimeUnit;

-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;

 import org.springframework.beans.factory.support.BeanDefinitionReader;
 import org.springframework.beans.factory.support.DefaultListableBeanFactory;
 import org.springframework.beans.factory.xml.XmlBeanDefinitionReader;
@@ -47,7 +47,7 @@ public class MongoClientParserIntegrationTests {
 	DefaultListableBeanFactory factory;
 	BeanDefinitionReader reader;

-	@Before
+	@BeforeEach
 	public void setUp() {

 		this.factory = new DefaultListableBeanFactory();
@@ -17,8 +17,8 @@ package org.springframework.data.mongodb.config;

 import static org.assertj.core.api.Assertions.*;

-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;

 import org.springframework.beans.factory.config.BeanDefinition;
 import org.springframework.beans.factory.config.ConstructorArgumentValues;
@@ -51,7 +51,7 @@ public class MongoDbFactoryParserIntegrationTests {
 	DefaultListableBeanFactory factory;
 	BeanDefinitionReader reader;

-	@Before
+	@BeforeEach
 	public void setUp() {

 		factory = new DefaultListableBeanFactory();
 		reader = new XmlBeanDefinitionReader(factory);
Some files were not shown because too many files have changed in this diff.