Compare commits
24 Commits
2.1.0.RC2 ... 2.1.0.RELEASE
| Author | SHA1 | Date |
|---|---|---|
| | a502ffabc3 | |
| | ffe4e9b914 | |
| | 914bdd9434 | |
| | 3cd9542483 | |
| | 586bf858f9 | |
| | 3478fd5ab3 | |
| | fa5f523c92 | |
| | 2191ab3bba | |
| | a79142931f | |
| | 1ba210366d | |
| | 16aa611007 | |
| | 13e29eb81f | |
| | fe90950880 | |
| | 492dec8ecf | |
| | a1ac2f7c1d | |
| | 04e53316c6 | |
| | a991b96518 | |
| | d53c5cf5c4 | |
| | 90779bbb27 | |
| | 892cc2e69a | |
| | a69f1b4d51 | |
| | 7859ee1013 | |
| | a58562ba69 | |
| | 779b0da358 | |
32 pom.xml
@@ -5,7 +5,7 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.1.0.RC2</version>
<version>2.1.0.RELEASE</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>2.1.0.RC2</version>
<version>2.1.0.RELEASE</version>
</parent>

<modules>
@@ -27,9 +27,9 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>2.1.0.RC2</springdata.commons>
<mongo>3.8.0</mongo>
<mongo.reactivestreams>1.9.0</mongo.reactivestreams>
<springdata.commons>2.1.0.RELEASE</springdata.commons>
<mongo>3.8.2</mongo>
<mongo.reactivestreams>1.9.2</mongo.reactivestreams>
<jmh.version>1.19</jmh.version>
</properties>

@@ -138,6 +138,24 @@
</modules>
</profile>

<profile>
<id>distribute</id>
<build>
<plugins>
<plugin>
<groupId>org.asciidoctor</groupId>
<artifactId>asciidoctor-maven-plugin</artifactId>
<configuration>
<attributes>
<mongo-reactivestreams>${mongo.reactivestreams}</mongo-reactivestreams>
<reactor>${reactor}</reactor>
</attributes>
</configuration>
</plugin>
</plugins>
</build>
</profile>

</profiles>

<dependencies>
@@ -151,8 +169,8 @@

<repositories>
<repository>
<id>spring-libs-milestone</id>
<url>https://repo.spring.io/libs-milestone</url>
<id>spring-libs-release</id>
<url>https://repo.spring.io/libs-release</url>
</repository>
</repositories>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.1.0.RC2</version>
<version>2.1.0.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.1.0.RC2</version>
<version>2.1.0.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -50,7 +50,7 @@
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>2.1.0.RC2</version>
<version>2.1.0.RELEASE</version>
</dependency>

<!-- reactive -->

@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.1.0.RC2</version>
<version>2.1.0.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -11,7 +11,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.1.0.RC2</version>
<version>2.1.0.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -47,13 +47,18 @@ import com.mongodb.client.ClientSession;
* Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via
* {@link MongoDatabaseUtils#getDatabase(MongoDbFactory)} instead of a standard {@link MongoDbFactory#getDb()} call.
* Spring classes such as {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
*
* <p />
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override
* {@link #doCommit(MongoTransactionObject)} to implement the
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
* behavior as outlined in the MongoDB reference manual.
*
* @author Christoph Strobl
* @author Mark Paluch
* @currentRead Shadow's Edge - Brent Weeks
* @since 2.1
* @see <a href="https://www.mongodb.com/transactions">MongoDB Transaction Documentation</a>
* @see MongoDatabaseUtils#getDatabase(MongoDbFactory, SessionSynchronization)
* @see MongoDatabaseUtils#getDatabase(MongoDbFactory, SessionSynchronization)
*/
public class MongoTransactionManager extends AbstractPlatformTransactionManager
implements ResourceTransactionManager, InitializingBean {
@@ -70,7 +75,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
* <p />
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
*
*
* @see #setDbFactory(MongoDbFactory)
* @see #setTransactionSynchronization(int)
*/
@@ -181,7 +186,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doCommit(org.springframework.transaction.support.DefaultTransactionStatus)
*/
@Override
protected void doCommit(DefaultTransactionStatus status) throws TransactionException {
protected final void doCommit(DefaultTransactionStatus status) throws TransactionException {

MongoTransactionObject mongoTransactionObject = extractMongoTransaction(status);

@@ -191,14 +196,45 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
}

try {
mongoTransactionObject.commitTransaction();
} catch (MongoException ex) {
doCommit(mongoTransactionObject);
} catch (Exception ex) {

throw new TransactionSystemException(String.format("Could not commit Mongo transaction for session %s.",
debugString(mongoTransactionObject.getSession())), ex);
}
}

/**
* Customization hook to perform an actual commit of the given transaction.<br />
* If a commit operation encounters an error, the MongoDB driver throws a {@link MongoException} holding
* {@literal error labels}. <br />
* By default those labels are ignored, nevertheless one might check for
* {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit errors labels} and retry the the
* commit. <br />
* <code>
* <pre>
* int retries = 3;
* do {
* try {
* transactionObject.commitTransaction();
* break;
* } catch (MongoException ex) {
* if (!ex.hasErrorLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)) {
* throw ex;
* }
* }
* Thread.sleep(500);
* } while (--retries > 0);
* </pre>
* </code>
*
* @param transactionObject never {@literal null}.
* @throws Exception in case of transaction errors.
*/
protected void doCommit(MongoTransactionObject transactionObject) throws Exception {
transactionObject.commitTransaction();
}

/*
* (non-Javadoc)
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doRollback(org.springframework.transaction.support.DefaultTransactionStatus)
@@ -386,7 +422,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
* @since 2.1
* @see MongoResourceHolder
*/
static class MongoTransactionObject implements SmartTransactionObject {
protected static class MongoTransactionObject implements SmartTransactionObject {

private @Nullable MongoResourceHolder resourceHolder;

@@ -406,7 +442,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
/**
* @return {@literal true} if a {@link MongoResourceHolder} is set.
*/
boolean hasResourceHolder() {
final boolean hasResourceHolder() {
return resourceHolder != null;
}

@@ -428,14 +464,14 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
/**
* Commit the transaction.
*/
void commitTransaction() {
public void commitTransaction() {
getRequiredSession().commitTransaction();
}

/**
* Rollback (abort) the transaction.
*/
void abortTransaction() {
public void abortTransaction() {
getRequiredSession().abortTransaction();
}

@@ -451,7 +487,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
}

@Nullable
ClientSession getSession() {
public ClientSession getSession() {
return resourceHolder != null ? resourceHolder.getSession() : null;
}

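The hunks above open `MongoTransactionManager` up for extension: `MongoTransactionObject` becomes `protected`, its commit/abort methods become `public`, and `doCommit(MongoTransactionObject)` becomes an overridable hook. For context, a minimal configuration sketch for using the transaction manager with `@Transactional`; the bean wiring, the `OrderService` class, and a replica-set-backed MongoDB deployment that supports transactions are assumptions, not part of this diff.

[source,java]
----
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.MongoTransactionManager;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.annotation.Transactional;

@Configuration
@EnableTransactionManagement
class TransactionConfig {

	// Expose the MongoDB-specific PlatformTransactionManager so that
	// @Transactional methods run inside a ClientSession-bound transaction.
	@Bean
	MongoTransactionManager transactionManager(MongoDbFactory dbFactory) {
		return new MongoTransactionManager(dbFactory);
	}
}

// Registered as a Spring bean elsewhere; shown here only to illustrate usage.
class OrderService {

	private final MongoTemplate template;

	OrderService(MongoTemplate template) {
		this.template = template;
	}

	@Transactional // commits or rolls back the session when the method returns
	public void saveBoth(Object first, Object second) {
		template.insert(first);
		template.insert(second);
	}
}
----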
@@ -22,7 +22,6 @@ import lombok.AllArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;

import java.beans.PropertyDescriptor;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.TimeUnit;
@@ -95,7 +94,6 @@ import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.validation.Validator;
import org.springframework.data.projection.ProjectionInformation;
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
import org.springframework.data.util.CloseableIterator;
import org.springframework.data.util.Optionals;
@@ -1397,7 +1395,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
maybeEmitEvent(new BeforeSaveEvent<>(objectToSave, dbDoc, collectionName));
Object id = saveDocument(collectionName, dbDoc, objectToSave.getClass());

T saved = entity.populateIdIfNecessary(id);
T saved = populateIdIfNecessary(entity.getBean(), id);
maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName));

return saved;

@@ -85,6 +85,23 @@ public interface ReactiveFindOperation {
*/
Flux<T> all();

/**
* Get all matching elements using a {@link com.mongodb.CursorType#TailableAwait tailable cursor}. The stream will
* not be completed unless the {@link org.reactivestreams.Subscription} is
* {@link org.reactivestreams.Subscription#cancel() canceled}.
* <p />
* However, the stream may become dead, or invalid, if either the query returns no match or the cursor returns the
* document at the "end" of the collection and then the application deletes that document.
* <p />
* A stream that is no longer in use must be {@link reactor.core.Disposable#dispose()} disposed} otherwise the
* streams will linger and exhaust resources. <br/>
* <strong>NOTE:</strong> Requires a capped collection.
*
* @return the {@link Flux} emitting converted objects.
* @since 2.1
*/
Flux<T> tail();

/**
* Get the number of matching elements.
*
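The new `tail()` terminal method above streams documents from a capped collection via a tailable cursor. A minimal usage sketch; the `ReactiveMongoTemplate` named `template`, the capped collection mapped by a `Person` class, and the `firstname` field are assumptions for illustration.

[source,java]
----
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import reactor.core.Disposable;
import reactor.core.publisher.Flux;

// Emits the current matches and then every new matching document appended to the capped collection.
Flux<Person> people = template.query(Person.class)
		.matching(query(where("firstname").is("luke")))
		.tail();

Disposable subscription = people.doOnNext(System.out::println).subscribe();

// Later: dispose the subscription to close the tailable cursor.
subscription.dispose();
----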
@@ -169,6 +169,15 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
return doFind(null);
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#tail()
*/
@Override
public Flux<T> tail() {
return doFind(template.new TailingQueryFindPublisherPreparer(query, domainType));
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithQuery#near(org.springframework.data.mongodb.core.query.NearQuery)

@@ -24,10 +24,10 @@ import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.messaging.Message.MessageProperties;
import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ErrorHandler;

import com.mongodb.client.MongoCursor;
import com.mysema.commons.lang.Assert;

/**
* @author Christoph Strobl
@@ -202,7 +202,7 @@ abstract class CursorReadingTask<T, R> implements Task {
public boolean awaitStart(Duration timeout) throws InterruptedException {

Assert.notNull(timeout, "Timeout must not be null!");
Assert.isFalse(timeout.isNegative(), "Timeout must not be negative!");
Assert.isTrue(!timeout.isNegative(), "Timeout must not be negative!");

return awaitStart.await(timeout.toNanos(), TimeUnit.NANOSECONDS);
}

@@ -30,6 +30,7 @@ import org.springframework.data.repository.query.QueryByExampleExecutor;
* @author Christoph Strobl
* @author Thomas Darimont
* @author Mark Paluch
* @author Khaled Baklouti
*/
@NoRepositoryBean
public interface MongoRepository<T, ID> extends PagingAndSortingRepository<T, ID>, QueryByExampleExecutor<T> {
@@ -39,7 +40,7 @@ public interface MongoRepository<T, ID> extends PagingAndSortingRepository<T, ID
* @see org.springframework.data.repository.CrudRepository#saveAll(java.lang.Iterable)
*/
@Override
<S extends T> List<S> saveAll(Iterable<S> entites);
<S extends T> List<S> saveAll(Iterable<S> entities);

/*
* (non-Javadoc)

@@ -32,7 +32,6 @@ import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecu
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.GeoNearExecution;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.ResultProcessingConverter;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.ResultProcessingExecution;
import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.TailExecution;
import org.springframework.data.repository.query.ParameterAccessor;
import org.springframework.data.repository.query.RepositoryQuery;
import org.springframework.data.repository.query.ResultProcessor;
@@ -120,7 +119,7 @@ public abstract class AbstractReactiveMongoQuery implements RepositoryQuery {

String collection = method.getEntityInformation().getCollectionName();

ReactiveMongoQueryExecution execution = getExecution(query, parameterAccessor,
ReactiveMongoQueryExecution execution = getExecution(parameterAccessor,
new ResultProcessingConverter(processor, operations, instantiators), find);

return execution.execute(query, processor.getReturnedType().getDomainType(), collection);
@@ -129,12 +128,11 @@ public abstract class AbstractReactiveMongoQuery implements RepositoryQuery {
/**
* Returns the execution instance to use.
*
* @param query must not be {@literal null}.
* @param accessor must not be {@literal null}.
* @param resultProcessing must not be {@literal null}.
* @return
*/
private ReactiveMongoQueryExecution getExecution(Query query, MongoParameterAccessor accessor,
private ReactiveMongoQueryExecution getExecution(MongoParameterAccessor accessor,
Converter<Object, Object> resultProcessing, FindWithQuery<?> operation) {
return new ResultProcessingExecution(getExecutionToWrap(accessor, operation), resultProcessing);
}
@@ -146,7 +144,7 @@ public abstract class AbstractReactiveMongoQuery implements RepositoryQuery {
} else if (method.isGeoNearQuery()) {
return new GeoNearExecution(operations, accessor, method.getReturnType());
} else if (isTailable(method)) {
return new TailExecution(operations, accessor.getPageable());
return (q, t, c) -> operation.matching(q.with(accessor.getPageable())).tail();
} else if (method.isCollectionQuery()) {
return (q, t, c) -> operation.matching(q.with(accessor.getPageable())).all();
} else if (isCountQuery()) {

@@ -51,23 +51,6 @@ interface ReactiveMongoQueryExecution {

Object execute(Query query, Class<?> type, String collection);

/**
* {@link ReactiveMongoQueryExecution} for collection returning queries using tailable cursors.
*
* @author Mark Paluch
*/
@RequiredArgsConstructor
final class TailExecution implements ReactiveMongoQueryExecution {

private final @NonNull ReactiveMongoOperations operations;
private final Pageable pageable;

@Override
public Object execute(Query query, Class<?> type, String collection) {
return operations.tail(query.with(pageable), type, collection);
}
}

/**
* {@link MongoQueryExecution} to execute geo-near queries.
*

@@ -1,5 +1,5 @@
/*
* Copyright 2017 the original author or authors.
* Copyright 2017-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -37,7 +37,6 @@ fun <T : Any> ExecutableFindOperation.query(entityClass: KClass<T>): ExecutableF
inline fun <reified T : Any> ExecutableFindOperation.query(): ExecutableFindOperation.ExecutableFind<T> =
query(T::class.java)


/**
* Extension for [ExecutableFindOperation.FindWithProjection. as] providing a [KClass] based variant.
*
@@ -45,7 +44,7 @@ inline fun <reified T : Any> ExecutableFindOperation.query(): ExecutableFindOper
* @author Mark Paluch
* @since 2.0
*/
fun <T : Any> ExecutableFindOperation.FindWithProjection<T>.asType(resultType: KClass<T>): ExecutableFindOperation.FindWithQuery<T> =
fun <T : Any> ExecutableFindOperation.FindWithProjection<*>.asType(resultType: KClass<T>): ExecutableFindOperation.FindWithQuery<T> =
`as`(resultType.java)

/**
@@ -55,7 +54,7 @@ fun <T : Any> ExecutableFindOperation.FindWithProjection<T>.asType(resultType: K
* @author Mark Paluch
* @since 2.0
*/
inline fun <reified T : Any> ExecutableFindOperation.FindWithProjection<T>.asType(): ExecutableFindOperation.FindWithQuery<T> =
inline fun <reified T : Any> ExecutableFindOperation.FindWithProjection<*>.asType(): ExecutableFindOperation.FindWithQuery<T> =
`as`(T::class.java)

/**
@@ -66,3 +65,12 @@ inline fun <reified T : Any> ExecutableFindOperation.FindWithProjection<T>.asTyp
*/
fun <T : Any> ExecutableFindOperation.DistinctWithProjection.asType(resultType: KClass<T>): ExecutableFindOperation.TerminatingDistinct<T> =
`as`(resultType.java);

/**
* Extension for [ExecutableFindOperation.DistinctWithProjection. as] leveraging reified type parameters.
*
* @author Christoph Strobl
* @since 2.1
*/
inline fun <reified T : Any> ExecutableFindOperation.DistinctWithProjection.asType(): ExecutableFindOperation.DistinctWithProjection =
`as`(T::class.java)

@@ -41,7 +41,7 @@ inline fun <reified T : Any> ExecutableMapReduceOperation.mapReduce(): Executabl
* @author Christoph Strobl
* @since 2.1
*/
fun <T : Any> ExecutableMapReduceOperation.MapReduceWithProjection<T>.asType(resultType: KClass<T>): ExecutableMapReduceOperation.MapReduceWithQuery<T> =
fun <T : Any> ExecutableMapReduceOperation.MapReduceWithProjection<*>.asType(resultType: KClass<T>): ExecutableMapReduceOperation.MapReduceWithQuery<T> =
`as`(resultType.java)

/**
@@ -50,5 +50,5 @@ fun <T : Any> ExecutableMapReduceOperation.MapReduceWithProjection<T>.asType(res
* @author Christoph Strobl
* @since 2.1
*/
inline fun <reified T : Any> ExecutableMapReduceOperation.MapReduceWithProjection<T>.asType(): ExecutableMapReduceOperation.MapReduceWithQuery<T> =
inline fun <reified T : Any> ExecutableMapReduceOperation.MapReduceWithProjection<*>.asType(): ExecutableMapReduceOperation.MapReduceWithQuery<T> =
`as`(T::class.java)

@@ -1,5 +1,5 @@
/*
* Copyright 2017 the original author or authors.
* Copyright 2017-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -41,7 +41,7 @@ inline fun <reified T : Any> ReactiveFindOperation.query(): ReactiveFindOperatio
* @author Mark Paluch
* @since 2.0
*/
fun <T : Any> ReactiveFindOperation.FindWithProjection<T>.asType(resultType: KClass<T>): ReactiveFindOperation.FindWithQuery<T> =
fun <T : Any> ReactiveFindOperation.FindWithProjection<*>.asType(resultType: KClass<T>): ReactiveFindOperation.FindWithQuery<T> =
`as`(resultType.java)

/**
@@ -50,7 +50,7 @@ fun <T : Any> ReactiveFindOperation.FindWithProjection<T>.asType(resultType: KCl
* @author Mark Paluch
* @since 2.0
*/
inline fun <reified T : Any> ReactiveFindOperation.FindWithProjection<T>.asType(): ReactiveFindOperation.FindWithQuery<T> =
inline fun <reified T : Any> ReactiveFindOperation.FindWithProjection<*>.asType(): ReactiveFindOperation.FindWithQuery<T> =
`as`(T::class.java)

/**
@@ -61,3 +61,12 @@ inline fun <reified T : Any> ReactiveFindOperation.FindWithProjection<T>.asType(
*/
fun <T : Any> ReactiveFindOperation.DistinctWithProjection.asType(resultType: KClass<T>): ReactiveFindOperation.TerminatingDistinct<T> =
`as`(resultType.java);

/**
* Extension for [ReactiveFindOperation.DistinctWithProjection. as] leveraging reified type parameters.
*
* @author Christoph Strobl
* @since 2.1
*/
inline fun <reified T : Any> ReactiveFindOperation.DistinctWithProjection.asType(): ReactiveFindOperation.DistinctWithProjection =
`as`(T::class.java)

@@ -41,7 +41,7 @@ inline fun <reified T : Any> ReactiveMapReduceOperation.mapReduce(): ReactiveMap
* @author Christoph Strobl
* @since 2.1
*/
fun <T : Any> ReactiveMapReduceOperation.MapReduceWithProjection<T>.asType(resultType: KClass<T>): ReactiveMapReduceOperation.MapReduceWithQuery<T> =
fun <T : Any> ReactiveMapReduceOperation.MapReduceWithProjection<*>.asType(resultType: KClass<T>): ReactiveMapReduceOperation.MapReduceWithQuery<T> =
`as`(resultType.java)

/**
@@ -50,5 +50,5 @@ fun <T : Any> ReactiveMapReduceOperation.MapReduceWithProjection<T>.asType(resul
* @author Christoph Strobl
* @since 2.1
*/
inline fun <reified T : Any> ReactiveMapReduceOperation.MapReduceWithProjection<T>.asType(): ReactiveMapReduceOperation.MapReduceWithQuery<T> =
inline fun <reified T : Any> ReactiveMapReduceOperation.MapReduceWithProjection<*>.asType(): ReactiveMapReduceOperation.MapReduceWithQuery<T> =
`as`(T::class.java)

@@ -22,9 +22,14 @@ import static org.springframework.data.mongodb.core.query.Query.*;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import reactor.core.Disposable;
import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;

import java.util.Date;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

import org.bson.BsonString;
@@ -41,6 +46,7 @@ import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeospatialIndex;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.NearQuery;

import com.mongodb.MongoClient;
@@ -65,7 +71,14 @@ public class ReactiveFindOperationSupportTests {
public void setUp() {

blocking = new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "ExecutableFindOperationSupportTests"));
blocking.dropCollection(STAR_WARS);
recreateCollection(STAR_WARS, false);

insertObjects();

template = new ReactiveMongoTemplate(MongoClients.create(), "ExecutableFindOperationSupportTests");
}

void insertObjects() {

han = new Person();
han.firstname = "han";
@@ -79,8 +92,18 @@ public class ReactiveFindOperationSupportTests {

blocking.save(han);
blocking.save(luke);
}

template = new ReactiveMongoTemplate(MongoClients.create(), "ExecutableFindOperationSupportTests");
void recreateCollection(String collectionName, boolean capped) {

blocking.dropCollection(STAR_WARS);

CollectionOptions options = CollectionOptions.empty();
if (capped) {
options = options.capped().size(1024 * 1024);
}

blocking.createCollection(STAR_WARS, options);
}

@Test(expected = IllegalArgumentException.class) // DATAMONGO-1719
@@ -291,6 +314,81 @@ public class ReactiveFindOperationSupportTests {
.verifyComplete();
}

@Test // DATAMONGO-2080
public void tail() throws InterruptedException {

recreateCollection(STAR_WARS, true);
insertObjects();

BlockingQueue<Person> collector = new LinkedBlockingQueue<>();
Flux<Person> tail = template.query(Person.class)
.matching(query(new Criteria().orOperator(where("firstname").is("chewbacca"), where("firstname").is("luke"))))
.tail().doOnNext(collector::add);

Disposable subscription = tail.subscribe();

assertThat(collector.poll(1, TimeUnit.SECONDS)).isEqualTo(luke);
assertThat(collector).isEmpty();

Person chewbacca = new Person();
chewbacca.firstname = "chewbacca";
chewbacca.lastname = "chewie";
chewbacca.id = "id-3";

blocking.save(chewbacca);

assertThat(collector.poll(1, TimeUnit.SECONDS)).isEqualTo(chewbacca);

subscription.dispose();
}

@Test // DATAMONGO-2080
public void tailWithProjection() {

recreateCollection(STAR_WARS, true);
insertObjects();

template.query(Person.class).as(Jedi.class).matching(query(where("firstname").is("luke"))).tail()
.as(StepVerifier::create) //
.consumeNextWith(it -> assertThat(it).isInstanceOf(Jedi.class)) //
.thenCancel() //
.verify();
}

@Test // DATAMONGO-2080
public void tailWithClosedInterfaceProjection() {

recreateCollection(STAR_WARS, true);
insertObjects();

template.query(Person.class).as(PersonProjection.class).matching(query(where("firstname").is("luke"))).tail()
.as(StepVerifier::create) //
.consumeNextWith(it -> {

assertThat(it).isInstanceOf(PersonProjection.class);
assertThat(it.getFirstname()).isEqualTo("luke");
}) //
.thenCancel() //
.verify();
}

@Test // DATAMONGO-2080
public void tailWithOpenInterfaceProjection() {

recreateCollection(STAR_WARS, true);
insertObjects();

template.query(Person.class).as(PersonSpELProjection.class).matching(query(where("firstname").is("luke"))).tail()
.as(StepVerifier::create) //
.consumeNextWith(it -> {

assertThat(it).isInstanceOf(PersonSpELProjection.class);
assertThat(it.getName()).isEqualTo("luke");
}) //
.thenCancel() //
.verify();
}

@Test // DATAMONGO-1719
public void firstShouldReturnFirstEntryInCollection() {
StepVerifier.create(template.query(Person.class).first()).expectNextCount(1).verifyComplete();

@@ -19,6 +19,7 @@ import static org.assertj.core.api.Assertions.offset;
import static org.springframework.data.domain.Sort.Direction.*;
import static org.springframework.data.mongodb.test.util.Assertions.assertThat;

import lombok.Data;
import lombok.NoArgsConstructor;
import reactor.core.Disposable;
import reactor.core.publisher.Flux;
@@ -227,6 +228,19 @@ public class ReactiveMongoRepositoryTests implements BeanClassLoaderAware, BeanF
disposable.dispose();
}

@Test // DATAMONGO-2080
public void shouldUseTailableCursorWithDtoProjection() {

template.dropCollection(Capped.class) //
.then(template.createCollection(Capped.class, //
CollectionOptions.empty().size(1000).maxDocuments(100).capped())) //
.as(StepVerifier::create).expectNextCount(1) //
.verifyComplete();

template.insert(new Capped("value", Math.random())).as(StepVerifier::create).expectNextCount(1).verifyComplete();
cappedRepository.findDtoProjectionByKey("value").as(StepVerifier::create).expectNextCount(1).thenCancel().verify();
}

@Test // DATAMONGO-1444
public void findsPeopleByLocationWithinCircle() {

@@ -337,6 +351,8 @@ public class ReactiveMongoRepositoryTests implements BeanClassLoaderAware, BeanF

Mono<Person> findOneByLastname(String lastname);

Mono<DtoProjection> findOneProjectedByLastname(String lastname);

Mono<Person> findByLastname(Publisher<String> lastname);

Flux<Person> findByLastnameIn(Publisher<String> lastname);
@@ -376,6 +392,9 @@ public class ReactiveMongoRepositoryTests implements BeanClassLoaderAware, BeanF

@Tailable
Flux<CappedProjection> findProjectionByKey(String key);

@Tailable
Flux<DtoProjection> findDtoProjectionByKey(String key);
}

@Document
@@ -395,4 +414,10 @@ public class ReactiveMongoRepositoryTests implements BeanClassLoaderAware, BeanF
interface CappedProjection {
double getRandom();
}

@Data
static class DtoProjection {
String id;
double unknown;
}
}

@@ -1,5 +1,5 @@
/*
* Copyright 2017 the original author or authors.
* Copyright 2017-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -53,24 +53,31 @@ class ExecutableFindOperationExtensionsTests {
verify(operation).query(First::class.java)
}

@Test // DATAMONGO-1689
@Test // DATAMONGO-1689, DATAMONGO-2086
fun `ExecutableFindOperation#FindOperationWithProjection#asType(KClass) extension should call its Java counterpart`() {

operationWithProjection.asType(First::class)
verify(operationWithProjection).`as`(First::class.java)
operationWithProjection.asType(User::class)
verify(operationWithProjection).`as`(User::class.java)
}

@Test // DATAMONGO-1689
@Test // DATAMONGO-1689, DATAMONGO-2086
fun `ExecutableFindOperation#FindOperationWithProjection#asType() with reified type parameter extension should call its Java counterpart`() {

operationWithProjection.asType()
verify(operationWithProjection).`as`(First::class.java)
operationWithProjection.asType<User>()
verify(operationWithProjection).`as`(User::class.java)
}

@Test // DATAMONGO-1761
@Test // DATAMONGO-1761, DATAMONGO-2086
fun `ExecutableFindOperation#DistinctWithProjection#asType(KClass) extension should call its Java counterpart`() {

distinctWithProjection.asType(First::class)
verify(distinctWithProjection).`as`(First::class.java)
distinctWithProjection.asType(User::class)
verify(distinctWithProjection).`as`(User::class.java)
}

@Test // DATAMONGO-2086
fun `ExecutableFindOperation#DistinctWithProjection#asType() with reified type parameter extension should call its Java counterpart`() {

distinctWithProjection.asType<User>()
verify(distinctWithProjection).`as`(User::class.java)
}
}

@@ -49,17 +49,17 @@ class ExecutableMapReduceOperationExtensionsTests {
verify(operation).mapReduce(First::class.java)
}

@Test // DATAMONGO-1929
@Test // DATAMONGO-1929, DATAMONGO-2086
fun `ExecutableMapReduceOperation#MapReduceWithProjection#asType(KClass) extension should call its Java counterpart`() {

operationWithProjection.asType(First::class)
verify(operationWithProjection).`as`(First::class.java)
operationWithProjection.asType(User::class)
verify(operationWithProjection).`as`(User::class.java)
}

@Test // DATAMONGO-1929
@Test // DATAMONGO-1929, DATAMONGO-2086
fun `ExecutableMapReduceOperation#MapReduceWithProjection#asType() with reified type parameter extension should call its Java counterpart`() {

operationWithProjection.asType()
verify(operationWithProjection).`as`(First::class.java)
operationWithProjection.asType<User>()
verify(operationWithProjection).`as`(User::class.java)
}
}

@@ -1,5 +1,5 @@
/*
* Copyright 2017 the original author or authors.
* Copyright 2017-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -52,24 +52,31 @@ class ReactiveFindOperationExtensionsTests {
verify(operation).query(First::class.java)
}

@Test // DATAMONGO-1719
@Test // DATAMONGO-1719, DATAMONGO-2086
fun `ReactiveFind#FindOperatorWithProjection#asType(KClass) extension should call its Java counterpart`() {

operationWithProjection.asType(First::class)
verify(operationWithProjection).`as`(First::class.java)
operationWithProjection.asType(User::class)
verify(operationWithProjection).`as`(User::class.java)
}

@Test // DATAMONGO-1719
@Test // DATAMONGO-1719, DATAMONGO-2086
fun `ReactiveFind#FindOperatorWithProjection#asType() with reified type parameter extension should call its Java counterpart`() {

operationWithProjection.asType()
verify(operationWithProjection).`as`(First::class.java)
operationWithProjection.asType<User>()
verify(operationWithProjection).`as`(User::class.java)
}

@Test // DATAMONGO-1761
@Test // DATAMONGO-1761, DATAMONGO-2086
fun `ReactiveFind#DistinctWithProjection#asType(KClass) extension should call its Java counterpart`() {

distinctWithProjection.asType(First::class)
verify(distinctWithProjection).`as`(First::class.java)
distinctWithProjection.asType(User::class)
verify(distinctWithProjection).`as`(User::class.java)
}

@Test // DATAMONGO-2086
fun `ReactiveFind#DistinctWithProjection#asType() with reified type parameter extension should call its Java counterpart`() {

distinctWithProjection.asType<User>()
verify(distinctWithProjection).`as`(User::class.java)
}
}

@@ -49,17 +49,17 @@ class ReactiveMapReduceOperationExtensionsTests {
verify(operation).mapReduce(First::class.java)
}

@Test // DATAMONGO-1929
@Test // DATAMONGO-1929, DATAMONGO-2086
fun `ReactiveMapReduceOperation#MapReduceWithProjection#asType(KClass) extension should call its Java counterpart`() {

operationWithProjection.asType(First::class)
verify(operationWithProjection).`as`(First::class.java)
operationWithProjection.asType(User::class)
verify(operationWithProjection).`as`(User::class.java)
}

@Test // DATAMONGO-1929
@Test // DATAMONGO-1929, DATAMONGO-2086
fun `ReactiveMapReduceOperation#MapReduceWithProjection#asType() with reified type parameter extension should call its Java counterpart`() {

operationWithProjection.asType()
verify(operationWithProjection).`as`(First::class.java)
operationWithProjection.asType<User>()
verify(operationWithProjection).`as`(User::class.java)
}
}

@@ -10,7 +10,7 @@
* <<mongo.mongo-3.validation,`validator` support for collections>>.
* <<mongo.jsonSchema,`$jsonSchema` support>> for queries and collection creation.
* <<change-streams, Change Stream support>> for imperative and reactive drivers.
* Tailable cursors for imperative driver.
* <<tailable-cursors.sync, Tailable cursors>> for imperative driver.
* <<mongo.sessions, MongoDB 3.6 Session>> support for the imperative and reactive Template APIs.
* <<mongo.transactions, MongoDB 4.0 Transaction>> support and a MongoDB-specific transaction manager implementation.
* <<mongodb.repositories.queries.sort,Default sort specifications for repository query methods>> using `@Query(sort=…)`.

@@ -315,6 +315,14 @@ MongoDB does *not* support collection operations, such as collection creation, w
affects the on the fly collection creation that happens on first usage. Therefore make sure to have all required
structures in place.

*Transient Errors*

MongoDB can add special labels to errors raised during transactional execution. Those may indicate transient failures
that might vanish by merely retrying the operation.
We highly recommend https://github.com/spring-projects/spring-retry[Spring Retry] for those purposes. Nevertheless
one may override `MongoTransactionManager#doCommit(MongoTransactionObject)` to implement a https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation[Retry Commit Operation]
behavior as outlined in the MongoDB reference manual.

*Count*

MongoDB `count` operates upon collection statistics which may not reflect the actual situation within a transaction.

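The *Transient Errors* paragraph above points to overriding `MongoTransactionManager#doCommit(MongoTransactionObject)`. A minimal sketch of such an override, following the retry loop from the Javadoc shown earlier in this diff; the retry count and the fixed 500 ms pause are assumptions, not a prescribed policy.

[source,java]
----
import com.mongodb.MongoException;

import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.MongoTransactionManager;

public class RetryingMongoTransactionManager extends MongoTransactionManager {

	public RetryingMongoTransactionManager(MongoDbFactory dbFactory) {
		super(dbFactory);
	}

	@Override
	protected void doCommit(MongoTransactionObject transactionObject) throws Exception {

		int retries = 3;
		do {
			try {
				transactionObject.commitTransaction();
				break;
			} catch (MongoException ex) {
				// Only retry commits the server flags as safe to retry.
				if (!ex.hasErrorLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)) {
					throw ex;
				}
			}
			Thread.sleep(500); // simple back-off between attempts
		} while (--retries > 0);
	}
}
----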
@@ -3,7 +3,9 @@

Rich mapping support is provided by the `MappingMongoConverter`. `MappingMongoConverter` has a rich metadata model that provides a full feature set to map domain objects to MongoDB documents. The mapping metadata model is populated by using annotations on your domain objects. However, the infrastructure is not limited to using annotations as the only source of metadata information. The `MappingMongoConverter` also lets you map objects to documents without providing any additional metadata, by following a set of conventions.

This section describes the features of the `MappingMongoConverter`, including how to use conventions for mapping objects to documents and how to override those conventions with annotation-based mapping metadata.
This section describes the features of the `MappingMongoConverter`, including fundamentals, how to use conventions for mapping objects to documents and how to override those conventions with annotation-based mapping metadata.

include::../{spring-data-commons-docs}/object-mapping.adoc[leveloffset=+1]

[[mapping-conventions]]
== Convention-based Mapping

@@ -29,7 +29,7 @@ To create a Spring project in STS:
. Go to File -> New -> Spring Template Project -> Simple Spring Utility Project, and press Yes when prompted. Then enter a project and a package name, such as `org.spring.mongodb.example`.
.Add the following to the pom.xml files `dependencies` element:
+
[source,xml]
[source,xml,subs="+attributes"]
----
<dependencies>

@@ -45,7 +45,7 @@ To create a Spring project in STS:
----
. Change the version of Spring in the pom.xml to be
+
[source,xml]
[source,xml,subs="+attributes"]
----
<spring.framework.version>{springVersion}</spring.framework.version>
----
@@ -165,7 +165,7 @@ There is a https://github.com/spring-projects/spring-data-examples[GitHub reposi
[[mongodb-connectors]]
== Connecting to MongoDB with Spring

One of the first tasks when using MongoDB and Spring is to create a `com.mongodb.MongoClient` object using the IoC container. There are two main ways to do this, either by using Java-based bean metadata or by using XML-based bean metadata. Both are discussed in the following sections.
One of the first tasks when using MongoDB and Spring is to create a `com.mongodb.MongoClient` or `com.mongodb.client.MongoClient` object using the IoC container. There are two main ways to do this, either by using Java-based bean metadata or by using XML-based bean metadata. Both are discussed in the following sections.

NOTE: For those not familiar with how to configure the Spring container using Java-based bean metadata instead of XML-based metadata, see the high-level introduction in the reference docs http://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/new-in-3.0.html#new-java-configuration[here] as well as the detailed documentation http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/core.html#beans-java-instantiating-container[here].

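The updated paragraph above is about creating the driver's `MongoClient` through the IoC container. A minimal Java-based configuration sketch; the `localhost` connection and the `database` name are illustrative assumptions.

[source,java]
----
import com.mongodb.MongoClient;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.MongoTemplate;

@Configuration
class MongoClientConfig {

	// Register the driver entry point as a Spring bean.
	@Bean
	MongoClient mongoClient() {
		return new MongoClient("localhost");
	}

	// Template used by application code; the database name is an assumption.
	@Bean
	MongoTemplate mongoTemplate(MongoClient mongoClient) {
		return new MongoTemplate(mongoClient, "database");
	}
}
----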
@@ -322,6 +322,8 @@ public class MongoApp {

The code in bold highlights the use of `SimpleMongoDbFactory` and is the only difference between the listing shown in the <<mongodb-getting-started,getting started section>>.

NOTE: Use `SimpleMongoClientDbFactory` when choosing `com.mongodb.client.MongoClient` as the entrypoint of choice.

[[mongo.mongo-db-factory-java]]
=== Registering a `MongoDbFactory` Instance by Using Java-based Metadata

@@ -363,9 +365,7 @@ In order to use authentication with XML-based configuration, use the `credential

NOTE: Username and password credentials used in XML-based configuration must be URL-encoded when these contain reserved characters, such as `:`, `%`, `@`, or `,`.
The following example shows encoded credentials:

`m0ng0@dmin:mo_res:bw6},Qsdxx@admin@database` -> `m0ng0%40dmin:mo_res%3Abw6%7D%2CQsdxx%40admin@database`

See https://tools.ietf.org/html/rfc3986#section-2.2[section 2.2 of RFC 3986] for further details.

As of MongoDB java driver 3.7.0 there is an alternative entry point to `MongoClient` via the https://search.maven.org/beta/search?q=a:mongodb-driver-sync[mongodb-driver-sync] artifact.
@@ -3085,4 +3085,5 @@ class GridFsClient {

`GridFsOperations` extends `ResourcePatternResolver` and lets the `GridFsTemplate` (for example) to be plugged into an `ApplicationContext` to read Spring Config files from MongoDB database.

include::tailable-cursors.adoc[]
include::change-streams.adoc[]

@@ -198,53 +198,3 @@ public interface PersonRepository extends ReactiveMongoRepository<Person, String
Flux<GeoResult<Person>> findByLocationNear(Point location);
}
----

[[mongo.reactive.repositories.infinite-streams]]
== Infinite Streams with Tailable Cursors

By default, MongoDB automatically closes a cursor when the client exhausts all results supplied by the cursor. Closing a cursor on exhaustion turns a stream into a finite stream. For https://docs.mongodb.com/manual/core/capped-collections/[capped collections], you can use a https://docs.mongodb.com/manual/core/tailable-cursors/[Tailable Cursor] that remains open after the client consumes all initially returned data. Using tailable cursors with a reactive data types allows construction of infinite streams. A tailable cursor remains open until it is closed externally. It emits data as new documents arrive in a capped collection.

Tailable cursors may become dead, or invalid, if either the query returns no match or the cursor returns the document at the "`end`" of the collection and the application then deletes that document. The following example shows how to create and use an infinite stream query:

.Infinite Stream queries with ReactiveMongoOperations
====
[source,java]
----
Flux<Person> stream = template.tail(query(where("name").is("Joe")), Person.class);

Disposable subscription = stream.doOnNext(person -> System.out.println(person)).subscribe();

// …

// Later: Dispose the subscription to close the stream
subscription.dispose();
----
====

Spring Data MongoDB Reactive repositories support infinite streams by annotating a query method with `@Tailable`. This works for methods that return `Flux` and other reactive types capable of emitting multiple elements, as the following example shows:

.Infinite Stream queries with ReactiveMongoRepository
====
[source,java]
----

public interface PersonRepository extends ReactiveMongoRepository<Person, String> {

@Tailable
Flux<Person> findByFirstname(String firstname);

}

Flux<Person> stream = repository.findByFirstname("Joe");

Disposable subscription = stream.doOnNext(System.out::println).subscribe();

// …

// Later: Dispose the subscription to close the stream
subscription.dispose();
----
====

TIP: Capped collections can be created with `MongoOperations.createCollection`. To do so, provide the required `CollectionOptions.empty().capped()...`.

@@ -25,7 +25,7 @@ To create a Spring project in STS, go to File -> New -> Spring Template Project

Then add the following to the pom.xml dependencies section.

[source,xml]
[source,xml,subs="+attributes"]
----
<dependencies>

@@ -40,7 +40,7 @@ Then add the following to the pom.xml dependencies section.
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-driver-reactivestreams</artifactId>
<version>{mongo.reactivestreams}</version>
<version>{mongo-reactivestreams}</version>
</dependency>

<dependency>

101 src/main/asciidoc/reference/tailable-cursors.adoc (new file)
@@ -0,0 +1,101 @@
// carry over the old bookmarks to prevent external links from failing
[[tailable-cursors]]
== [[mongo.reactive.repositories.infinite-streams]] Infinite Streams with Tailable Cursors

By default, MongoDB automatically closes a cursor when the client exhausts all results supplied by the cursor.
Closing a cursor on exhaustion turns a stream into a finite stream. For https://docs.mongodb.com/manual/core/capped-collections/[capped collections],
you can use a https://docs.mongodb.com/manual/core/tailable-cursors/[Tailable Cursor] that remains open after the client
consumes all initially returned data.

TIP: Capped collections can be created with `MongoOperations.createCollection`. To do so, provide the required `CollectionOptions.empty().capped()...`.
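A minimal sketch of the capped-collection setup mentioned in the TIP; the collection name `orders`, the 1 MB size, and an existing `MongoOperations` instance named `template` are assumptions.

[source,java]
----
// CollectionOptions is org.springframework.data.mongodb.core.CollectionOptions.
// Create a 1 MB capped collection named "orders" before tailing it.
template.createCollection("orders",
		CollectionOptions.empty().capped().size(1024 * 1024));
----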
Tailable cursors can be consumed with both the imperative and the reactive MongoDB API. It is highly recommended to use the
reactive variant, as it is less resource-intensive. However, if you cannot use the reactive API, you can still use a messaging
concept that is already prevalent in the Spring ecosystem.

[[tailable-cursors.sync]]
=== Tailable Cursors with `MessageListener`

Listening to a capped collection using a Sync Driver creates a long running, blocking task that needs to be delegated to
a separate component. In this case, we need to first create a `MessageListenerContainer`, which will be the main entry point
for running the specific `SubscriptionRequest`. Spring Data MongoDB already ships with a default implementation that
operates on `MongoTemplate` and is capable of creating and executing `Task` instances for a `TailableCursorRequest`.

The following example shows how to use tailable cursors with `MessageListener` instances:

.Tailable Cursors with `MessageListener` instances
====
[source,java]
----
MessageListenerContainer container = new DefaultMessageListenerContainer(template);
container.start(); <1>

MessageListener<Document, User> listener = System.out::println; <2>

TailableCursorRequest request = TailableCursorRequest.builder()
.collection("orders") <3>
.filter(query(where("value").lt(100))) <4>
.publishTo(listener) <5>
.build();

container.register(request, User.class); <6>

// ...

container.stop(); <7>
----
<1> Starting the container initializes the resources and starts `Task` instances for already registered `SubscriptionRequest` instances. Requests added after startup are run immediately.
<2> Define the listener called when a `Message` is received. The `Message#getBody()` is converted to the requested domain type. Use `Document` to receive raw results without conversion.
<3> Set the collection to listen to.
<4> Provide an optional filter for documents to receive.
<5> Set the message listener to publish incoming ``Message``s to.
<6> Register the request. The returned `Subscription` can be used to check the current `Task` state and cancel its execution to free resources.
<7> Do not forget to stop the container once you are sure you no longer need it. Doing so stops all running `Task` instances within the container.
====

[[tailable-cursors.reactive]]
=== Reactive Tailable Cursors

Using tailable cursors with reactive data types allows construction of infinite streams. A tailable cursor remains open until it is closed externally. It emits data as new documents arrive in a capped collection.

Tailable cursors may become dead, or invalid, if either the query returns no match or the cursor returns the document at the "`end`" of the collection and the application then deletes that document. The following example shows how to create and use an infinite stream query:

.Infinite Stream queries with ReactiveMongoOperations
====
[source,java]
----
Flux<Person> stream = template.tail(query(where("name").is("Joe")), Person.class);

Disposable subscription = stream.doOnNext(person -> System.out.println(person)).subscribe();

// …

// Later: Dispose the subscription to close the stream
subscription.dispose();
----
====

Spring Data MongoDB Reactive repositories support infinite streams by annotating a query method with `@Tailable`. This works for methods that return `Flux` and other reactive types capable of emitting multiple elements, as the following example shows:

.Infinite Stream queries with ReactiveMongoRepository
====
[source,java]
----

public interface PersonRepository extends ReactiveMongoRepository<Person, String> {

@Tailable
Flux<Person> findByFirstname(String firstname);

}

Flux<Person> stream = repository.findByFirstname("Joe");

Disposable subscription = stream.doOnNext(System.out::println).subscribe();

// …

// Later: Dispose the subscription to close the stream
subscription.dispose();
----
====

@@ -1,6 +1,51 @@
Spring Data MongoDB Changelog
=============================

Changes in version 2.1.0.RELEASE (2018-09-21)
---------------------------------------------
* DATAMONGO-2091 - Upgrade to MongoDB Java Driver 3.8.2 and Reactive Streams Driver 1.9.2.
* DATAMONGO-2090 - Include documentation about Object Mapping Fundamentals.
* DATAMONGO-2087 - Typo in MongoRepository.
* DATAMONGO-2086 - Kotlin Fluent API extensions do not allow projections with find queries.
* DATAMONGO-2080 - DTO projections with reactive @Tailable query methods fail with IllegalArgumentException: Property must not be null.
* DATAMONGO-2078 - Update reference documentation on tailable cursors with the sync MongoDB Java driver.
* DATAMONGO-2076 - Fix property substitution in getting started section of reference docs.
* DATAMONGO-2075 - Open up MongoTransaction manager to allow transaction commit retry..
* DATAMONGO-2069 - Required dependency 'mysema-commons-lang'.
* DATAMONGO-2065 - Make sure that MongoTemplate.doSave(…) calls local populateIdIfNecessary(…) to allow customization.
* DATAMONGO-2064 - Upgrade to MongoDB Java Driver 3.8.1.
* DATAMONGO-2061 - Release 2.1 GA (Lovelace).


Changes in version 2.0.10.RELEASE (2018-09-10)
----------------------------------------------
* DATAMONGO-2076 - Fix property substitution in getting started section of reference docs.
* DATAMONGO-2055 - Allow position modifier to be negative using push at position on Update.
* DATAMONGO-2051 - Add support for SCRAM-SHA-256 authentication mechanism to MongoCredentialPropertyEditor.
* DATAMONGO-2050 - Add support for index selection via key attribute for $geoNear aggregation.
* DATAMONGO-2049 - Add support for MongoDB 4.0 string aggregation operators.
* DATAMONGO-2048 - Add support for MongoDB 4.0 type conversion aggregation operators.
* DATAMONGO-2047 - Update $dateToString and $dateFromString aggregation operators to match MongoDB 4.0 changes.
* DATAMONGO-2046 - Investigate performance regressions between 2.0 GA and 2.1 RC2.
* DATAMONGO-2043 - MappingMongoConverter.write(…) does not consider Document a native type and adds _class attribute.
* DATAMONGO-2034 - Release 2.0.10 (Kay SR10).
* DATAMONGO-2027 - outputCollection and outputType of MapReduceOptions not work.


Changes in version 1.10.15.RELEASE (2018-09-10)
-----------------------------------------------
* DATAMONGO-2076 - Fix property substitution in getting started section of reference docs.
* DATAMONGO-2057 - Guard MongoDbUtils integration tests against MongoDB 4.0 changes.
* DATAMONGO-2055 - Allow position modifier to be negative using push at position on Update.
* DATAMONGO-2051 - Add support for SCRAM-SHA-256 authentication mechanism to MongoCredentialPropertyEditor.
* DATAMONGO-2050 - Add support for index selection via key attribute for $geoNear aggregation.
* DATAMONGO-2049 - Add support for MongoDB 4.0 string aggregation operators.
* DATAMONGO-2048 - Add support for MongoDB 4.0 type conversion aggregation operators.
* DATAMONGO-2047 - Update $dateToString and $dateFromString aggregation operators to match MongoDB 4.0 changes.
* DATAMONGO-2043 - MappingMongoConverter.write(…) does not consider Document a native type and adds _class attribute.
* DATAMONGO-2035 - Release 1.10.15 (Ingalls SR15).


Changes in version 2.1.0.RC2 (2018-08-20)
-----------------------------------------
* DATAMONGO-2055 - Allow position modifier to be negative using push at position on Update.

@@ -1,4 +1,4 @@
Spring Data MongoDB 2.1 RC2
Spring Data MongoDB 2.1 GA
Copyright (c) [2010-2015] Pivotal Software, Inc.

This product is licensed to you under the Apache License, Version 2.0 (the "License").