Compare commits


7 Commits

Author SHA1 Message Date
Christoph Strobl  569f9838d2  switch to commons branch  2020-10-16 17:47:58 +02:00
Christoph Strobl  e4f2085861  some movement  2020-10-15 13:47:16 +02:00
Christoph Strobl  326a10f1bb  extract some interfaces  2020-10-15 09:52:21 +02:00
Christoph Strobl  b61c1abd7b  hick hack - annotation support for properties  2020-10-12 14:13:33 +02:00
Christoph Strobl  6d5d9776c9  make it see the constructor  2020-10-12 13:00:01 +02:00
Christoph Strobl  755f65299d  Move and modify  2020-10-12 12:24:18 +02:00
Christoph Strobl  0b507c342f  prepare issue branch.  2020-10-12 12:23:59 +02:00
37 changed files with 1067 additions and 753 deletions

Jenkinsfile (vendored, 10 changed lines)

@@ -46,16 +46,16 @@ pipeline {
}
}
}
stage('Publish JDK 15 + MongoDB 4.2') {
stage('Publish JDK 14 + MongoDB 4.2') {
when {
changeset "ci/openjdk15-mongodb-4.2/**"
changeset "ci/openjdk14-mongodb-4.2/**"
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }
steps {
script {
def image = docker.build("springci/spring-data-openjdk15-with-mongodb-4.2.0", "ci/openjdk15-mongodb-4.2/")
def image = docker.build("springci/spring-data-openjdk14-with-mongodb-4.2.0", "ci/openjdk14-mongodb-4.2/")
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
image.push()
}
@@ -139,10 +139,10 @@ pipeline {
}
}
stage("test: baseline (jdk15)") {
stage("test: baseline (jdk14)") {
agent {
docker {
image 'springci/spring-data-openjdk15-with-mongodb-4.2.0:latest'
image 'springci/spring-data-openjdk14-with-mongodb-4.2.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}


@@ -1,4 +1,4 @@
FROM adoptopenjdk/openjdk15:latest
FROM adoptopenjdk/openjdk14:latest
ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

pom.xml (12 changed lines)

@@ -5,7 +5,7 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.2.0-M1</version>
<version>3.1.0-STATIC-METADATA-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>2.5.0-M1</version>
<version>2.4.0-SNAPSHOT</version>
</parent>
<modules>
@@ -26,8 +26,8 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>2.5.0-M1</springdata.commons>
<mongo>4.1.1</mongo>
<springdata.commons>2.4.0-BUILD-TIME-DOMAIN-TYPE-METADATA-SNAPSHOT</springdata.commons>
<mongo>4.1.0</mongo>
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
<jmh.version>1.19</jmh.version>
</properties>
@@ -134,8 +134,8 @@
<repositories>
<repository>
<id>spring-libs-milestone</id>
<url>https://repo.spring.io/libs-milestone</url>
<id>spring-libs-snapshot</id>
<url>https://repo.spring.io/libs-snapshot</url>
</repository>
<repository>
<id>sonatype-libs-snapshot</id>


@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.2.0-M1</version>
<version>3.1.0-STATIC-METADATA-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>


@@ -14,7 +14,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.2.0-M1</version>
<version>3.1.0-STATIC-METADATA-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>


@@ -11,7 +11,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.2.0-M1</version>
<version>3.1.0-STATIC-METADATA-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>


@@ -77,7 +77,7 @@ class AggregationUtil {
}
if (!(aggregation instanceof TypedAggregation)) {
return new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper);
return Aggregation.DEFAULT_CONTEXT;
}
Class<?> inputType = ((TypedAggregation) aggregation).getInputType();
@@ -98,7 +98,7 @@ class AggregationUtil {
*/
List<Document> createPipeline(Aggregation aggregation, AggregationOperationContext context) {
if (ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) {
if (!ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) {
return aggregation.toPipeline(context);
}


@@ -156,7 +156,6 @@ import com.mongodb.client.result.UpdateResult;
* @author Michael J. Simons
* @author Roman Puchkovskiy
* @author Yadhukrishna S Pai
* @author Anton Barkan
*/
public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider {
@@ -3290,10 +3289,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
cursorToUse = cursorToUse.batchSize(meta.getCursorBatchSize());
}
if (meta.getAllowDiskUse() != null) {
cursorToUse = cursorToUse.allowDiskUse(meta.getAllowDiskUse());
}
for (Meta.CursorOption option : meta.getFlags()) {
switch (option) {


@@ -707,9 +707,10 @@ class QueryOperations {
*/
List<Document> getUpdatePipeline(@Nullable Class<?> domainType) {
Class<?> type = domainType != null ? domainType : Object.class;
AggregationOperationContext context = domainType != null
? new RelaxedTypeBasedAggregationOperationContext(domainType, mappingContext, queryMapper)
: Aggregation.DEFAULT_CONTEXT;
AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type, mappingContext, queryMapper);
return aggregationUtil.createPipeline((AggregationUpdate) update, context);
}
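
For orientation, a minimal sketch (assuming the fluent template API of Spring Data MongoDB 3.x) of the kind of aggregation-pipeline update whose stages getUpdatePipeline(..) maps. Person is borrowed from the buildtimetypeinfo fixtures added later in this change set, and the field names and values are illustrative only:

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.buildtimetypeinfo.Person;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;

class UpdatePipelineSample {

    void bumpAge(MongoTemplate template) {
        // an aggregation-style update; QueryOperations#getUpdatePipeline maps its stages against the domain type
        AggregationUpdate update = AggregationUpdate.update().set("age").toValue(21);

        template.update(Person.class)
                .matching(query(where("firstname").is("spring")))
                .apply(update)
                .all();
    }
}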


@@ -17,7 +17,6 @@ package org.springframework.data.mongodb.core;
import static org.springframework.data.mongodb.core.query.SerializationUtils.*;
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.util.function.Tuple2;
@@ -2113,7 +2112,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
AggregationOperationContext context = agg instanceof TypedAggregation
? new TypeBasedAggregationOperationContext(((TypedAggregation<?>) agg).getInputType(),
getConverter().getMappingContext(), queryMapper)
: new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper);
: Aggregation.DEFAULT_CONTEXT;
return agg.toPipeline(new PrefixingDelegatingAggregationOperationContext(context, "fullDocument",
Arrays.asList("operationType", "fullDocument", "documentKey", "updateDescription", "ns")));
@@ -3329,10 +3328,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
if (meta.getCursorBatchSize() != null) {
findPublisherToUse = findPublisherToUse.batchSize(meta.getCursorBatchSize());
}
if (meta.getAllowDiskUse() != null) {
findPublisherToUse = findPublisherToUse.allowDiskUse(meta.getAllowDiskUse());
}
}
} catch (RuntimeException e) {


@@ -264,7 +264,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
return new Document(getOperator(), fieldObject);
}
/*
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#getOperator()
*/
@@ -1450,14 +1450,6 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
return field.getTarget();
}
if (field.getTarget().equals(Fields.UNDERSCORE_ID)) {
try {
return context.getReference(field).getReferenceValue();
} catch (java.lang.IllegalArgumentException e) {
return Fields.UNDERSCORE_ID_REF;
}
}
// check whether referenced field exists in the context
return context.getReference(field).getReferenceValue();


@@ -21,7 +21,6 @@ import java.util.ArrayList;
import java.util.List;
import org.bson.Document;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.PersistentPropertyPath;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference;
@@ -30,7 +29,6 @@ import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldRefe
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.util.Lazy;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
@@ -48,7 +46,6 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
private final Class<?> type;
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
private final QueryMapper mapper;
private final Lazy<MongoPersistentEntity<?>> entity;
/**
* Creates a new {@link TypeBasedAggregationOperationContext} for the given type, {@link MappingContext} and
@@ -68,7 +65,6 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
this.type = type;
this.mappingContext = mappingContext;
this.mapper = mapper;
this.entity = Lazy.of(() -> mappingContext.getPersistentEntity(type));
}
/*
@@ -155,14 +151,10 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
protected FieldReference getReferenceFor(Field field) {
if(entity.getNullable() == null) {
return new DirectFieldReference(new ExposedField(field, true));
}
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext
.getPersistentPropertyPath(field.getTarget(), type);
.getPersistentPropertyPath(field.getTarget(), type);
Field mappedField = field(field.getName(),
propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE));
propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE));
return new DirectFieldReference(new ExposedField(mappedField, true));
}


@@ -142,8 +142,12 @@ public class UnionWithOperation implements AggregationOperation {
private AggregationOperationContext computeContext(AggregationOperationContext source) {
if (domainType == null) {
return Aggregation.DEFAULT_CONTEXT;
}
if (source instanceof TypeBasedAggregationOperationContext) {
return ((TypeBasedAggregationOperationContext) source).continueOnMissingFieldReference(domainType != null ? domainType : Object.class);
return ((TypeBasedAggregationOperationContext) source).continueOnMissingFieldReference(domainType);
}
if (source instanceof ExposedFieldsAggregationOperationContext) {
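
As a reference point, a minimal sketch of building a $unionWith stage; the mapTo(..) type is what computeContext(..) above consults as domainType. The collection name and criteria are made up, and Person is again the fixture type from this change set:

import org.springframework.data.mongodb.buildtimetypeinfo.Person;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.UnionWithOperation;
import org.springframework.data.mongodb.core.query.Criteria;

class UnionWithSample {

    Aggregation adultsPlusRetired() {
        // mapTo(..) supplies the domainType that computeContext(..) uses to pick the field-mapping context
        return Aggregation.newAggregation(
                Aggregation.match(Criteria.where("age").gte(18)),
                UnionWithOperation.unionWith("retiredPersons").mapTo(Person.class));
    }
}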


@@ -181,11 +181,7 @@ public class Meta {
}
/**
* Enables writing to temporary files for aggregation stages and queries. When set to {@literal true}, aggregation
* stages can write data to the {@code _tmp} subdirectory in the {@code dbPath} directory.
* <p>
* Starting in MongoDB 4.2, the profiler log messages and diagnostic log messages includes a {@code usedDisk}
* indicator if any aggregation stage wrote data to temporary files due to memory restrictions.
* Set to {@literal true}, to allow aggregation stages to write data to disk.
*
* @param allowDiskUse use {@literal null} for server defaults.
* @since 3.0


@@ -46,7 +46,6 @@ import org.springframework.util.Assert;
* @author Thomas Darimont
* @author Christoph Strobl
* @author Mark Paluch
* @author Anton Barkan
*/
public class Query {
@@ -373,24 +372,6 @@ public class Query {
return this;
}
/**
* Enables writing to temporary files for aggregation stages and queries. When set to {@literal true}, aggregation
* stages can write data to the {@code _tmp} subdirectory in the {@code dbPath} directory.
* <p>
* Starting in MongoDB 4.2, the profiler log messages and diagnostic log messages includes a {@code usedDisk}
* indicator if any aggregation stage wrote data to temporary files due to memory restrictions.
*
* @param allowDiskUse
* @return this.
* @see Meta#setAllowDiskUse(Boolean)
* @since 3.2
*/
public Query allowDiskUse(boolean allowDiskUse) {
meta.setAllowDiskUse(allowDiskUse);
return this;
}
/**
* Set the number of documents to return in each response batch. <br />
* Use {@literal 0 (zero)} for no limit. A <strong>negative limit</strong> closes the cursor after returning a single
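
The hunk above removes the fluent Query.allowDiskUse(..) shortcut (marked @since 3.2 in the dropped javadoc). A minimal sketch of setting the option through Meta#setAllowDiskUse(Boolean) instead, which that javadoc itself pointed to; the criteria and values are illustrative:

import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.query.Meta;
import org.springframework.data.mongodb.core.query.Query;

class AllowDiskUseSample {

    Query activeUsers() {
        Meta meta = new Meta();
        meta.setAllowDiskUse(true);          // let the server write to temporary files if needed

        Query query = new Query(where("status").is("ACTIVE"));
        query.setMeta(meta);                 // attach the meta options to the query
        return query;
    }
}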


@@ -64,4 +64,16 @@ public interface ReactiveMongoRepository<T, ID> extends ReactiveSortingRepositor
*/
<S extends T> Flux<S> insert(Publisher<S> entities);
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example)
*/
<S extends T> Flux<S> findAll(Example<S> example);
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example, org.springframework.data.domain.Sort)
*/
<S extends T> Flux<S> findAll(Example<S> example, Sort sort);
}
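
A minimal usage sketch of the findAll(Example, ...) overrides declared above; PersonRepository is hypothetical and Person is the fixture type added elsewhere in this change set:

import org.springframework.data.domain.Example;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.buildtimetypeinfo.Person;
import org.springframework.data.mongodb.repository.ReactiveMongoRepository;

import reactor.core.publisher.Flux;

// hypothetical repository interface, declared only for this sketch
interface PersonRepository extends ReactiveMongoRepository<Person, Long> {}

class ReactiveQueryByExampleSample {

    Flux<Person> findByFirstname(PersonRepository repository) {
        Person probe = new Person("spring", null);   // null properties are ignored by the default matcher
        return repository.findAll(Example.of(probe), Sort.by("lastname"));
    }
}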


@@ -51,7 +51,6 @@ import com.mongodb.client.result.DeleteResult;
* @author Thomas Darimont
* @author Mark Paluch
* @author Mehran Behnam
* @author Jens Schauder
*/
public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
@@ -73,10 +72,6 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
this.mongoOperations = mongoOperations;
}
// -------------------------------------------------------------------------
// Methods from CrudRepository
// -------------------------------------------------------------------------
/*
* (non-Javadoc)
* @see org.springframework.data.repository.CrudRepository#save(java.lang.Object)
@@ -140,27 +135,6 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.CrudRepository#findAll()
*/
@Override
public List<T> findAll() {
return findAll(new Query());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.CrudRepository#findAllById(java.lang.Iterable)
*/
@Override
public Iterable<T> findAllById(Iterable<ID> ids) {
Assert.notNull(ids, "The given Ids of entities not be null!");
return findAll(getIdQuery(ids));
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.CrudRepository#count()
@@ -201,19 +175,6 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
}
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.CrudRepository#deleteAllById(java.lang.Iterable)
*/
@Override
public void deleteAllById(Iterable<? extends ID> ids) {
Assert.notNull(ids, "The given Iterable of ids must not be null!");
mongoOperations.remove(getIdQuery(ids), entityInformation.getJavaType(),
entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.CrudRepository#delete(java.lang.Iterable)
@@ -221,7 +182,7 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
@Override
public void deleteAll(Iterable<? extends T> entities) {
Assert.notNull(entities, "The given Iterable of entities must not be null!");
Assert.notNull(entities, "The given Iterable of entities not be null!");
entities.forEach(this::delete);
}
@@ -235,9 +196,27 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
mongoOperations.remove(new Query(), entityInformation.getCollectionName());
}
// -------------------------------------------------------------------------
// Methods from PagingAndSortingRepository
// -------------------------------------------------------------------------
/*
* (non-Javadoc)
* @see org.springframework.data.repository.CrudRepository#findAll()
*/
@Override
public List<T> findAll() {
return findAll(new Query());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.CrudRepository#findAllById(java.lang.Iterable)
*/
@Override
public Iterable<T> findAllById(Iterable<ID> ids) {
Assert.notNull(ids, "The given Ids of entities not be null!");
return findAll(new Query(new Criteria(entityInformation.getIdAttribute())
.in(Streamable.of(ids).stream().collect(StreamUtils.toUnmodifiableList()))));
}
/*
* (non-Javadoc)
@@ -266,10 +245,6 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
return findAll(new Query().with(sort));
}
// -------------------------------------------------------------------------
// Methods from MongoRepository
// -------------------------------------------------------------------------
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.MongoRepository#insert(java.lang.Object)
@@ -300,33 +275,23 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
return new ArrayList<>(mongoOperations.insertAll(list));
}
// -------------------------------------------------------------------------
// Methods from QueryByExampleExecutor
// -------------------------------------------------------------------------
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.QueryByExampleExecutor#findOne(org.springframework.data.domain.Example)
* @see org.springframework.data.mongodb.repository.MongoRepository#findAllByExample(org.springframework.data.domain.Example, org.springframework.data.domain.Pageable)
*/
@Override
public <S extends T> Optional<S> findOne(Example<S> example) {
public <S extends T> Page<S> findAll(final Example<S> example, Pageable pageable) {
Assert.notNull(example, "Sample must not be null!");
Assert.notNull(pageable, "Pageable must not be null!");
Query query = new Query(new Criteria().alike(example)) //
.collation(entityInformation.getCollation());
.collation(entityInformation.getCollation()).with(pageable); //
return Optional
.ofNullable(mongoOperations.findOne(query, example.getProbeType(), entityInformation.getCollectionName()));
}
List<S> list = mongoOperations.find(query, example.getProbeType(), entityInformation.getCollectionName());
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.MongoRepository#findAllByExample(org.springframework.data.domain.Example)
*/
@Override
public <S extends T> List<S> findAll(Example<S> example) {
return findAll(example, Sort.unsorted());
return PageableExecutionUtils.getPage(list, pageable,
() -> mongoOperations.count(Query.of(query).limit(-1).skip(-1), example.getProbeType(), entityInformation.getCollectionName()));
}
/*
@@ -348,21 +313,27 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.MongoRepository#findAllByExample(org.springframework.data.domain.Example, org.springframework.data.domain.Pageable)
* @see org.springframework.data.mongodb.repository.MongoRepository#findAllByExample(org.springframework.data.domain.Example)
*/
@Override
public <S extends T> Page<S> findAll(Example<S> example, Pageable pageable) {
public <S extends T> List<S> findAll(Example<S> example) {
return findAll(example, Sort.unsorted());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.QueryByExampleExecutor#findOne(org.springframework.data.domain.Example)
*/
@Override
public <S extends T> Optional<S> findOne(Example<S> example) {
Assert.notNull(example, "Sample must not be null!");
Assert.notNull(pageable, "Pageable must not be null!");
Query query = new Query(new Criteria().alike(example)) //
.collation(entityInformation.getCollation()).with(pageable); //
.collation(entityInformation.getCollation());
List<S> list = mongoOperations.find(query, example.getProbeType(), entityInformation.getCollectionName());
return PageableExecutionUtils.getPage(list, pageable,
() -> mongoOperations.count(Query.of(query).limit(-1).skip(-1), example.getProbeType(), entityInformation.getCollectionName()));
return Optional
.ofNullable(mongoOperations.findOne(query, example.getProbeType(), entityInformation.getCollectionName()));
}
/*
@@ -395,10 +366,6 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
return mongoOperations.exists(query, example.getProbeType(), entityInformation.getCollectionName());
}
// -------------------------------------------------------------------------
// Utility methods
// -------------------------------------------------------------------------
private Query getIdQuery(Object id) {
return new Query(getIdCriteria(id));
}
@@ -407,11 +374,6 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
return where(entityInformation.getIdAttribute()).is(id);
}
private Query getIdQuery(Iterable<? extends ID> ids) {
return new Query(new Criteria(entityInformation.getIdAttribute())
.in(Streamable.of(ids).stream().collect(StreamUtils.toUnmodifiableList())));
}
private List<T> findAll(@Nullable Query query) {
if (query == null) {
@@ -420,5 +382,4 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
return mongoOperations.find(query, entityInformation.getJavaType(), entityInformation.getCollectionName());
}
}
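
A companion sketch for the blocking side: the paged Query-by-Example path served by findAll(Example, Pageable) above, which issues a find(..) for the page content plus a count(..) for the total. The repository type and page size are illustrative:

import org.springframework.data.domain.Example;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.mongodb.buildtimetypeinfo.Person;
import org.springframework.data.mongodb.repository.MongoRepository;

class PagedExampleSample {

    Page<Person> firstPage(MongoRepository<Person, Long> repository) {
        Person probe = new Person("spring", null);
        // the repository runs a find(..) for the page content and a count(..) for the total
        return repository.findAll(Example.of(probe), PageRequest.of(0, 20));
    }
}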


@@ -21,12 +21,10 @@ import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.io.Serializable;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import org.reactivestreams.Publisher;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.dao.OptimisticLockingFailureException;
import org.springframework.data.domain.Example;
@@ -49,7 +47,6 @@ import com.mongodb.client.result.DeleteResult;
* @author Oliver Gierke
* @author Christoph Strobl
* @author Ruben J Garcia
* @author Jens Schauder
* @since 2.0
*/
public class SimpleReactiveMongoRepository<T, ID extends Serializable> implements ReactiveMongoRepository<T, ID> {
@@ -67,9 +64,232 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
this.mongoOperations = mongoOperations;
}
// -------------------------------------------------------------------------
// Methods from ReactiveCrudRepository
// -------------------------------------------------------------------------
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#findById(java.lang.Object)
*/
@Override
public Mono<T> findById(ID id) {
Assert.notNull(id, "The given id must not be null!");
return mongoOperations.findById(id, entityInformation.getJavaType(), entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#findById(org.reactivestreams.Publisher)
*/
@Override
public Mono<T> findById(Publisher<ID> publisher) {
Assert.notNull(publisher, "The given id must not be null!");
return Mono.from(publisher).flatMap(
id -> mongoOperations.findById(id, entityInformation.getJavaType(), entityInformation.getCollectionName()));
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#findOne(org.springframework.data.domain.Example)
*/
@Override
public <S extends T> Mono<S> findOne(Example<S> example) {
Assert.notNull(example, "Sample must not be null!");
Query query = new Query(new Criteria().alike(example)) //
.collation(entityInformation.getCollation()) //
.limit(2);
return mongoOperations.find(query, example.getProbeType(), entityInformation.getCollectionName()).buffer(2)
.map(vals -> {
if (vals.size() > 1) {
throw new IncorrectResultSizeDataAccessException(1);
}
return vals.iterator().next();
}).next();
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#existsById(java.lang.Object)
*/
@Override
public Mono<Boolean> existsById(ID id) {
Assert.notNull(id, "The given id must not be null!");
return mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(),
entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#existsById(org.reactivestreams.Publisher)
*/
@Override
public Mono<Boolean> existsById(Publisher<ID> publisher) {
Assert.notNull(publisher, "The given id must not be null!");
return Mono.from(publisher).flatMap(id -> mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(),
entityInformation.getCollectionName()));
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#exists(org.springframework.data.domain.Example)
*/
@Override
public <S extends T> Mono<Boolean> exists(Example<S> example) {
Assert.notNull(example, "Sample must not be null!");
Query query = new Query(new Criteria().alike(example)) //
.collation(entityInformation.getCollation());
return mongoOperations.exists(query, example.getProbeType(), entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveSortingRepository#findAll()
*/
@Override
public Flux<T> findAll() {
return findAll(new Query());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#findAllById(java.lang.Iterable)
*/
@Override
public Flux<T> findAllById(Iterable<ID> ids) {
Assert.notNull(ids, "The given Iterable of Id's must not be null!");
return findAll(new Query(new Criteria(entityInformation.getIdAttribute())
.in(Streamable.of(ids).stream().collect(StreamUtils.toUnmodifiableList()))));
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#findAllById(org.reactivestreams.Publisher)
*/
@Override
public Flux<T> findAllById(Publisher<ID> ids) {
Assert.notNull(ids, "The given Publisher of Id's must not be null!");
return Flux.from(ids).buffer().flatMap(this::findAllById);
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveSortingRepository#findAll(org.springframework.data.domain.Sort)
*/
@Override
public Flux<T> findAll(Sort sort) {
Assert.notNull(sort, "Sort must not be null!");
return findAll(new Query().with(sort));
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#findAll(org.springframework.data.domain.Example, org.springframework.data.domain.Sort)
*/
@Override
public <S extends T> Flux<S> findAll(Example<S> example, Sort sort) {
Assert.notNull(example, "Sample must not be null!");
Assert.notNull(sort, "Sort must not be null!");
Query query = new Query(new Criteria().alike(example)) //
.collation(entityInformation.getCollation()) //
.with(sort);
return mongoOperations.find(query, example.getProbeType(), entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#findAll(org.springframework.data.domain.Example)
*/
@Override
public <S extends T> Flux<S> findAll(Example<S> example) {
Assert.notNull(example, "Example must not be null!");
return findAll(example, Sort.unsorted());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#count()
*/
@Override
public Mono<Long> count() {
return mongoOperations.count(new Query(), entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#count(org.springframework.data.domain.Example)
*/
@Override
public <S extends T> Mono<Long> count(Example<S> example) {
Assert.notNull(example, "Sample must not be null!");
Query query = new Query(new Criteria().alike(example)) //
.collation(entityInformation.getCollation());
return mongoOperations.count(query, example.getProbeType(), entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#insert(java.lang.Object)
*/
@Override
public <S extends T> Mono<S> insert(S entity) {
Assert.notNull(entity, "Entity must not be null!");
return mongoOperations.insert(entity, entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#insert(java.lang.Iterable)
*/
@Override
public <S extends T> Flux<S> insert(Iterable<S> entities) {
Assert.notNull(entities, "The given Iterable of entities must not be null!");
List<S> source = Streamable.of(entities).stream().collect(StreamUtils.toUnmodifiableList());
return source.isEmpty() ? Flux.empty() : Flux.from(mongoOperations.insertAll(source));
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#insert(org.reactivestreams.Publisher)
*/
@Override
public <S extends T> Flux<S> insert(Publisher<S> entities) {
Assert.notNull(entities, "The given Publisher of entities must not be null!");
return Flux.from(entities).flatMap(entity -> mongoOperations.insert(entity, entityInformation.getCollectionName()));
}
/*
* (non-Javadoc)
@@ -117,100 +337,6 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
mongoOperations.save(entity, entityInformation.getCollectionName()).then(Mono.just(entity)));
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#findById(java.lang.Object)
*/
@Override
public Mono<T> findById(ID id) {
Assert.notNull(id, "The given id must not be null!");
return mongoOperations.findById(id, entityInformation.getJavaType(), entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#findById(org.reactivestreams.Publisher)
*/
@Override
public Mono<T> findById(Publisher<ID> publisher) {
Assert.notNull(publisher, "The given id must not be null!");
return Mono.from(publisher).flatMap(
id -> mongoOperations.findById(id, entityInformation.getJavaType(), entityInformation.getCollectionName()));
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#existsById(java.lang.Object)
*/
@Override
public Mono<Boolean> existsById(ID id) {
Assert.notNull(id, "The given id must not be null!");
return mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(),
entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#existsById(org.reactivestreams.Publisher)
*/
@Override
public Mono<Boolean> existsById(Publisher<ID> publisher) {
Assert.notNull(publisher, "The given id must not be null!");
return Mono.from(publisher).flatMap(id -> mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(),
entityInformation.getCollectionName()));
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveSortingRepository#findAll()
*/
@Override
public Flux<T> findAll() {
return findAll(new Query());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#findAllById(java.lang.Iterable)
*/
@Override
public Flux<T> findAllById(Iterable<ID> ids) {
Assert.notNull(ids, "The given Iterable of Id's must not be null!");
return findAll(getIdQuery(ids));
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#findAllById(org.reactivestreams.Publisher)
*/
@Override
public Flux<T> findAllById(Publisher<ID> ids) {
Assert.notNull(ids, "The given Publisher of Id's must not be null!");
return Flux.from(ids).buffer().flatMap(this::findAllById);
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#count()
*/
@Override
public Mono<Long> count() {
return mongoOperations.count(new Query(), entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#deleteById(java.lang.Object)
@@ -266,19 +392,6 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
return remove.then();
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#deleteAllById(java.lang.Iterable)
*/
@Override
public Mono<Void> deleteAllById(Iterable<? extends ID> ids) {
Assert.notNull(ids, "The given Iterable of Id's must not be null!");
return mongoOperations
.remove(getIdQuery(ids), entityInformation.getJavaType(), entityInformation.getCollectionName()).then();
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveCrudRepository#deleteAll(java.lang.Iterable)
@@ -288,14 +401,7 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
Assert.notNull(entities, "The given Iterable of entities must not be null!");
Collection<?> idCollection = StreamUtils.createStreamFromIterator(entities.iterator()).map(entityInformation::getId)
.collect(Collectors.toList());
Criteria idsInCriteria = where(entityInformation.getIdAttribute()).in(idCollection);
return mongoOperations
.remove(new Query(idsInCriteria), entityInformation.getJavaType(), entityInformation.getCollectionName())
.then();
return Flux.fromIterable(entities).flatMap(this::delete).then();
}
/*
@@ -322,151 +428,6 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
return mongoOperations.remove(new Query(), entityInformation.getCollectionName()).then(Mono.empty());
}
// -------------------------------------------------------------------------
// Methods from ReactiveSortingRepository
// -------------------------------------------------------------------------
/*
* (non-Javadoc)
* @see org.springframework.data.repository.reactive.ReactiveSortingRepository#findAll(org.springframework.data.domain.Sort)
*/
@Override
public Flux<T> findAll(Sort sort) {
Assert.notNull(sort, "Sort must not be null!");
return findAll(new Query().with(sort));
}
// -------------------------------------------------------------------------
// Methods from ReactiveMongoRepository
// -------------------------------------------------------------------------
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#insert(java.lang.Object)
*/
@Override
public <S extends T> Mono<S> insert(S entity) {
Assert.notNull(entity, "Entity must not be null!");
return mongoOperations.insert(entity, entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#insert(java.lang.Iterable)
*/
@Override
public <S extends T> Flux<S> insert(Iterable<S> entities) {
Assert.notNull(entities, "The given Iterable of entities must not be null!");
List<S> source = Streamable.of(entities).stream().collect(StreamUtils.toUnmodifiableList());
return source.isEmpty() ? Flux.empty() : Flux.from(mongoOperations.insertAll(source));
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#insert(org.reactivestreams.Publisher)
*/
@Override
public <S extends T> Flux<S> insert(Publisher<S> entities) {
Assert.notNull(entities, "The given Publisher of entities must not be null!");
return Flux.from(entities).flatMap(entity -> mongoOperations.insert(entity, entityInformation.getCollectionName()));
}
// -------------------------------------------------------------------------
// Methods from ReactiveMongoRepository
// -------------------------------------------------------------------------
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#findOne(org.springframework.data.domain.Example)
*/
@Override
public <S extends T> Mono<S> findOne(Example<S> example) {
Assert.notNull(example, "Sample must not be null!");
Query query = new Query(new Criteria().alike(example)) //
.collation(entityInformation.getCollation()) //
.limit(2);
return mongoOperations.find(query, example.getProbeType(), entityInformation.getCollectionName()).buffer(2)
.map(vals -> {
if (vals.size() > 1) {
throw new IncorrectResultSizeDataAccessException(1);
}
return vals.iterator().next();
}).next();
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#findAll(org.springframework.data.domain.Example)
*/
@Override
public <S extends T> Flux<S> findAll(Example<S> example) {
Assert.notNull(example, "Example must not be null!");
return findAll(example, Sort.unsorted());
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#findAll(org.springframework.data.domain.Example, org.springframework.data.domain.Sort)
*/
@Override
public <S extends T> Flux<S> findAll(Example<S> example, Sort sort) {
Assert.notNull(example, "Sample must not be null!");
Assert.notNull(sort, "Sort must not be null!");
Query query = new Query(new Criteria().alike(example)) //
.collation(entityInformation.getCollation()) //
.with(sort);
return mongoOperations.find(query, example.getProbeType(), entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#count(org.springframework.data.domain.Example)
*/
@Override
public <S extends T> Mono<Long> count(Example<S> example) {
Assert.notNull(example, "Sample must not be null!");
Query query = new Query(new Criteria().alike(example)) //
.collation(entityInformation.getCollation());
return mongoOperations.count(query, example.getProbeType(), entityInformation.getCollectionName());
}
/*
* (non-Javadoc)
* @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#exists(org.springframework.data.domain.Example)
*/
@Override
public <S extends T> Mono<Boolean> exists(Example<S> example) {
Assert.notNull(example, "Sample must not be null!");
Query query = new Query(new Criteria().alike(example)) //
.collation(entityInformation.getCollation());
return mongoOperations.exists(query, example.getProbeType(), entityInformation.getCollectionName());
}
private Query getIdQuery(Object id) {
return new Query(getIdCriteria(id));
}
@@ -475,13 +436,6 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
return where(entityInformation.getIdAttribute()).is(id);
}
private Query getIdQuery(Iterable<? extends ID> ids) {
Collection<?> idCollection = StreamUtils.createStreamFromIterator(ids.iterator()).collect(Collectors.toList());
Criteria idsInCriteria = where(entityInformation.getIdAttribute()).in(idCollection);
return new Query(idsInCriteria);
}
private Flux<T> findAll(Query query) {
return mongoOperations.find(query, entityInformation.getJavaType(), entityInformation.getCollectionName());


@@ -27,7 +27,7 @@ import kotlin.reflect.KProperty1
* @since 2.2
*/
class KPropertyPath<T, U>(
internal val parent: KProperty<U?>,
internal val parent: KProperty<U>,
internal val child: KProperty1<U, T>
) : KProperty<T> by child
@@ -52,8 +52,7 @@ internal fun asString(property: KProperty<*>): String {
* Book::author / Author::name isEqualTo "Herman Melville"
* ```
* @author Tjeu Kayim
* @author Yoann de Martino
* @since 2.2
*/
operator fun <T, U> KProperty<T?>.div(other: KProperty1<T, U>) =
operator fun <T, U> KProperty<T>.div(other: KProperty1<T, U>) =
KPropertyPath(this, other)


@@ -0,0 +1,68 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.buildtimetypeinfo;
import org.springframework.util.ObjectUtils;
/**
* @author Christoph Strobl
* @since 2020/10
*/
public class Address {
String city;
String street;
public Address(String city, String street) {
this.city = city;
this.street = street;
}
public String getCity() {
return city;
}
public String getStreet() {
return street;
}
@Override
public String toString() {
return "Address{" + "city='" + city + '\'' + ", street='" + street + '\'' + '}';
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
Address address = (Address) o;
if (!ObjectUtils.nullSafeEquals(city, address.city)) {
return false;
}
return ObjectUtils.nullSafeEquals(street, address.street);
}
@Override
public int hashCode() {
int result = ObjectUtils.nullSafeHashCode(city);
result = 31 * result + ObjectUtils.nullSafeHashCode(street);
return result;
}
}


@@ -0,0 +1,50 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.buildtimetypeinfo;
import org.springframework.data.mapping.model.DomainTypeConstructor;
import org.springframework.data.mapping.model.DomainTypeInformation;
import org.springframework.data.mapping.model.Field;
/**
* @author Christoph Strobl
* @since 2020/10
*/
public class AddressTypeInformation extends DomainTypeInformation<Address> {
private static final AddressTypeInformation INSTANCE = new AddressTypeInformation();
private AddressTypeInformation() {
super(Address.class);
// CONSTRUCTOR
setConstructor(computePreferredConstructor());
// FIELDS
addField(Field.<Address> string("city").getter(Address::getCity));
addField(Field.<Address> string("street").getter(Address::getStreet));
}
public static AddressTypeInformation instance() {
return INSTANCE;
}
private DomainTypeConstructor<Address> computePreferredConstructor() {
return DomainTypeConstructor.<Address> builder().args("city", "street")
.newInstanceFunction(args -> new Address((String) args[0], (String) args[1]));
}
}
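
A minimal sketch of how such hand-written metadata appears to be registered, mirroring the ClassTypeInformation.warmCache(..) call used by the staticEntityMetadata test further down. The warmCache / DomainTypeInformation API comes from the experimental commons snapshot this branch builds against, so treat this as illustrative:

import java.util.Arrays;
import java.util.LinkedHashSet;

import org.springframework.data.mongodb.buildtimetypeinfo.Address;
import org.springframework.data.mongodb.buildtimetypeinfo.AddressTypeInformation;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.util.ClassTypeInformation;

class StaticMetadataBootstrapSample {

    MongoMappingContext mappingContext() {
        // register the pre-computed type information so entity introspection can skip reflection
        ClassTypeInformation.warmCache(AddressTypeInformation.instance());

        MongoMappingContext context = new MongoMappingContext();
        context.setInitialEntitySet(new LinkedHashSet<>(Arrays.asList(Address.class)));
        context.initialize();
        return context;
    }
}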


@@ -0,0 +1,139 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.buildtimetypeinfo;
import java.util.List;
import org.springframework.util.ObjectUtils;
/**
* @author Christoph Strobl
* @since 2020/10
*/
public class Person {
private long id;
private String firstname, lastname; // TODO: we need a persistence constructor to resolve this here.
private int age;
private Address address;
private List<String> nicknames;
public Person(String firstname, String lastname) {
this.firstname = firstname;
this.lastname = lastname;
}
private Person(long id, String firstname, String lastname, int age, Address address, List<String> nicknames) {
this.id = id;
this.firstname = firstname;
this.lastname = lastname;
this.age = age;
this.address = address;
this.nicknames = nicknames;
}
public String getFirstname() {
return firstname;
}
public String getLastname() {
return lastname;
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
public long getId() {
return id;
}
public Person withId(long id) {
return new Person(id, firstname, lastname, age, address, nicknames);
}
public Address getAddress() {
return address;
}
public void setAddress(Address address) {
this.address = address;
}
public List<String> getNicknames() {
return nicknames;
}
public void setNicknames(List<String> nicknames) {
this.nicknames = nicknames;
}
public void setFirstname(String firstname) {
this.firstname = firstname;
}
public void setLastname(String lastname) {
this.lastname = lastname;
}
@Override
public String toString() {
return "Person{" + "id=" + id + ", firstname='" + firstname + '\'' + ", lastname='" + lastname + '\'' + ", age="
+ age + ", address=" + address + ", nicknames=" + nicknames + '}';
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
Person person = (Person) o;
if (id != person.id)
return false;
if (age != person.age)
return false;
if (!ObjectUtils.nullSafeEquals(firstname, person.firstname)) {
return false;
}
if (!ObjectUtils.nullSafeEquals(lastname, person.lastname)) {
return false;
}
if (!ObjectUtils.nullSafeEquals(address, person.address)) {
return false;
}
return ObjectUtils.nullSafeEquals(nicknames, person.nicknames);
}
@Override
public int hashCode() {
int result = (int) (id ^ (id >>> 32));
result = 31 * result + ObjectUtils.nullSafeHashCode(firstname);
result = 31 * result + ObjectUtils.nullSafeHashCode(lastname);
result = 31 * result + age;
result = 31 * result + ObjectUtils.nullSafeHashCode(address);
result = 31 * result + ObjectUtils.nullSafeHashCode(nicknames);
return result;
}
}


@@ -0,0 +1,128 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.buildtimetypeinfo;
import java.lang.annotation.Annotation;
import java.util.List;
import org.springframework.data.mapping.model.DomainTypeConstructor;
import org.springframework.data.mapping.model.DomainTypeInformation;
import org.springframework.data.mapping.model.Field;
import org.springframework.data.mapping.model.ListTypeInformation;
import org.springframework.data.mapping.model.StringTypeInformation;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.FieldType;
/**
* @author Christoph Strobl
* @since 2020/10
*/
public class PersonTypeInformation extends DomainTypeInformation<Person> {
private static final PersonTypeInformation INSTANCE = new PersonTypeInformation();
private PersonTypeInformation() {
super(Person.class);
// CONSTRUCTOR
setConstructor(computePreferredConstructor());
// ANNOTATIONS
addAnnotation(computeAtDocumentAnnotation());
// FIELDS
addField(
Field.<Person> int64("id").annotatedWithAtId().getter(Person::getId).wither((bean, id) -> bean.withId(id)));
addField(Field.<Person> string("firstname").getter(Person::getFirstname).annotation(atFieldOnFirstname()));
addField(Field.<Person> string("lastname").getter(Person::getLastname));
addField(Field.<Person> int32("age").getter(Person::getAge).setter(Person::setAge));
addField(Field.<Person, Address> type("address", AddressTypeInformation.instance()).getter(Person::getAddress)
.setter(Person::setAddress));
addField(Field.<Person, List<String>> type("nicknames", new ListTypeInformation<>(StringTypeInformation.instance()))
.getter(Person::getNicknames).setter(Person::setNicknames));
}
public static PersonTypeInformation instance() {
return INSTANCE;
}
private DomainTypeConstructor<Person> computePreferredConstructor() {
return DomainTypeConstructor.<Person> builder().args("firstname", "lastname")
.newInstanceFunction((args) -> new Person((String) args[0], (String) args[1]));
}
private Document computeAtDocumentAnnotation() {
return new Document() {
@Override
public Class<? extends Annotation> annotationType() {
return Document.class;
}
@Override
public String value() {
return collection();
}
@Override
public String collection() {
return "star-wars";
}
@Override
public String language() {
return "";
}
@Override
public String collation() {
return "";
}
};
}
private Annotation atFieldOnFirstname() {
return new org.springframework.data.mongodb.core.mapping.Field() {
@Override
public Class<? extends Annotation> annotationType() {
return org.springframework.data.mongodb.core.mapping.Field.class;
}
@Override
public String value() {
return "first-name";
}
@Override
public String name() {
return value();
}
@Override
public int order() {
return 0;
}
@Override
public FieldType targetType() {
return FieldType.IMPLICIT;
}
};
}
}


@@ -45,7 +45,6 @@ import com.mongodb.client.FindIterable;
* @author Oliver Gierke
* @author Christoph Strobl
* @author Mark Paluch
* @author Anton Barkan
*/
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.LENIENT)
@@ -62,7 +61,6 @@ class QueryCursorPreparerUnitTests {
when(factory.getCodecRegistry()).thenReturn(MongoClientSettings.getDefaultCodecRegistry());
when(cursor.batchSize(anyInt())).thenReturn(cursor);
when(cursor.comment(anyString())).thenReturn(cursor);
when(cursor.allowDiskUse(anyBoolean())).thenReturn(cursor);
when(cursor.maxTime(anyLong(), any())).thenReturn(cursor);
when(cursor.hint(any())).thenReturn(cursor);
when(cursor.noCursorTimeout(anyBoolean())).thenReturn(cursor);
@@ -96,6 +94,27 @@ class QueryCursorPreparerUnitTests {
verify(cursor).hint(new Document("age", 1));
}
// TODO
// @Test // DATAMONGO-957
// public void doesNotApplyMetaWhenEmpty() {
//
// Query query = query(where("foo").is("bar"));
// query.setMeta(new Meta());
//
// prepare(query);
//
// verify(cursor, never()).modifiers(any(Document.class));
// }
// @Test // DATAMONGO-957
// public void appliesMaxScanCorrectly() {
//
// Query query = query(where("foo").is("bar")).maxScan(100);
// prepare(query);
//
// verify(cursor).maxScan(100);
// }
@Test // DATAMONGO-957
void appliesMaxTimeCorrectly() {
@@ -114,14 +133,15 @@ class QueryCursorPreparerUnitTests {
verify(cursor).comment("spring data");
}
@Test // DATAMONGO-2659
void appliesAllowDiskUseCorrectly() {
Query query = query(where("foo").is("bar")).allowDiskUse(true);
prepare(query);
verify(cursor).allowDiskUse(true);
}
// TODO
// @Test // DATAMONGO-957
// public void appliesSnapshotCorrectly() {
//
// Query query = query(where("foo").is("bar")).useSnapshot();
// prepare(query);
//
// verify(cursor).snapshot(true);
// }
@Test // DATAMONGO-1480
void appliesNoCursorTimeoutCorrectly() {


@@ -180,7 +180,6 @@ public class ReactiveMongoTemplateUnitTests {
when(findPublisher.limit(anyInt())).thenReturn(findPublisher);
when(findPublisher.collation(any())).thenReturn(findPublisher);
when(findPublisher.first()).thenReturn(findPublisher);
when(findPublisher.allowDiskUse(anyBoolean())).thenReturn(findPublisher);
when(aggregatePublisher.allowDiskUse(anyBoolean())).thenReturn(aggregatePublisher);
when(aggregatePublisher.collation(any())).thenReturn(aggregatePublisher);
when(aggregatePublisher.maxTime(anyLong(), any())).thenReturn(aggregatePublisher);
@@ -232,17 +231,6 @@ public class ReactiveMongoTemplateUnitTests {
verify(findPublisher).batchSize(1234);
}
@Test // DATAMONGO-2659
void executeQueryShouldUseAllowDiskSizeWhenPresent() {
when(findPublisher.batchSize(anyInt())).thenReturn(findPublisher);
Query query = new Query().allowDiskUse(true);
template.find(query, Person.class).subscribe();
verify(findPublisher).allowDiskUse(true);
}
@Test // DATAMONGO-1518
void findShouldUseCollationWhenPresent() {


@@ -1928,22 +1928,6 @@ public class AggregationTests {
assertThat(results.getRawResults()).isEmpty();
}
@Test // DATAMONGO-2635
void mapsEnumsInMatchClauseUsingInCriteriaCorrectly() {
WithEnum source = new WithEnum();
source.enumValue = MyEnum.TWO;
source.id = "id-1";
mongoTemplate.save(source);
Aggregation agg = newAggregation(match(where("enumValue").in(Collections.singletonList(MyEnum.TWO))));
AggregationResults<Document> results = mongoTemplate.aggregate(agg, mongoTemplate.getCollectionName(WithEnum.class),
Document.class);
assertThat(results.getMappedResults()).hasSize(1);
}
private void createUsersWithReferencedPersons() {
mongoTemplate.dropCollection(User.class);
@@ -2256,15 +2240,4 @@ public class AggregationTests {
String p1;
String p2;
}
static enum MyEnum {
ONE, TWO
}
@lombok.Data
static class WithEnum {
@Id String id;
MyEnum enumValue;
}
}


@@ -29,11 +29,6 @@ import org.bson.Document;
import org.junit.jupiter.api.Test;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond;
import org.springframework.data.mongodb.core.aggregation.ProjectionOperationUnitTests.BookWithFieldAnnotation;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.query.Criteria;
/**
@@ -587,17 +582,6 @@ public class AggregationUnitTests {
"{\"attributeRecordArrays\": {\"$reduce\": {\"input\": \"$attributeRecordArrays\", \"initialValue\": [], \"in\": {\"$concatArrays\": [\"$$value\", \"$$this\"]}}}}"));
}
@Test // DATAMONGO-2644
void projectOnIdIsAlwaysValid() {
MongoMappingContext mappingContext = new MongoMappingContext();
Document target = new Aggregation(bucket("start"), project("_id")).toDocument("collection-1",
new RelaxedTypeBasedAggregationOperationContext(BookWithFieldAnnotation.class, mappingContext,
new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))));
assertThat(extractPipelineElement(target, 1, "$project")).isEqualTo(Document.parse(" { \"_id\" : \"$_id\" }"));
}
private Document extractPipelineElement(Document agg, int index, String operation) {
List<Document> pipeline = (List<Document>) agg.get("pipeline");


@@ -23,6 +23,7 @@ import static org.springframework.data.mongodb.core.DocumentTestUtils.*;
import lombok.EqualsAndHashCode;
import lombok.RequiredArgsConstructor;
import java.lang.annotation.Annotation;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.URL;
@@ -70,6 +71,7 @@ import org.springframework.data.mongodb.core.convert.DocumentAccessorUnitTests.N
import org.springframework.data.mongodb.core.convert.DocumentAccessorUnitTests.ProjectingType;
import org.springframework.data.mongodb.core.convert.MappingMongoConverterUnitTests.ClassWithMapUsingEnumAsKey.FooBarEnum;
import org.springframework.data.mongodb.core.geo.Sphere;
import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.FieldType;
@@ -78,7 +80,9 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.mapping.PersonPojoStringId;
import org.springframework.data.mongodb.core.mapping.TextScore;
import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;
import org.springframework.data.mongodb.buildtimetypeinfo.AddressTypeInformation;
import org.springframework.data.util.ClassTypeInformation;
import org.springframework.data.mongodb.buildtimetypeinfo.PersonTypeInformation;
import org.springframework.test.util.ReflectionTestUtils;
import com.mongodb.BasicDBList;
@@ -2179,6 +2183,108 @@ public class MappingMongoConverterUnitTests {
assertThat(((LinkedHashMap) result.get("cluster")).get("_id")).isEqualTo(100L);
}
// @Test
// public void perf1() {
//
// ClassTypeInformation.warmCache(PersonTypeInformation.instance(), AddressTypeInformation.instance());
//
// MongoMappingContext mappingContext = new MongoMappingContext();
// mappingContext.setInitialEntitySet(new LinkedHashSet<>(
// Arrays.asList(org.springframework.data.mongodb.xxx.Person.class, org.springframework.data.mongodb.xxx.Address.class)));
// mappingContext.initialize();
//
// MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
//
// org.springframework.data.mongodb.xxx.Person source = new org.springframework.data.mongodb.xxx.Person("spring", "data");
// source.setAddress(new org.springframework.data.mongodb.xxx.Address("the city", "never sleeps"));
// source.setAge(10);
// source = source.withId(9876);
// source.setNicknames(Arrays.asList("tick", "trick", "track"));
//
// StopWatch stopWatch = new StopWatch();
//
// List<org.bson.Document> sources = new ArrayList<>();
// stopWatch.start("write");
// for (int i = 0; i < 10000; i++) {
//
// org.bson.Document targetDocument = new org.bson.Document();
// converter.write(source, targetDocument);
//
// sources.add(targetDocument);
// }
// stopWatch.stop();
//
// stopWatch.start("read");
// for (org.bson.Document sourceDoc : sources) {
// assertThat(converter.read(org.springframework.data.mongodb.xxx.Person.class, sourceDoc)).isEqualTo(source);
// }
// stopWatch.stop();
//
// System.out.println(stopWatch.prettyPrint());
//
// }
// public void perf2() {
//
// ClassTypeInformation.warmCache(new PersonTypeInformation(), new AddressTypeInformation());
//
// MongoMappingContext mappingContext = new MongoMappingContext();
// mappingContext.setInitialEntitySet(new LinkedHashSet<>(Arrays.asList(org.springframework.data.mongodb.xxx.Person.class,
// org.springframework.data.mongodb.xxx.Address.class)));
// mappingContext.initialize();
//
// MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
//
// org.springframework.data.mongodb.xxx.Person source = new org.springframework.data.mongodb.xxx.Person("spring", "data");
// source.setAddress(new org.springframework.data.mongodb.xxx.Address("the city", "never sleeps"));
// source.setAge(10);
// source.setId(9876);
// source.setNicknames(Arrays.asList("tick", "trick", "track"));
//
// }
@Test
public void staticEntityMetadata() {
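// Round-trips a Person through the converter with type information for Person and
// Address pre-registered from the generated build-time metadata, then verifies the
// mapped field names ("_id", "first-name", "address", "nicknames") and the
// annotated collection name ("star-wars").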
ClassTypeInformation.warmCache(PersonTypeInformation.instance(), AddressTypeInformation.instance());
MongoMappingContext mappingContext = new MongoMappingContext();
mappingContext.setInitialEntitySet(new LinkedHashSet<>(
Arrays.asList(org.springframework.data.mongodb.buildtimetypeinfo.Person.class, org.springframework.data.mongodb.buildtimetypeinfo.Address.class)));
mappingContext.initialize();
org.springframework.data.mongodb.buildtimetypeinfo.Person source = new org.springframework.data.mongodb.buildtimetypeinfo.Person("spring", "data");
source.setAddress(new org.springframework.data.mongodb.buildtimetypeinfo.Address("the city", "never sleeps"));
source.setAge(10);
source = source.withId(9876);
source.setNicknames(Arrays.asList("tick", "trick", "track"));
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
org.bson.Document targetDocument = new org.bson.Document();
System.out.println();
System.out.println("------ WRITE -------");
converter.write(source, targetDocument);
System.out.println();
System.out.println("targetDocument: " + targetDocument);
System.out.println();
System.out.println("------ READ -------");
assertThat(targetDocument).containsEntry("_id", 9876L);
assertThat(targetDocument).containsEntry("first-name", "spring");
assertThat(targetDocument).containsEntry("address",
new org.bson.Document("city", "the city").append("street", "never sleeps"));
assertThat(targetDocument).containsEntry("nicknames", Arrays.asList("tick", "trick", "track"));
org.springframework.data.mongodb.buildtimetypeinfo.Person targetEntity = converter.read(org.springframework.data.mongodb.buildtimetypeinfo.Person.class,
targetDocument);
System.out.println();
System.out.println("targetEntity: " + targetEntity);
assertThat(targetEntity).isEqualTo(source);
BasicMongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(org.springframework.data.mongodb.buildtimetypeinfo.Person.class);
assertThat(entity.getCollection()).isEqualTo("star-wars");
}
static class GenericType<T> {
T content;
}
@@ -2649,4 +2755,34 @@ public class MappingMongoConverterUnitTests {
return entity;
}
}
void xxx2() {
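// Scratch code: builds a synthetic @Field annotation instance by hand, presumably to
// exercise the experimental annotation support for properties; the instance is never
// used and nothing is asserted.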
new Field() {
@Override
public Class<? extends Annotation> annotationType() {
return null;
}
@Override
public String value() {
return null;
}
@Override
public String name() {
return null;
}
@Override
public int order() {
return 0;
}
@Override
public FieldType targetType() {
return null;
}
};
}
}

View File

@@ -0,0 +1,110 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.staticmetadata;
import static org.assertj.core.api.Assertions.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;
import java.util.Arrays;
import java.util.LinkedHashSet;
import org.bson.Document;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.buildtimetypeinfo.Address;
import org.springframework.data.mongodb.buildtimetypeinfo.AddressTypeInformation;
import org.springframework.data.util.ClassTypeInformation;
import org.springframework.data.mongodb.buildtimetypeinfo.Person;
import org.springframework.data.mongodb.buildtimetypeinfo.PersonTypeInformation;
import com.mongodb.client.MongoClients;
/**
* @author Christoph Strobl
* @since 2020/10
*/
public class StaticMetadataTests {
MongoMappingContext mappingContext;
MappingMongoConverter mongoConverter;
MongoTemplate template;
Person luke;
@BeforeAll
static void beforeAll() {
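// Seed the ClassTypeInformation cache with the generated Person/Address metadata
// before the mapping context and converter are created.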
ClassTypeInformation.warmCache(PersonTypeInformation.instance(), AddressTypeInformation.instance());
}
@BeforeEach
void beforeEach() {
mappingContext = new MongoMappingContext();
mappingContext.setInitialEntitySet(new LinkedHashSet<>(
Arrays.asList(Person.class, Address.class)));
mappingContext.initialize();
mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
mongoConverter.afterPropertiesSet();
template = new MongoTemplate(new SimpleMongoClientDatabaseFactory(MongoClients.create(), "sem"), mongoConverter);
luke = new Person("luke", "skywalker");
luke.setAddress(new Address("Mos Eisley", "WB154"));
luke.setAge(22);
luke = luke.withId(9876);
luke.setNicknames(Arrays.asList("jedi", "wormie"));
}
@Test
void readWrite() {
template.save(luke);
Document savedDocument = template.execute("star-wars",
collection -> collection.find(new Document("_id", luke.getId())).first());
System.out.println("savedDocument.toJson(): " + savedDocument.toJson());
Person savedEntity = template.findOne(query(where("id").is(luke.getId())), Person.class);
System.out.println("savedEntity: " + savedEntity);
assertThat(savedEntity).isEqualTo(luke);
}
}

View File

@@ -66,6 +66,7 @@ import org.springframework.data.mongodb.test.util.MongoClientExtension;
import org.springframework.data.mongodb.test.util.MongoTestUtils;
import org.springframework.data.querydsl.ReactiveQuerydslPredicateExecutor;
import org.springframework.data.repository.Repository;
import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider;
import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider;
import org.springframework.test.context.junit.jupiter.SpringExtension;
@@ -76,13 +77,11 @@ import com.mongodb.reactivestreams.client.MongoClient;
*
* @author Mark Paluch
* @author Christoph Strobl
* @author Jens Schauder
*/
@ExtendWith({ MongoClientExtension.class, SpringExtension.class })
class ReactiveMongoRepositoryTests {
public class ReactiveMongoRepositoryTests {
private static final int PERSON_COUNT = 7;
private static @Client MongoClient mongoClient;
static @Client MongoClient mongoClient;
@Autowired ReactiveMongoTemplate template;
@@ -90,8 +89,8 @@ class ReactiveMongoRepositoryTests {
@Autowired ReactiveContactRepository contactRepository;
@Autowired ReactiveCappedCollectionRepository cappedRepository;
private Person dave, oliver, carter, boyd, stefan, leroi, alicia;
private QPerson person = QPerson.person;
Person dave, oliver, carter, boyd, stefan, leroi, alicia;
QPerson person = QPerson.person;
@Configuration
static class Config extends AbstractReactiveMongoConfiguration {
@@ -141,14 +140,14 @@ class ReactiveMongoRepositoryTests {
}
@BeforeAll
static void cleanDb() {
public static void cleanDb() {
MongoTestUtils.createOrReplaceCollectionNow("reactive", "person", mongoClient);
MongoTestUtils.createOrReplaceCollectionNow("reactive", "capped", mongoClient);
}
@BeforeEach
void setUp() throws Exception {
public void setUp() throws Exception {
repository.deleteAll().as(StepVerifier::create).verifyComplete();
@@ -165,35 +164,35 @@ class ReactiveMongoRepositoryTests {
alicia = new Person("Alicia", "Keys", 30, Sex.FEMALE);
repository.saveAll(Arrays.asList(oliver, carter, boyd, stefan, leroi, alicia, dave)).as(StepVerifier::create) //
.expectNextCount(PERSON_COUNT) //
.expectNextCount(7) //
.verifyComplete();
}
@Test // DATAMONGO-1444
void shouldFindByLastName() {
public void shouldFindByLastName() {
repository.findByLastname(dave.getLastname()).as(StepVerifier::create).expectNextCount(2).verifyComplete();
}
@Test // DATAMONGO-1444
void shouldFindOneByLastName() {
public void shouldFindOneByLastName() {
repository.findOneByLastname(carter.getLastname()).as(StepVerifier::create).expectNext(carter).verifyComplete();
}
@Test // DATAMONGO-1444
void shouldFindOneByPublisherOfLastName() {
public void shouldFindOneByPublisherOfLastName() {
repository.findByLastname(Mono.just(carter.getLastname())).as(StepVerifier::create).expectNext(carter)
.verifyComplete();
}
@Test // DATAMONGO-1444
void shouldFindByPublisherOfLastNameIn() {
public void shouldFindByPublisherOfLastNameIn() {
repository.findByLastnameIn(Flux.just(carter.getLastname(), dave.getLastname())).as(StepVerifier::create) //
.expectNextCount(3) //
.verifyComplete();
}
@Test // DATAMONGO-1444
void shouldFindByPublisherOfLastNameInAndAgeGreater() {
public void shouldFindByPublisherOfLastNameInAndAgeGreater() {
repository.findByLastnameInAndAgeGreaterThan(Flux.just(carter.getLastname(), dave.getLastname()), 41)
.as(StepVerifier::create) //
@@ -202,7 +201,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-1444
void shouldFindUsingPublishersInStringQuery() {
public void shouldFindUsingPublishersInStringQuery() {
repository.findStringQuery(Flux.just("Beauford", "Matthews"), Mono.just(41)).as(StepVerifier::create) //
.expectNextCount(2) //
@@ -210,7 +209,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-1444
void shouldFindByLastNameAndSort() {
public void shouldFindByLastNameAndSort() {
repository.findByLastname("Matthews", Sort.by(ASC, "age")).as(StepVerifier::create) //
.expectNext(oliver, dave) //
@@ -222,7 +221,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-1444
void shouldUseTailableCursor() throws Exception {
public void shouldUseTailableCursor() throws Exception {
template.dropCollection(Capped.class) //
.then(template.createCollection(Capped.class, //
@@ -247,7 +246,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-1444
void shouldUseTailableCursorWithProjection() throws Exception {
public void shouldUseTailableCursorWithProjection() throws Exception {
template.dropCollection(Capped.class) //
.then(template.createCollection(Capped.class, //
@@ -278,7 +277,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-2080
void shouldUseTailableCursorWithDtoProjection() {
public void shouldUseTailableCursorWithDtoProjection() {
template.dropCollection(Capped.class) //
.then(template.createCollection(Capped.class, //
@@ -291,7 +290,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-1444
void findsPeopleByLocationWithinCircle() {
public void findsPeopleByLocationWithinCircle() {
Point point = new Point(-73.99171, 40.738868);
dave.setLocation(point);
@@ -303,7 +302,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-1444
void findsPeopleByPageableLocationWithinCircle() {
public void findsPeopleByPageableLocationWithinCircle() {
Point point = new Point(-73.99171, 40.738868);
dave.setLocation(point);
@@ -316,7 +315,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-1444
void findsPeopleGeoresultByLocationWithinBox() {
public void findsPeopleGeoresultByLocationWithinBox() {
Point point = new Point(-73.99171, 40.738868);
dave.setLocation(point);
@@ -331,7 +330,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-1444
void findsPeoplePageableGeoresultByLocationWithinBox() throws InterruptedException {
public void findsPeoplePageableGeoresultByLocationWithinBox() throws InterruptedException {
Point point = new Point(-73.99171, 40.738868);
dave.setLocation(point);
@@ -351,7 +350,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-1444
void findsPeopleByLocationWithinBox() throws InterruptedException {
public void findsPeopleByLocationWithinBox() throws InterruptedException {
Point point = new Point(-73.99171, 40.738868);
dave.setLocation(point);
@@ -367,23 +366,23 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-1865
void shouldErrorOnFindOneWithNonUniqueResult() {
public void shouldErrorOnFindOneWithNonUniqueResult() {
repository.findOneByLastname(dave.getLastname()).as(StepVerifier::create)
.expectError(IncorrectResultSizeDataAccessException.class).verify();
}
@Test // DATAMONGO-1865
void shouldReturnFirstFindFirstWithMoreResults() {
public void shouldReturnFirstFindFirstWithMoreResults() {
repository.findFirstByLastname(dave.getLastname()).as(StepVerifier::create).expectNextCount(1).verifyComplete();
}
@Test // DATAMONGO-2030
void shouldReturnExistsBy() {
public void shouldReturnExistsBy() {
repository.existsByLastname(dave.getLastname()).as(StepVerifier::create).expectNext(true).verifyComplete();
}
@Test // DATAMONGO-1979
void findAppliesAnnotatedSort() {
public void findAppliesAnnotatedSort() {
repository.findByAgeGreaterThan(40).collectList().as(StepVerifier::create).consumeNextWith(result -> {
assertThat(result).containsSequence(carter, boyd, dave, leroi);
@@ -391,7 +390,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-1979
void findWithSortOverwritesAnnotatedSort() {
public void findWithSortOverwritesAnnotatedSort() {
repository.findByAgeGreaterThan(40, Sort.by(Direction.ASC, "age")).collectList().as(StepVerifier::create)
.consumeNextWith(result -> {
@@ -400,7 +399,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-2181
void considersRepositoryCollectionName() {
public void considersRepositoryCollectionName() {
repository.deleteAll() //
.as(StepVerifier::create) //
@@ -429,7 +428,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-2182
void shouldFindPersonsWhenUsingQueryDslPerdicatedOnIdProperty() {
public void shouldFindPersonsWhenUsingQueryDslPerdicatedOnIdProperty() {
repository.findAll(person.id.in(Arrays.asList(dave.id, carter.id))) //
.collectList() //
@@ -440,19 +439,24 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-2153
void findListOfSingleValue() {
public void findListOfSingleValue() {
repository.findAllLastnames() //
.collectList() //
.as(StepVerifier::create) //
.assertNext(actual -> {
assertThat(actual)
.contains("Lessard", "Keys", "Tinsley", "Beauford", "Moore", "Matthews");
assertThat(actual) //
.contains("Lessard") //
.contains("Keys") //
.contains("Tinsley") //
.contains("Beauford") //
.contains("Moore") //
.contains("Matthews");
}).verifyComplete();
}
@Test // DATAMONGO-2153
void annotatedAggregationWithPlaceholderValue() {
public void annotatedAggregationWithPlaceholderValue() {
repository.groupByLastnameAnd("firstname") //
.collectList() //
@@ -469,7 +473,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-2153
void annotatedAggregationWithSort() {
public void annotatedAggregationWithSort() {
repository.groupByLastnameAnd("firstname", Sort.by("lastname")) //
.collectList() //
@@ -488,7 +492,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-2153
void annotatedAggregationWithPageable() {
public void annotatedAggregationWithPageable() {
repository.groupByLastnameAnd("firstname", PageRequest.of(1, 2, Sort.by("lastname"))) //
.collectList() //
@@ -503,7 +507,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-2153
void annotatedAggregationWithSingleSimpleResult() {
public void annotatedAggregationWithSingleSimpleResult() {
repository.sumAge() //
.as(StepVerifier::create) //
@@ -512,7 +516,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-2153
void annotatedAggregationWithAggregationResultAsReturnType() {
public void annotatedAggregationWithAggregationResultAsReturnType() {
repository.sumAgeAndReturnRawResult() //
.as(StepVerifier::create) //
@@ -521,7 +525,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-2153
void annotatedAggregationWithAggregationResultAsReturnTypeAndProjection() {
public void annotatedAggregationWithAggregationResultAsReturnTypeAndProjection() {
repository.sumAgeAndReturnSumWrapper() //
.as(StepVerifier::create) //
@@ -530,7 +534,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-2374
void findsWithNativeProjection() {
public void findsWithNativeProjection() {
repository.findDocumentById(dave.getId()) //
.as(StepVerifier::create) //
@@ -540,7 +544,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-2153
void annotatedAggregationWithAggregationResultAsMap() {
public void annotatedAggregationWithAggregationResultAsMap() {
repository.sumAgeAndReturnSumAsMap() //
.as(StepVerifier::create) //
@@ -550,7 +554,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-2403
void annotatedAggregationExtractingSimpleValueIsEmptyForEmptyDocument() {
public void annotatedAggregationExtractingSimpleValueIsEmptyForEmptyDocument() {
Person p = new Person("project-on-lastanme", null);
repository.save(p).then().as(StepVerifier::create).verifyComplete();
@@ -561,7 +565,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-2403
void annotatedAggregationSkipsEmptyDocumentsWhenExtractingSimpleValue() {
public void annotatedAggregationSkipsEmptyDocumentsWhenExtractingSimpleValue() {
String firstname = "project-on-lastanme";
@@ -580,7 +584,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-2406
void deleteByShouldHandleVoidResultTypeCorrectly() {
public void deleteByShouldHandleVoidResultTypeCorrectly() {
repository.deleteByLastname(dave.getLastname()) //
.as(StepVerifier::create) //
@@ -592,7 +596,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-1997
void deleteByShouldAllowDeletedCountAsResult() {
public void deleteByShouldAllowDeletedCountAsResult() {
repository.deleteCountByLastname(dave.getLastname()) //
.as(StepVerifier::create) //
@@ -601,7 +605,7 @@ class ReactiveMongoRepositoryTests {
}
@Test // DATAMONGO-1997
void deleteByShouldAllowSingleDocumentRemovalCorrectly() {
public void deleteByShouldAllowSingleDocumentRemovalCorrectly() {
repository.deleteSinglePersonByLastname(carter.getLastname()) //
.as(StepVerifier::create) //
@@ -613,18 +617,6 @@ class ReactiveMongoRepositoryTests {
.verifyComplete();
}
@Test // DATAMONGO-2652
void deleteAllById() {
repository.deleteAllById(Arrays.asList(carter.id, dave.id)) //
.as(StepVerifier::create) //
.verifyComplete();
repository.count().as(StepVerifier::create) //
.expectNext(PERSON_COUNT - 2L) //
.verifyComplete();
}
interface ReactivePersonRepository
extends ReactiveMongoRepository<Person, String>, ReactiveQuerydslPredicateExecutor<Person> {
@@ -725,7 +717,7 @@ class ReactiveMongoRepositoryTests {
String key;
double random;
Capped(String key, double random) {
public Capped(String key, double random) {
this.key = key;
this.random = random;
}

View File

@@ -15,11 +15,11 @@
*/
package org.springframework.data.mongodb.repository.support;
import static java.util.Arrays.*;
import static org.assertj.core.api.Assertions.*;
import static org.springframework.data.domain.ExampleMatcher.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -58,13 +58,12 @@ import org.springframework.transaction.support.TransactionTemplate;
* @author Thomas Darimont
* @author Christoph Strobl
* @author Mark Paluch
* @author Jens Schauder
*/
@ExtendWith({ MongoTemplateExtension.class, MongoServerCondition.class })
class SimpleMongoRepositoryTests {
public class SimpleMongoRepositoryTests {
@Template(initialEntitySet = Person.class) //
private static MongoTestTemplate template;
static MongoTestTemplate template;
private Person oliver, dave, carter, boyd, stefan, leroi, alicia;
private List<Person> all;
@@ -74,7 +73,7 @@ class SimpleMongoRepositoryTests {
template);
@BeforeEach
void setUp() {
public void setUp() {
repository.deleteAll();
@@ -86,21 +85,21 @@ class SimpleMongoRepositoryTests {
leroi = new Person("Leroi", "Moore", 41);
alicia = new Person("Alicia", "Keys", 30, Sex.FEMALE);
all = repository.saveAll(asList(oliver, dave, carter, boyd, stefan, leroi, alicia));
all = repository.saveAll(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia));
}
@Test
void findAllFromCustomCollectionName() {
assertThat(repository.findAll()).hasSameSizeAs(all);
public void findALlFromCustomCollectionName() {
assertThat(repository.findAll()).hasSize(all.size());
}
@Test
void findOneFromCustomCollectionName() {
assertThat(repository.findById(dave.getId())).contains(dave);
public void findOneFromCustomCollectionName() {
assertThat(repository.findById(dave.getId()).get()).isEqualTo(dave);
}
@Test
void deleteFromCustomCollectionName() {
public void deleteFromCustomCollectionName() {
repository.delete(dave);
@@ -108,7 +107,7 @@ class SimpleMongoRepositoryTests {
}
@Test
void deleteByIdFromCustomCollectionName() {
public void deleteByIdFromCustomCollectionName() {
repository.deleteById(dave.getId());
@@ -116,7 +115,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1054
void shouldInsertSingle() {
public void shouldInsertSingle() {
String randomId = UUID.randomUUID().toString();
@@ -127,7 +126,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1054
void shouldInsertMultipleFromList() {
public void shouldInsertMultipleFromList() {
String randomId = UUID.randomUUID().toString();
Map<String, Person> idToPerson = new HashMap<String, Person>();
@@ -141,12 +140,12 @@ class SimpleMongoRepositoryTests {
List<Person> saved = repository.insert(persons);
assertThat(saved).hasSameSizeAs(persons);
assertThat(saved).hasSize(persons.size());
assertThatAllReferencePersonsWereStoredCorrectly(idToPerson, saved);
}
@Test // DATAMONGO-1054
void shouldInsertMutlipleFromSet() {
public void shouldInsertMutlipleFromSet() {
String randomId = UUID.randomUUID().toString();
Map<String, Person> idToPerson = new HashMap<String, Person>();
@@ -160,12 +159,12 @@ class SimpleMongoRepositoryTests {
List<Person> saved = repository.insert(persons);
assertThat(saved).hasSameSizeAs(persons);
assertThat(saved).hasSize(persons.size());
assertThatAllReferencePersonsWereStoredCorrectly(idToPerson, saved);
}
@Test // DATAMONGO-1245, DATAMONGO-1464
void findByExampleShouldLookUpEntriesCorrectly() {
public void findByExampleShouldLookUpEntriesCorrectly() {
Person sample = new Person();
sample.setLastname("Matthews");
@@ -178,7 +177,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1464
void findByExampleMultiplePagesShouldLookUpEntriesCorrectly() {
public void findByExampleMultiplePagesShouldLookUpEntriesCorrectly() {
Person sample = new Person();
sample.setLastname("Matthews");
@@ -191,7 +190,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1245
void findAllByExampleShouldLookUpEntriesCorrectly() {
public void findAllByExampleShouldLookUpEntriesCorrectly() {
Person sample = new Person();
sample.setLastname("Matthews");
@@ -201,7 +200,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1245
void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObject() {
public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObject() {
dave.setAddress(new Address("1600 Pennsylvania Ave NW", "20500", "Washington"));
repository.save(dave);
@@ -217,7 +216,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1245
void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingPartialNestedObject() {
public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingPartialNestedObject() {
dave.setAddress(new Address("1600 Pennsylvania Ave NW", "20500", "Washington"));
repository.save(dave);
@@ -233,7 +232,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1245
void findAllByExampleShouldNotFindEntriesWhenUsingPartialNestedObjectInStrictMode() {
public void findAllByExampleShouldNotFindEntriesWhenUsingPartialNestedObjectInStrictMode() {
dave.setAddress(new Address("1600 Pennsylvania Ave NW", "20500", "Washington"));
repository.save(dave);
@@ -248,7 +247,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1245
void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObjectInStrictMode() {
public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObjectInStrictMode() {
dave.setAddress(new Address("1600 Pennsylvania Ave NW", "20500", "Washington"));
repository.save(dave);
@@ -263,7 +262,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1245
void findAllByExampleShouldRespectStringMatchMode() {
public void findAllByExampleShouldRespectStringMatchMode() {
Person sample = new Person();
sample.setLastname("Mat");
@@ -275,7 +274,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1245
void findAllByExampleShouldResolveDbRefCorrectly() {
public void findAllByExampleShouldResolveDbRefCorrectly() {
User user = new User();
user.setId("c0nf1ux");
@@ -295,7 +294,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1245
void findAllByExampleShouldResolveLegacyCoordinatesCorrectly() {
public void findAllByExampleShouldResolveLegacyCoordinatesCorrectly() {
Person megan = new Person("megan", "tarash");
megan.setLocation(new Point(41.85003D, -87.65005D));
@@ -310,7 +309,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1245
void findAllByExampleShouldResolveGeoJsonCoordinatesCorrectly() {
public void findAllByExampleShouldResolveGeoJsonCoordinatesCorrectly() {
Person megan = new Person("megan", "tarash");
megan.setLocation(new GeoJsonPoint(41.85003D, -87.65005D));
@@ -325,7 +324,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1245
void findAllByExampleShouldProcessInheritanceCorrectly() {
public void findAllByExampleShouldProcessInheritanceCorrectly() {
PersonExtended reference = new PersonExtended();
reference.setLastname("Matthews");
@@ -341,7 +340,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1245
void findOneByExampleShouldLookUpEntriesCorrectly() {
public void findOneByExampleShouldLookUpEntriesCorrectly() {
Person sample = new Person();
sample.setFirstname("Dave");
@@ -352,7 +351,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1245
void existsByExampleShouldLookUpEntriesCorrectly() {
public void existsByExampleShouldLookUpEntriesCorrectly() {
Person sample = new Person();
sample.setFirstname("Dave");
@@ -363,7 +362,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1245
void countByExampleShouldLookUpEntriesCorrectly() {
public void countByExampleShouldLookUpEntriesCorrectly() {
Person sample = new Person();
sample.setLastname("Matthews");
@@ -373,7 +372,7 @@ class SimpleMongoRepositoryTests {
}
@Test // DATAMONGO-1896
void saveAllUsesEntityCollection() {
public void saveAllUsesEntityCollection() {
Person first = new PersonExtended();
first.setEmail("foo@bar.com");
@@ -385,7 +384,7 @@ class SimpleMongoRepositoryTests {
repository.deleteAll();
repository.saveAll(asList(first, second));
repository.saveAll(Arrays.asList(first, second));
assertThat(repository.findAll()).containsExactlyInAnyOrder(first, second);
}
@@ -393,7 +392,7 @@ class SimpleMongoRepositoryTests {
@Test // DATAMONGO-2130
@EnableIfReplicaSetAvailable
@EnableIfMongoServerVersion(isGreaterThanEqual = "4.0")
void countShouldBePossibleInTransaction() {
public void countShouldBePossibleInTransaction() {
MongoTransactionManager txmgr = new MongoTransactionManager(template.getMongoDbFactory());
TransactionTemplate tt = new TransactionTemplate(txmgr);
@@ -417,7 +416,7 @@ class SimpleMongoRepositoryTests {
@Test // DATAMONGO-2130
@EnableIfReplicaSetAvailable
@EnableIfMongoServerVersion(isGreaterThanEqual = "4.0")
void existsShouldBePossibleInTransaction() {
public void existsShouldBePossibleInTransaction() {
MongoTransactionManager txmgr = new MongoTransactionManager(template.getMongoDbFactory());
TransactionTemplate tt = new TransactionTemplate(txmgr);
@@ -436,15 +435,6 @@ class SimpleMongoRepositoryTests {
assertThat(exists).isTrue();
}
@Test // DATAMONGO-2652
void deleteAllByIds() {
repository.deleteAllById(asList(dave.getId(), carter.getId()));
assertThat(repository.findAll()) //
.hasSize(all.size() - 2).doesNotContain(dave, carter);
}
private void assertThatAllReferencePersonsWereStoredCorrectly(Map<String, Person> references, List<Person> saved) {
for (Person person : saved) {

View File

@@ -93,15 +93,6 @@ class KPropertyPathTests {
assertThat(property).isEqualTo("entity.book.author.name")
}
@Test // DATAMONGO-2661
fun `Convert nullable KProperty to field name`() {
class Cat(val name: String)
class Owner(val cat: Cat?)
val property = asString(Owner::cat / Cat::name)
assertThat(property).isEqualTo("cat.name")
}
class Book(val title: String, val author: Author)
class Author(val name: String)
}

View File

@@ -41,62 +41,7 @@ The starting point for learning about MongoDB is https://www.mongodb.org/[www.mo
The Spring Data MongoDB 3.x binaries require JDK level 8.0 and above and https://spring.io/docs[Spring Framework] {springVersion} and above.
In terms of document stores, you need at least version 3.6 of https://www.mongodb.org/[MongoDB], though we recommend a more recent version.
[[compatibility.matrix]]
=== Compatibility Matrix
The following compatibility matrix maps Spring Data release trains to the MongoDB driver and database versions they support.
Database versions show the highest supported server version that passes the Spring Data test suite.
You can use newer server versions unless your application uses functionality that is affected by <<compatibility.changes,changes in the MongoDB server>>.
[cols="h,m,m,m", options="header"]
|===
|Spring Data Release Train
|Spring Data MongoDB
|Driver Version
|Server Version
|2021.0
|3.2.x
|4.1.x
|4.4.x
|2020.0
|3.1.x
|4.1.x
|4.4.x
|Neumann
|3.0.x
|4.0.x
|4.4.x
|Moore
|2.2.x
|3.11.x/Reactive Streams 1.12.x
|4.2.x
|Lovelace
|2.1.x
|3.8.x/Reactive Streams 1.9.x
|4.0.x
|===
[[compatibility.changes]]
[[compatibility.changes-4.4]]
==== Relevant Changes in MongoDB 4.4
* Fields list must not contain the text search score property when no `$text` criteria is present (see the sketch after this list). See also https://docs.mongodb.com/manual/reference/operator/query/text/[`$text` operator]
* Sort must not be an empty document when running map reduce.
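A minimal sketch of the first point (assuming a mapped `Person` document that carries a `@TextScore` property): the score projection is only requested together with an actual `$text` predicate.
[source,java]
----
import java.util.List;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.TextCriteria;
import org.springframework.data.mongodb.core.query.TextQuery;

class TextScoreLookup {

	// Valid on MongoDB 4.4: the { $meta: "textScore" } projection is only added
	// together with a $text criteria; requesting the score without a $text
	// predicate is rejected by the server. "Person" is an assumed mapped
	// document with a @TextScore property.
	static List<Person> findByKeyword(MongoOperations operations, String keyword) {

		TextQuery query = TextQuery.queryText(TextCriteria.forDefaultLanguage().matching(keyword))
				.includeScore()
				.sortByScore();

		return operations.find(query, Person.class);
	}
}
----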
[[compatibility.changes-4.2]]
==== Relevant Changes in MongoDB 4.2
* Removal of the `geoNear` command; use the `$geoNear` aggregation stage instead (see the sketch after this list). See also https://docs.mongodb.com/manual/release-notes/4.2-compatibility/#remove-support-for-the-geonear-command[Removal of `geoNear`]
* Removal of `eval` command. See also https://docs.mongodb.com/manual/release-notes/4.2-compatibility/#remove-support-for-the-eval-command[Removal of `eval`]
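A hedged sketch of the `$geoNear` aggregation stage replacing the removed command (the `person` collection name and the 10 km radius are assumptions):
[source,java]
----
import java.util.List;

import org.bson.Document;
import org.springframework.data.geo.Metrics;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.query.NearQuery;

class GeoNearLookup {

	// MongoDB 4.2 removed the standalone geoNear command; the same lookup is
	// expressed as a $geoNear aggregation stage that writes the computed
	// distance into the "distance" field. Collection name and radius are assumptions.
	static List<Document> findNearby(MongoOperations operations, Point location) {

		NearQuery near = NearQuery.near(location, Metrics.KILOMETERS).maxDistance(10);

		Aggregation aggregation = Aggregation.newAggregation(Aggregation.geoNear(near, "distance"));

		return operations.aggregate(aggregation, "person", Document.class).getMappedResults();
	}
}
----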
In terms of document stores, you need at least version 2.6 of https://www.mongodb.org/[MongoDB].
[[get-started:help]]
== Additional Help Resources

View File

@@ -17,10 +17,10 @@ public interface PersonRepository extends CrudRepository<Person, String> {
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }")
List<PersonAggregate> groupByLastnameAndFirstnames(Sort sort); <2>
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : ?0 } } }")
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $?0 } } }")
List<PersonAggregate> groupByLastnameAnd(String property); <3>
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : ?0 } } }")
@Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $?0 } } }")
List<PersonAggregate> groupByLastnameAnd(String property, Pageable page); <4>
@Aggregation("{ $group : { _id : null, total : { $sum : $age } } }")
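A hedged usage sketch for the placeholder-based methods above (`PersonRepository` and `PersonAggregate` are the types from this snippet; the pipeline in the comment is an assumption derived from the `$?0` placeholder form):
[source,java]
----
import java.util.List;

class PersonAggregateClient {

	private final PersonRepository repository;

	PersonAggregateClient(PersonRepository repository) {
		this.repository = repository;
	}

	List<PersonAggregate> firstNamesPerLastname() {
		// "$?0" is presumably replaced with a field reference to the argument,
		// so this call runs something like:
		// { $group : { _id : "$lastname", names : { $addToSet : "$firstname" } } }
		return repository.groupByLastnameAnd("firstname");
	}
}
----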

View File

@@ -27,7 +27,7 @@ First, you need to set up a running MongoDB server. Refer to the https://docs.mo
To create a Spring project in STS:
. Go to File -> New -> Spring Template Project -> Simple Spring Utility Project, and press Yes when prompted. Then enter a project and a package name, such as `org.spring.mongodb.example`.
. Add the following to the pom.xml file's `dependencies` element:
.Add the following to the pom.xml file's `dependencies` element:
+
[source,xml,subs="+attributes"]
----

View File

@@ -1,76 +1,6 @@
Spring Data MongoDB Changelog
=============================
Changes in version 3.2.0-M1 (2020-12-09)
----------------------------------------
* DATAMONGO-2663 - Document Spring Data to MongoDB compatibility.
* DATAMONGO-2661 - Handle nullable types for KPropertyPath.
* DATAMONGO-2659 - Allow disk use on Query.
* DATAMONGO-2652 - Implement CrudRepository.delete(Iterable<ID> ids).
* DATAMONGO-2649 - Release 3.2 M1 (2021.0.0).
* DATAMONGO-2644 - Invalid reference '_id'! Error In spring data mongodb.
* DATAMONGO-2635 - Enums Don't Properly Serialize In Aggregation Pipeline Facet.
Changes in version 3.0.6.RELEASE (2020-12-09)
---------------------------------------------
* DATAMONGO-2663 - Document Spring Data to MongoDB compatibility.
* DATAMONGO-2661 - Handle nullable types for KPropertyPath.
* DATAMONGO-2647 - Release 3.0.6 (Neumann SR6).
* DATAMONGO-2644 - Invalid reference '_id'! Error In spring data mongodb.
Changes in version 2.2.12.RELEASE (2020-12-09)
----------------------------------------------
* DATAMONGO-2663 - Document Spring Data to MongoDB compatibility.
* DATAMONGO-2646 - Release 2.2.12 (Moore SR12).
Changes in version 3.1.1 (2020-11-11)
-------------------------------------
* DATAMONGO-2648 - Release 3.1.1 (2020.0.1).
* DATAMONGO-2644 - Invalid reference '_id'! Error In spring data mongodb.
* DATAMONGO-2635 - Enums Don't Properly Serialize In Aggregation Pipeline Facet.
Changes in version 3.1.0 (2020-10-28)
-------------------------------------
* DATAMONGO-2642 - Upgrade to MongoDB Driver 4.1.1.
* DATAMONGO-2639 - Release 3.1 GA (2020.0.0).
* DATAMONGO-2638 - Fix documentation issues.
Changes in version 3.0.5.RELEASE (2020-10-28)
---------------------------------------------
* DATAMONGO-2643 - Adopt to AssertJ API changes.
* DATAMONGO-2638 - Fix documentation issues.
* DATAMONGO-2633 - @Query annotation does not support $centerSphere.
* DATAMONGO-2625 - Release 3.0.5 (Neumann SR5).
Changes in version 2.2.11.RELEASE (2020-10-28)
----------------------------------------------
* DATAMONGO-2638 - Fix documentation issues.
* DATAMONGO-2633 - @Query annotation does not support $centerSphere.
* DATAMONGO-2624 - Release 2.2.11 (Moore SR11).
Changes in version 2.1.21.RELEASE (2020-10-28)
----------------------------------------------
* DATAMONGO-2641 - Release 2.1.21 (Lovelace SR21).
Changes in version 3.1.0-RC2 (2020-10-14)
-----------------------------------------
* DATAMONGO-2633 - @Query annotation does not support $centerSphere.
* DATAMONGO-2630 - Add support for suspend repository query methods returning List<T>.
* DATAMONGO-2626 - Release 3.1 RC2 (2020.0.0).
* DATAMONGO-2623 - Add support for custom Aggregation expressions.
* DATAMONGO-2622 - Add support for $unionWith aggregation.
* DATAMONGO-2596 - Introduce extension to render KProperty/KPropertyPath as property path.
* DATAMONGO-2294 - Support multiple parameters for query field projections.
Changes in version 3.1.0-RC1 (2020-09-16)
-----------------------------------------
* DATAMONGO-2621 - Adapt to changed array assertions in AssertJ.
@@ -3235,15 +3165,6 @@ Repository

View File

@@ -1,4 +1,4 @@
Spring Data MongoDB 3.2 M1 (2021.0.0)
Spring Data MongoDB 3.1 RC1 (2020.0.0)
Copyright (c) [2010-2019] Pivotal Software, Inc.
This product is licensed to you under the Apache License, Version 2.0 (the "License").
@@ -18,6 +18,3 @@ conditions of the subcomponent's license, as noted in the LICENSE file.