Compare commits

...

8 Commits
3.2.3 ... 3.1.1

Author SHA1 Message Date
Mark Paluch
595fde7b04 DATAMONGO-2648 - Release version 3.1.1 (2020.0.1). 2020-11-11 11:58:58 +01:00
Mark Paluch
01f4e73b48 DATAMONGO-2648 - Prepare 3.1.1 (2020.0.1). 2020-11-11 11:58:35 +01:00
Mark Paluch
2934c4886b DATAMONGO-2648 - Updated changelog. 2020-11-11 11:58:18 +01:00
Christoph Strobl
080c798721 DATAMONGO-2644 - ProjectOperation no longer errors on inclusion of default _id field.
Original pull request: #890.
2020-11-10 09:40:49 +01:00
Christoph Strobl
7cfb68e6be DATAMONGO-2635 - Enforce aggregation pipeline mapping.
Avoid using the Aggregation.DEFAULT_CONTEXT which does not map contained values to the according MongoDB representation. We now use a relaxed aggregation context, preserving given field names, where possible.

Original pull request: #890.
2020-11-10 09:40:49 +01:00
Mark Paluch
1e24abe8e5 DATAMONGO-2639 - Enable maintenance branch build. 2020-10-29 09:43:58 +01:00
Mark Paluch
a316d156dc DATAMONGO-2639 - After release cleanups. 2020-10-28 16:10:54 +01:00
Mark Paluch
6563b125eb DATAMONGO-2639 - Prepare next development iteration. 2020-10-28 16:10:50 +01:00
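
DATAMONGO-2644 (080c798721) changes how ProjectionOperation resolves the default _id field. Below is a minimal sketch of the fixed behaviour, modelled on the projectOnIdIsAlwaysValid unit test added in this compare; the wrapper class and collection name are illustrative assumptions, and no MongoDB instance is needed because only pipeline rendering is shown.

import org.bson.Document;

import org.springframework.data.mongodb.core.aggregation.Aggregation;

class ProjectOnIdExample {

	public static void main(String[] args) {

		// Rendering the pipeline is enough to show the fix; no server connection is required.
		// Before this release the $project stage raised "Invalid reference '_id'!" because the
		// preceding $bucket stage does not expose the source _id field.
		Aggregation aggregation = Aggregation.newAggregation(
				Aggregation.bucket("start"),
				Aggregation.project("_id"));

		Document document = aggregation.toDocument("collection-1", Aggregation.DEFAULT_CONTEXT);

		// The $project stage now falls back to the literal reference: { "_id" : "$_id" }
		System.out.println(document.toJson());
	}
}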
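
DATAMONGO-2635 (7cfb68e6be) replaces Aggregation.DEFAULT_CONTEXT with a relaxed, mapping-aware context for untyped aggregations. A minimal sketch, modelled on the mapsEnumsInMatchClauseUsingInCriteriaCorrectly integration test added in this compare; the connection setup, database name, and wrapper class are illustrative assumptions, and a running MongoDB instance is assumed.

import java.util.Collections;

import org.bson.Document;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.query.Criteria;

import com.mongodb.client.MongoClients;

class EnumMatchExample {

	enum MyEnum {
		ONE, TWO
	}

	static class WithEnum {
		@Id String id;
		MyEnum enumValue;
	}

	public static void main(String[] args) {

		// Connection and database name are illustrative only.
		MongoTemplate template = new MongoTemplate(MongoClients.create(), "test");

		WithEnum source = new WithEnum();
		source.id = "id-1";
		source.enumValue = MyEnum.TWO;
		template.save(source);

		// The $in criterion carries an enum constant; the relaxed aggregation context now maps it
		// to its stored representation instead of handing the raw enum to the driver.
		Aggregation aggregation = Aggregation.newAggregation(
				Aggregation.match(Criteria.where("enumValue").in(Collections.singletonList(MyEnum.TWO))));

		AggregationResults<Document> results = template.aggregate(
				aggregation, template.getCollectionName(WithEnum.class), Document.class);

		System.out.println(results.getMappedResults().size()); // expected: 1
	}
}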
15 changed files with 90 additions and 26 deletions

Jenkinsfile
View File

@@ -3,7 +3,7 @@ pipeline {
 	triggers {
 		pollSCM 'H/10 * * * *'
-		upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS)
+		upstream(upstreamProjects: "spring-data-commons/2.4.x", threshold: hudson.model.Result.SUCCESS)
 	}
 	options {
@@ -68,7 +68,7 @@ pipeline {
 	stage("test: baseline (jdk8)") {
 		when {
 			anyOf {
-				branch 'master'
+				branch '3.1.x'
 				not { triggeredBy 'UpstreamCause' }
 			}
 		}
@@ -94,7 +94,7 @@ pipeline {
 	stage("Test other configurations") {
 		when {
 			allOf {
-				branch 'master'
+				branch '3.1.x'
 				not { triggeredBy 'UpstreamCause' }
 			}
 		}
@@ -164,7 +164,7 @@ pipeline {
 	stage('Release to artifactory') {
 		when {
 			anyOf {
-				branch 'master'
+				branch '3.1.x'
 				not { triggeredBy 'UpstreamCause' }
 			}
 		}
@@ -196,7 +196,7 @@ pipeline {
 	stage('Publish documentation') {
 		when {
-			branch 'master'
+			branch '3.1.x'
 		}
 		agent {
 			docker {

View File

@@ -5,7 +5,7 @@
 	<groupId>org.springframework.data</groupId>
 	<artifactId>spring-data-mongodb-parent</artifactId>
-	<version>3.1.0</version>
+	<version>3.1.1</version>
 	<packaging>pom</packaging>
 	<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
 	<parent>
 		<groupId>org.springframework.data.build</groupId>
 		<artifactId>spring-data-parent</artifactId>
-		<version>2.4.0</version>
+		<version>2.4.1</version>
 	</parent>
 	<modules>
@@ -26,7 +26,7 @@
 	<properties>
 		<project.type>multi</project.type>
 		<dist.id>spring-data-mongodb</dist.id>
-		<springdata.commons>2.4.0</springdata.commons>
+		<springdata.commons>2.4.1</springdata.commons>
 		<mongo>4.1.1</mongo>
 		<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
 		<jmh.version>1.19</jmh.version>

View File

@@ -7,7 +7,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.1.0</version>
+		<version>3.1.1</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -14,7 +14,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.1.0</version>
+		<version>3.1.1</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -11,7 +11,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.1.0</version>
+		<version>3.1.1</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -77,7 +77,7 @@ class AggregationUtil {
 		}
 		if (!(aggregation instanceof TypedAggregation)) {
-			return Aggregation.DEFAULT_CONTEXT;
+			return new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper);
 		}
 		Class<?> inputType = ((TypedAggregation) aggregation).getInputType();
@@ -98,7 +98,7 @@ class AggregationUtil {
 	 */
 	List<Document> createPipeline(Aggregation aggregation, AggregationOperationContext context) {
-		if (!ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) {
+		if (ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) {
 			return aggregation.toPipeline(context);
 		}

View File

@@ -707,10 +707,9 @@ class QueryOperations {
 	 */
 	List<Document> getUpdatePipeline(@Nullable Class<?> domainType) {
-		AggregationOperationContext context = domainType != null
-				? new RelaxedTypeBasedAggregationOperationContext(domainType, mappingContext, queryMapper)
-				: Aggregation.DEFAULT_CONTEXT;
+		Class<?> type = domainType != null ? domainType : Object.class;
+		AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type, mappingContext, queryMapper);
 		return aggregationUtil.createPipeline((AggregationUpdate) update, context);
 	}

View File

@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core;
 import static org.springframework.data.mongodb.core.query.SerializationUtils.*;
+import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.util.function.Tuple2;
@@ -2112,7 +2113,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
 		AggregationOperationContext context = agg instanceof TypedAggregation
 				? new TypeBasedAggregationOperationContext(((TypedAggregation<?>) agg).getInputType(),
 						getConverter().getMappingContext(), queryMapper)
-				: Aggregation.DEFAULT_CONTEXT;
+				: new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper);
 		return agg.toPipeline(new PrefixingDelegatingAggregationOperationContext(context, "fullDocument",
 				Arrays.asList("operationType", "fullDocument", "documentKey", "updateDescription", "ns")));

View File

@@ -264,7 +264,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
 		return new Document(getOperator(), fieldObject);
 	}
 	/*
 	 * (non-Javadoc)
 	 * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#getOperator()
 	 */
@@ -1450,6 +1450,14 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
 			return field.getTarget();
 		}
+		if (field.getTarget().equals(Fields.UNDERSCORE_ID)) {
+			try {
+				return context.getReference(field).getReferenceValue();
+			} catch (java.lang.IllegalArgumentException e) {
+				return Fields.UNDERSCORE_ID_REF;
+			}
+		}
 		// check whether referenced field exists in the context
 		return context.getReference(field).getReferenceValue();

View File

@@ -21,6 +21,7 @@ import java.util.ArrayList;
 import java.util.List;
 import org.bson.Document;
+import org.springframework.data.mapping.PersistentEntity;
 import org.springframework.data.mapping.PersistentPropertyPath;
 import org.springframework.data.mapping.context.MappingContext;
 import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference;
@@ -29,6 +30,7 @@ import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldRefe
 import org.springframework.data.mongodb.core.convert.QueryMapper;
 import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
 import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
+import org.springframework.data.util.Lazy;
 import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;
@@ -46,6 +48,7 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
 	private final Class<?> type;
 	private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
 	private final QueryMapper mapper;
+	private final Lazy<MongoPersistentEntity<?>> entity;
 	/**
 	 * Creates a new {@link TypeBasedAggregationOperationContext} for the given type, {@link MappingContext} and
@@ -65,6 +68,7 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
 		this.type = type;
 		this.mappingContext = mappingContext;
 		this.mapper = mapper;
+		this.entity = Lazy.of(() -> mappingContext.getPersistentEntity(type));
 	}
 	/*
@@ -151,10 +155,14 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
 	protected FieldReference getReferenceFor(Field field) {
+		if(entity.getNullable() == null) {
+			return new DirectFieldReference(new ExposedField(field, true));
+		}
 		PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext
 				.getPersistentPropertyPath(field.getTarget(), type);
 		Field mappedField = field(field.getName(),
 				propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE));
 		return new DirectFieldReference(new ExposedField(mappedField, true));
 	}

View File

@@ -142,12 +142,8 @@ public class UnionWithOperation implements AggregationOperation {
 	private AggregationOperationContext computeContext(AggregationOperationContext source) {
-		if (domainType == null) {
-			return Aggregation.DEFAULT_CONTEXT;
-		}
 		if (source instanceof TypeBasedAggregationOperationContext) {
-			return ((TypeBasedAggregationOperationContext) source).continueOnMissingFieldReference(domainType);
+			return ((TypeBasedAggregationOperationContext) source).continueOnMissingFieldReference(domainType != null ? domainType : Object.class);
 		}
 		if (source instanceof ExposedFieldsAggregationOperationContext) {
View File

@@ -1928,6 +1928,22 @@ public class AggregationTests {
 		assertThat(results.getRawResults()).isEmpty();
 	}
+	@Test // DATAMONGO-2635
+	void mapsEnumsInMatchClauseUsingInCriteriaCorrectly() {
+		WithEnum source = new WithEnum();
+		source.enumValue = MyEnum.TWO;
+		source.id = "id-1";
+		mongoTemplate.save(source);
+		Aggregation agg = newAggregation(match(where("enumValue").in(Collections.singletonList(MyEnum.TWO))));
+		AggregationResults<Document> results = mongoTemplate.aggregate(agg, mongoTemplate.getCollectionName(WithEnum.class),
+				Document.class);
+		assertThat(results.getMappedResults()).hasSize(1);
+	}
 	private void createUsersWithReferencedPersons() {
 		mongoTemplate.dropCollection(User.class);
@@ -2240,4 +2256,15 @@ public class AggregationTests {
 		String p1;
 		String p2;
 	}
+	static enum MyEnum {
+		ONE, TWO
+	}
+	@lombok.Data
+	static class WithEnum {
+		@Id String id;
+		MyEnum enumValue;
+	}
 }

View File

@@ -29,6 +29,11 @@ import org.bson.Document;
 import org.junit.jupiter.api.Test;
 import org.springframework.data.domain.Sort.Direction;
 import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond;
+import org.springframework.data.mongodb.core.aggregation.ProjectionOperationUnitTests.BookWithFieldAnnotation;
+import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
+import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
+import org.springframework.data.mongodb.core.convert.QueryMapper;
+import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
 import org.springframework.data.mongodb.core.query.Criteria;
 /**
@@ -582,6 +587,17 @@ public class AggregationUnitTests {
 				"{\"attributeRecordArrays\": {\"$reduce\": {\"input\": \"$attributeRecordArrays\", \"initialValue\": [], \"in\": {\"$concatArrays\": [\"$$value\", \"$$this\"]}}}}"));
 	}
+	@Test // DATAMONGO-2644
+	void projectOnIdIsAlwaysValid() {
+		MongoMappingContext mappingContext = new MongoMappingContext();
+		Document target = new Aggregation(bucket("start"), project("_id")).toDocument("collection-1",
+				new RelaxedTypeBasedAggregationOperationContext(BookWithFieldAnnotation.class, mappingContext,
+						new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))));
+		assertThat(extractPipelineElement(target, 1, "$project")).isEqualTo(Document.parse(" { \"_id\" : \"$_id\" }"));
+	}
 	private Document extractPipelineElement(Document agg, int index, String operation) {
 		List<Document> pipeline = (List<Document>) agg.get("pipeline");

View File

@@ -1,6 +1,13 @@
 Spring Data MongoDB Changelog
 =============================
+Changes in version 3.1.1 (2020-11-11)
+-------------------------------------
+* DATAMONGO-2648 - Release 3.1.1 (2020.0.1).
+* DATAMONGO-2644 - Invalid reference '_id'! Error In spring data mongodb.
+* DATAMONGO-2635 - Enums Don't Properly Serialize In Aggregation Pipeline Facet.
+
 Changes in version 3.1.0 (2020-10-28)
 -------------------------------------
 * DATAMONGO-2642 - Upgrade to MongoDB Driver 4.1.1.
@@ -3216,5 +3223,6 @@ Repository

View File

@@ -1,4 +1,4 @@
-Spring Data MongoDB 3.1 GA (2020.0.0)
+Spring Data MongoDB 3.1.1 (2020.0.1)
 Copyright (c) [2010-2019] Pivotal Software, Inc.
 This product is licensed to you under the Apache License, Version 2.0 (the "License").
@@ -20,3 +20,4 @@ conditions of the subcomponent's license, as noted in the LICENSE file.