Compare commits


4 Commits

Author SHA1 Message Date
Mark Paluch
a9b2b8b198 Polishing.
Remove sysout from tests.

See #4491
2023-09-01 12:20:30 +02:00
Mark Paluch
4d82856c04 Consistently use the same reading strategies to read associations.
Return the value to set instead of calling the accessor directly. Remove duplicate calls to resolve associations.

See #4491
2023-09-01 12:19:59 +02:00
Mark Paluch
4f3471973e Correctly read unwrapped properties during constructor creation.
Closes #4491
2023-09-01 12:02:06 +02:00
Mark Paluch
70653ac4fc Prepare issue branch.
2023-09-01 12:01:01 +02:00
20 changed files with 88 additions and 396 deletions
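The diffs below implement what the commit messages above describe: @Unwrapped properties are now read correctly when the entity is instantiated through its persistence constructor (4f3471973e), and association reading returns the value to set instead of calling the accessor directly (4d82856c04). For orientation only, a minimal sketch of the kind of entity the constructor-creation path now handles; it mirrors the WithUnwrappedConstructor test added further down, and the Order/Customer names are hypothetical:

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Unwrapped;

// Hypothetical entity: the @Unwrapped.Empty value arrives through the
// persistence constructor, which is the path fixed by 4f3471973e.
class Order {

    private final @Id String id;
    private final @Unwrapped.Empty Customer customer; // populated from top-level document keys

    Order(String id, Customer customer) {
        this.id = id;
        this.customer = customer;
    }
}

class Customer {
    String street;
    String city;
}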

Jenkinsfile (83 changed lines)
View File

@@ -96,44 +96,6 @@ pipeline {
}
}
}
stage('Publish JDK (Java 17) + MongoDB 7.0') {
when {
anyOf {
changeset "ci/openjdk17-mongodb-7.0/**"
changeset "ci/pipeline.properties"
}
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }
steps {
script {
def image = docker.build("springci/spring-data-with-mongodb-7.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.7.0.version']} ci/openjdk17-mongodb-7.0/")
docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
image.push()
}
}
}
}
stage('Publish JDK (Java 20) + MongoDB 7.0') {
when {
anyOf {
changeset "ci/openjdk20-mongodb-7.0/**"
changeset "ci/pipeline.properties"
}
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }
steps {
script {
def image = docker.build("springci/spring-data-with-mongodb-7.0:${p['java.next.tag']}", "--build-arg BASE=${p['docker.java.next.image']} --build-arg MONGODB=${p['docker.mongodb.7.0.version']} ci/openjdk20-mongodb-7.0/")
docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
image.push()
}
}
}
}
}
}
@@ -175,6 +137,7 @@ pipeline {
}
}
parallel {
stage("test: MongoDB 5.0 (main)") {
agent {
label 'data'
@@ -240,50 +203,6 @@ pipeline {
}
}
}
stage("test: MongoDB 7.0 (main)") {
agent {
label 'data'
}
options { timeout(time: 30, unit: 'MINUTES') }
environment {
ARTIFACTORY = credentials("${p['artifactory.credentials']}")
}
steps {
script {
docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-7.0:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongosh --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}
}
}
stage("test: MongoDB 7.0 (next)") {
agent {
label 'data'
}
options { timeout(time: 30, unit: 'MINUTES') }
environment {
ARTIFACTORY = credentials("${p['artifactory.credentials']}")
}
steps {
script {
docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-7.0:${p['java.next.tag']}").inside(p['docker.java.inside.basic']) {
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongosh --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}
}
}
}
}
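The two stages removed above start a single-node replica set (rs0 on 127.0.0.1:27017) inside the container before running the Maven build. As a side note, a minimal sketch of connecting to such a replica set from test code; the connection string is an assumption derived from the rs.initiate call above, and MongoClients is the MongoDB Java driver entry point that Spring Data MongoDB builds on:

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

class ReplicaSetConnectionSketch {

    public static void main(String[] args) {
        // Assumed URI matching the single-node replica set started in the pipeline stage.
        try (MongoClient client = MongoClients.create("mongodb://127.0.0.1:27017/?replicaSet=rs0")) {
            client.getDatabase("test").listCollectionNames().forEach(System.out::println);
        }
    }
}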

View File

@@ -1,24 +0,0 @@
ARG BASE
FROM ${BASE}
# Any ARG statements before FROM are cleared.
ARG MONGODB
ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive
RUN set -eux; \
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
sed -i -e 's/http/https/g' /etc/apt/sources.list && \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
# MongoDB 7.0 release signing key
wget -qO - https://www.mongodb.org/static/pgp/server-7.0.asc | apt-key add - && \
# Needed when MongoDB creates a 7.0 folder.
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/7.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-7.0.list && \
echo ${TZ} > /etc/timezone
RUN apt-get update && \
apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*

View File

@@ -1,24 +0,0 @@
ARG BASE
FROM ${BASE}
# Any ARG statements before FROM are cleared.
ARG MONGODB
ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive
RUN set -eux; \
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
sed -i -e 's/http/https/g' /etc/apt/sources.list && \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
# MongoDB 7.0 release signing key
wget -qO - https://www.mongodb.org/static/pgp/server-7.0.asc | apt-key add - && \
# Needed when MongoDB creates a 7.0 folder.
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu jammy/mongodb-org/7.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-6.0.list && \
echo ${TZ} > /etc/timezone
RUN apt-get update && \
apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*

View File

@@ -10,7 +10,6 @@ docker.java.next.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/ecli
docker.mongodb.4.4.version=4.4.23
docker.mongodb.5.0.version=5.0.19
docker.mongodb.6.0.version=6.0.8
docker.mongodb.7.0.version=7.0.1
# Supported versions of Redis
docker.redis.6.version=6.2.13

View File

@@ -5,7 +5,7 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.2.x-4473-SNAPSHOT</version>
<version>4.2.0-GH-4491-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Spring Data MongoDB</name>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.2.x-4473-SNAPSHOT</version>
<version>4.2.0-GH-4491-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.2.x-4473-SNAPSHOT</version>
<version>4.2.0-GH-4491-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.2.x-4473-SNAPSHOT</version>
<version>4.2.0-GH-4491-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -15,11 +15,8 @@
*/
package org.springframework.data.mongodb.core.aggregation;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.bson.Document;
import org.springframework.util.Assert;
@@ -28,7 +25,6 @@ import org.springframework.util.Assert;
* Gateway to {@literal accumulator} aggregation operations.
*
* @author Christoph Strobl
* @author Julia Lee
* @since 1.10
* @soundtrack Rage Against The Machine - Killing In The Name
*/
@@ -56,7 +52,6 @@ public class AccumulatorOperators {
/**
* @author Christoph Strobl
* @author Julia Lee
*/
public static class AccumulatorOperatorFactory {
@@ -251,20 +246,6 @@ public class AccumulatorOperators {
};
}
/**
* Creates new {@link AggregationExpression} that calculates the requested percentile(s) of the
* associated numeric value expression.
*
* @return new instance of {@link Percentile}.
* @param percentages must not be {@literal null}.
* @since 4.2
*/
public Percentile percentile(Double... percentages) {
Percentile percentile = usesFieldRef() ? Percentile.percentileOf(fieldReference)
: Percentile.percentileOf(expression);
return percentile.percentages(percentages);
}
private boolean usesFieldRef() {
return fieldReference != null;
}
@@ -996,90 +977,4 @@ public class AccumulatorOperators {
return "$expMovingAvg";
}
}
/**
* {@link AggregationExpression} for {@code $percentile}.
*
* @author Julia Lee
* @since 4.2
*/
public static class Percentile extends AbstractAggregationExpression {
private Percentile(Object value) {
super(value);
}
/**
* Creates new {@link Percentile}.
*
* @param fieldReference must not be {@literal null}.
* @return new instance of {@link Percentile}.
*/
public static Percentile percentileOf(String fieldReference) {
Assert.notNull(fieldReference, "FieldReference must not be null");
Map<String, Object> fields = new HashMap<>();
fields.put("input", Fields.field(fieldReference));
fields.put("method", "approximate");
return new Percentile(fields);
}
/**
* Creates new {@link Percentile}.
*
* @param expression must not be {@literal null}.
* @return new instance of {@link Percentile}.
*/
public static Percentile percentileOf(AggregationExpression expression) {
Assert.notNull(expression, "Expression must not be null");
Map<String, Object> fields = new HashMap<>();
fields.put("input", expression);
fields.put("method", "approximate");
return new Percentile(fields);
}
/**
* Define the percentile value(s) that must resolve to percentages in the range {@code 0.0 - 1.0} inclusive.
*
* @param percentages must not be {@literal null}.
* @return new instance of {@link Percentile}.
*/
public Percentile percentages(Double... percentages) {
Assert.notEmpty(percentages, "Percentages must not be null or empty");
return new Percentile(append("p", Arrays.asList(percentages)));
}
/**
* Creates new {@link Percentile} with all previously added inputs appending the given one. <br />
* <strong>NOTE:</strong> Only possible in {@code $project} stage.
*
* @param fieldReference must not be {@literal null}.
* @return new instance of {@link Percentile}.
*/
public Percentile and(String fieldReference) {
Assert.notNull(fieldReference, "FieldReference must not be null");
return new Percentile(appendTo("input", Fields.field(fieldReference)));
}
/**
* Creates new {@link Percentile} with all previously added inputs appending the given one. <br />
* <strong>NOTE:</strong> Only possible in {@code $project} stage.
*
* @param expression must not be {@literal null}.
* @return new instance of {@link Percentile}.
*/
public Percentile and(AggregationExpression expression) {
Assert.notNull(expression, "Expression must not be null");
return new Percentile(appendTo("input", expression));
}
@Override
protected String getMongoMethod() {
return "$percentile";
}
}
}
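For reference, a minimal usage sketch of the Percentile operator removed above; the rendered document mirrors the unit-test expectations further down in this compare. The static valueOf refers to AccumulatorOperators.valueOf, and the class name is illustrative:

import static org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.valueOf;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.Aggregation;

class PercentileRenderingSketch {

    public static void main(String[] args) {
        // Renders { "$percentile" : { "input" : "$score", "method" : "approximate", "p" : [0.3, 0.9] } }
        Document percentile = valueOf("score").percentile(0.3, 0.9)
                .toDocument(Aggregation.DEFAULT_CONTEXT);
        System.out.println(percentile.toJson());
    }
}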

View File

@@ -25,7 +25,6 @@ import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Co
import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovarianceSamp;
import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Max;
import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Min;
import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Percentile;
import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevPop;
import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevSamp;
import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Sum;
@@ -42,7 +41,6 @@ import org.springframework.util.StringUtils;
* @author Christoph Strobl
* @author Mark Paluch
* @author Mushtaq Ahmed
* @author Julia Lee
* @since 1.10
*/
public class ArithmeticOperators {
@@ -934,20 +932,6 @@ public class ArithmeticOperators {
return usesFieldRef() ? Tanh.tanhOf(fieldReference, unit) : Tanh.tanhOf(expression, unit);
}
/**
* Creates new {@link AggregationExpression} that calculates the requested percentile(s) of the
* numeric value.
*
* @return new instance of {@link Percentile}.
* @param percentages must not be {@literal null}.
* @since 4.2
*/
public Percentile percentile(Double... percentages) {
Percentile percentile = usesFieldRef() ? AccumulatorOperators.Percentile.percentileOf(fieldReference)
: AccumulatorOperators.Percentile.percentileOf(expression);
return percentile.percentages(percentages);
}
private boolean usesFieldRef() {
return fieldReference != null;
}

View File

@@ -17,7 +17,16 @@ package org.springframework.data.mongodb.core.convert;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
@@ -41,7 +50,13 @@ import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.data.annotation.Reference;
import org.springframework.data.convert.CustomConversions;
import org.springframework.data.convert.TypeMapper;
import org.springframework.data.mapping.*;
import org.springframework.data.mapping.Association;
import org.springframework.data.mapping.InstanceCreatorMetadata;
import org.springframework.data.mapping.MappingException;
import org.springframework.data.mapping.Parameter;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.PersistentProperty;
import org.springframework.data.mapping.PersistentPropertyAccessor;
import org.springframework.data.mapping.callback.EntityCallbacks;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
@@ -492,7 +507,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
S instance = instantiator.createInstance(entity, provider);
if (entity.requiresPropertyPopulation()) {
return populateProperties(context, entity, documentAccessor, evaluator, instance);
}
@@ -571,14 +585,18 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
ConversionContext propertyContext = context.forProperty(prop);
MongoDbPropertyValueProvider valueProviderToUse = valueProvider.withContext(propertyContext);
if (prop.isAssociation() && !entity.isCreatorArgument(prop)) {
if (prop.isAssociation()) {
if (callback == null) {
callback = getDbRefResolverCallback(propertyContext, documentAccessor, evaluator);
}
readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback,
propertyContext, evaluator);
Object value = readAssociation(prop.getRequiredAssociation(), documentAccessor, dbRefProxyHandler, callback,
propertyContext);
if (value != null) {
accessor.setProperty(prop, value);
}
continue;
}
@@ -593,17 +611,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
continue;
}
if (prop.isAssociation()) {
if (callback == null) {
callback = getDbRefResolverCallback(propertyContext, documentAccessor, evaluator);
}
readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback,
propertyContext, evaluator);
continue;
}
accessor.setProperty(prop, valueProviderToUse.getPropertyValue(prop));
}
}
@@ -615,9 +622,10 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
(prop, bson, e, path) -> MappingMongoConverter.this.getValueInternal(context, prop, bson, e));
}
private void readAssociation(Association<MongoPersistentProperty> association, PersistentPropertyAccessor<?> accessor,
@Nullable
private Object readAssociation(Association<MongoPersistentProperty> association,
DocumentAccessor documentAccessor, DbRefProxyHandler handler, DbRefResolverCallback callback,
ConversionContext context, SpELExpressionEvaluator evaluator) {
ConversionContext context) {
MongoPersistentProperty property = association.getInverse();
Object value = documentAccessor.get(property);
@@ -630,30 +638,27 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
if (conversionService.canConvert(DocumentPointer.class, property.getActualType())) {
if (value == null) {
return;
return null;
}
DocumentPointer<?> pointer = () -> value;
// collection like special treatment
accessor.setProperty(property, conversionService.convert(pointer, property.getActualType()));
return conversionService.convert(pointer, property.getActualType());
} else {
accessor.setProperty(property,
dbRefResolver.resolveReference(property,
return dbRefResolver.resolveReference(property,
new DocumentReferenceSource(documentAccessor.getDocument(), documentAccessor.get(property)),
referenceLookupDelegate, context.forProperty(property)::convert));
referenceLookupDelegate, context.forProperty(property)::convert);
}
return;
}
if (value == null) {
return;
return null;
}
if (value instanceof DBRef dbref) {
accessor.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, callback, handler));
return;
return dbRefResolver.resolveDbRef(property, dbref, callback, handler);
}
/*
@@ -664,18 +669,18 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
if (value instanceof Document document) {
if (property.isMap()) {
if (document.isEmpty() || peek(document.values()) instanceof DBRef) {
accessor.setProperty(property, dbRefResolver.resolveDbRef(property, null, callback, handler));
return dbRefResolver.resolveDbRef(property, null, callback, handler);
} else {
accessor.setProperty(property, readMap(context, document, property.getTypeInformation()));
return readMap(context, document, property.getTypeInformation());
}
} else {
accessor.setProperty(property, read(property.getActualType(), document));
return read(property.getActualType(), document);
}
} else if (value instanceof Collection<?> collection && !collection.isEmpty()
&& peek(collection) instanceof Document) {
accessor.setProperty(property, readCollectionOrArray(context, collection, property.getTypeInformation()));
return readCollectionOrArray(context, collection, property.getTypeInformation());
} else {
accessor.setProperty(property, dbRefResolver.resolveDbRef(property, null, callback, handler));
return dbRefResolver.resolveDbRef(property, null, callback, handler);
}
}
@@ -1961,25 +1966,26 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
@SuppressWarnings("unchecked")
public <T> T getPropertyValue(MongoPersistentProperty property) {
if (property.isDbReference() && property.getDBRef().lazy()) {
ConversionContext propertyContext = context.forProperty(property);
Object rawRefValue = accessor.get(property);
if (rawRefValue == null) {
return null;
}
if (property.isAssociation()) {
DbRefResolverCallback callback = new DefaultDbRefResolverCallback(accessor.getDocument(), context.getPath(),
evaluator, (prop, bson, evaluator, path) -> MappingMongoConverter.this.getValueInternal(context, prop, bson,
evaluator));
DBRef dbref = rawRefValue instanceof DBRef dbRef ? dbRef : null;
return (T) dbRefResolver.resolveDbRef(property, dbref, callback, dbRefProxyHandler);
return (T) readAssociation(property.getRequiredAssociation(), accessor, dbRefProxyHandler, callback,
propertyContext);
}
if (property.isDocumentReference()) {
return (T) dbRefResolver.resolveReference(property,
new DocumentReferenceSource(accessor.getDocument(), accessor.get(property)),
referenceLookupDelegate, context::convert);
if (property.isUnwrapped()) {
return (T) readUnwrapped(propertyContext, accessor, property,
mappingContext.getRequiredPersistentEntity(property));
}
if (!accessor.hasValue(property)) {
return null;
}
return super.getPropertyValue(property);
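The hunks above apply the pattern from commit 4d82856c04: readAssociation returns the resolved value (or null) and a single caller decides whether to invoke accessor.setProperty, instead of each branch mutating the accessor itself. A framework-free sketch of that shape, with all names hypothetical rather than Spring Data API:

import java.util.HashMap;
import java.util.Map;

class ReturnValueInsteadOfAccessorSketch {

    // The resolver hands back the value (or null); it never touches the target itself.
    static Object readAssociation(Map<String, Object> document, String key) {
        return document.get(key);
    }

    // One call site decides whether anything gets set.
    static void populate(Map<String, Object> target, Map<String, Object> document, String key) {
        Object value = readAssociation(document, key);
        if (value != null) {
            target.put(key, value);
        }
    }

    public static void main(String[] args) {
        Map<String, Object> source = new HashMap<>();
        source.put("author", "ref-42");

        Map<String, Object> entity = new HashMap<>();
        populate(entity, source, "author");
        System.out.println(entity); // prints {author=ref-42}
    }
}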

View File

@@ -35,7 +35,6 @@ import org.springframework.util.ObjectUtils;
* @author Sebastien Gerard
* @author Christoph Strobl
* @author Mark Paluch
* @author Julia Lee
*/
public class MethodReferenceNode extends ExpressionNode {
@@ -229,8 +228,6 @@ public class MethodReferenceNode extends ExpressionNode {
.mappingParametersTo("n", "input"));
map.put("minN", mapArgRef().forOperator("$minN") //
.mappingParametersTo("n", "input"));
map.put("percentile", mapArgRef().forOperator("$percentile") //
.mappingParametersTo("input", "p", "method"));
// TYPE OPERATORS
map.put("type", singleArgRef().forOperator("$type"));

View File

@@ -31,7 +31,6 @@ import org.springframework.data.mongodb.util.aggregation.TestAggregationContext;
* Unit tests for {@link AccumulatorOperators}.
*
* @author Christoph Strobl
* @author Julia Lee
*/
class AccumulatorOperatorsUnitTests {
@@ -109,29 +108,6 @@ class AccumulatorOperatorsUnitTests {
.isEqualTo(Document.parse("{ $minN: { n: 3, input : \"$price\" } }"));
}
@Test // GH-4473
void rendersPercentileWithFieldReference() {
assertThat(valueOf("score").percentile(0.2).toDocument(Aggregation.DEFAULT_CONTEXT))
.isEqualTo(Document.parse("{ $percentile: { input: \"$score\", method: \"approximate\", p: [0.2] } }"));
assertThat(valueOf("score").percentile(0.3, 0.9).toDocument(Aggregation.DEFAULT_CONTEXT))
.isEqualTo(Document.parse("{ $percentile: { input: \"$score\", method: \"approximate\", p: [0.3, 0.9] } }"));
assertThat(valueOf("score").percentile(0.3, 0.9).and("scoreTwo").toDocument(Aggregation.DEFAULT_CONTEXT))
.isEqualTo(Document.parse("{ $percentile: { input: [\"$score\", \"$scoreTwo\"], method: \"approximate\", p: [0.3, 0.9] } }"));
}
@Test // GH-4473
void rendersPercentileWithExpression() {
assertThat(valueOf(Sum.sumOf("score")).percentile(0.1).toDocument(Aggregation.DEFAULT_CONTEXT))
.isEqualTo(Document.parse("{ $percentile: { input: {\"$sum\": \"$score\"}, method: \"approximate\", p: [0.1] } }"));
assertThat(valueOf("scoreOne").percentile(0.1, 0.2).and(Sum.sumOf("scoreTwo")).toDocument(Aggregation.DEFAULT_CONTEXT))
.isEqualTo(Document.parse("{ $percentile: { input: [\"$scoreOne\", {\"$sum\": \"$scoreTwo\"}], method: \"approximate\", p: [0.1, 0.2] } }"));
}
static class Jedi {
String name;

View File

@@ -1893,25 +1893,6 @@ public class AggregationTests {
assertThat(categorizeByYear).hasSize(3);
}
@Test // GH-4473
@EnableIfMongoServerVersion(isGreaterThanEqual = "7.0")
void percentileShouldBeAppliedCorrectly() {
mongoTemplate.insert(new DATAMONGO788(15, 16));
mongoTemplate.insert(new DATAMONGO788(17, 18));
Aggregation agg = Aggregation.newAggregation(
project().and(ArithmeticOperators.valueOf("x").percentile(0.9).and("y"))
.as("ninetiethPercentile"));
AggregationResults<Document> result = mongoTemplate.aggregate(agg, DATAMONGO788.class, Document.class);
// MongoDB server returns $percentile as an array of doubles
List<Document> rawResults = (List<Document>) result.getRawResults().get("results");
assertThat((List<Object>) rawResults.get(0).get("ninetiethPercentile")).containsExactly(16.0);
assertThat((List<Object>) rawResults.get(1).get("ninetiethPercentile")).containsExactly(18.0);
}
@Test // DATAMONGO-1986
void runMatchOperationCriteriaThroughQueryMapperForTypedAggregation() {

View File

@@ -25,7 +25,6 @@ import org.junit.jupiter.api.Test;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.DocumentTestUtils;
import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Percentile;
import org.springframework.data.mongodb.core.aggregation.SelectionOperators.Bottom;
import org.springframework.data.mongodb.core.query.Criteria;
@@ -35,7 +34,6 @@ import org.springframework.data.mongodb.core.query.Criteria;
* @author Oliver Gierke
* @author Thomas Darimont
* @author Gustavo de Geus
* @author Julia Lee
*/
class GroupOperationUnitTests {
@@ -268,18 +266,6 @@ class GroupOperationUnitTests {
Document.parse("{ $bottom : { output: [ \"$playerId\", \"$score\" ], sortBy: { \"score\": -1 }}}"));
}
@Test // GH-4473
void groupOperationAllowsAddingFieldWithPercentileAggregationExpression() {
GroupOperation groupOperation = Aggregation.group("id").and("scorePercentile",
Percentile.percentileOf("score").percentages(0.2));
Document groupClause = extractDocumentFromGroupOperation(groupOperation);
assertThat(groupClause).containsEntry("scorePercentile",
Document.parse("{ $percentile : { input: \"$score\", method: \"approximate\", p: [0.2]}}"));
}
private Document extractDocumentFromGroupOperation(GroupOperation groupOperation) {
Document document = groupOperation.toDocument(Aggregation.DEFAULT_CONTEXT);
Document groupClause = DocumentTestUtils.getAsDocument(document, "$group");

View File

@@ -2241,26 +2241,6 @@ public class ProjectionOperationUnitTests {
"{ $project: { \"author\" : 1, \"myArray\" : [ \"$ti_t_le\", \"plain - string\", { \"$sum\" : [\"$ti_t_le\", 10] } ] } } ] }"));
}
@Test // GH-4473
void shouldRenderPercentileAggregationExpression() {
Document agg = project()
.and(ArithmeticOperators.valueOf("score").percentile(0.3, 0.9)).as("scorePercentiles")
.toDocument(Aggregation.DEFAULT_CONTEXT);
assertThat(agg).isEqualTo(Document.parse("{ $project: { scorePercentiles: { $percentile: { input: \"$score\", method: \"approximate\", p: [0.3, 0.9] } }} } }"));
}
@Test // GH-4473
void shouldRenderPercentileWithMultipleArgsAggregationExpression() {
Document agg = project()
.and(ArithmeticOperators.valueOf("scoreOne").percentile(0.4).and("scoreTwo")).as("scorePercentiles")
.toDocument(Aggregation.DEFAULT_CONTEXT);
assertThat(agg).isEqualTo(Document.parse("{ $project: { scorePercentiles: { $percentile: { input: [\"$scoreOne\", \"$scoreTwo\"], method: \"approximate\", p: [0.4] } }} } }"));
}
private static Document extractOperation(String field, Document fromProjectClause) {
return (Document) fromProjectClause.get(field);
}

View File

@@ -33,7 +33,6 @@ import org.springframework.data.mongodb.core.Person;
* @author Oliver Gierke
* @author Christoph Strobl
* @author Divya Srivastava
* @author Julia Lee
*/
public class SpelExpressionTransformerUnitTests {
@@ -1256,19 +1255,7 @@ public class SpelExpressionTransformerUnitTests {
void shouldRenderLocf() {
assertThat(transform("locf(price)")).isEqualTo("{ $locf: \"$price\" }");
}
@Test // GH-4473
void shouldRenderPercentile() {
assertThat(transform("percentile(new String[]{\"$scoreOne\", \"$scoreTwo\" }, new double[]{0.4}, \"approximate\")"))
.isEqualTo("{ $percentile : { input : [\"$scoreOne\", \"$scoreTwo\"], p : [0.4], method : \"approximate\" }}");
assertThat(transform("percentile(score, new double[]{0.4, 0.85}, \"approximate\")"))
.isEqualTo("{ $percentile : { input : \"$score\", p : [0.4, 0.85], method : \"approximate\" }}");
assertThat(transform("percentile(\"$score\", new double[]{0.4, 0.85}, \"approximate\")"))
.isEqualTo("{ $percentile : { input : \"$score\", p : [0.4, 0.85], method : \"approximate\" }}");
}
private Document transform(String expression, Object... params) {
return (Document) transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params);
}

View File

@@ -2346,6 +2346,24 @@ class MappingMongoConverterUnitTests {
.isEqualTo(expected);
}
@Test // GH-4491
void readUnwrappedTypeWithComplexValueUsingConstructor() {
org.bson.Document source = new org.bson.Document("_id", "id-1").append("stringValue", "hello").append("address",
new org.bson.Document("s", "1007 Mountain Drive").append("city", "Gotham"));
WithUnwrappedConstructor target = converter.read(WithUnwrappedConstructor.class, source);
Address expected = new Address();
expected.city = "Gotham";
expected.street = "1007 Mountain Drive";
assertThat(target.embeddableValue.stringValue) //
.isEqualTo("hello");
assertThat(target.embeddableValue.address) //
.isEqualTo(expected);
}
@Test // DATAMONGO-1902
void writeUnwrappedTypeWithComplexValue() {
@@ -3422,6 +3440,18 @@ class MappingMongoConverterUnitTests {
@Unwrapped.Nullable EmbeddableType embeddableValue;
}
static class WithUnwrappedConstructor {
private final String id;
private final @Unwrapped.Empty EmbeddableType embeddableValue;
public WithUnwrappedConstructor(String id, EmbeddableType embeddableValue) {
this.id = id;
this.embeddableValue = embeddableValue;
}
}
static class WithPrefixedNullableUnwrapped {
String id;

View File

@@ -353,7 +353,7 @@ public abstract class AbstractEncryptionTestBase {
template.save(p3);
template.execute(Person.class, collection -> {
collection.find(new Document()).forEach(it -> System.out.println(it.toJson()));
collection.find(new Document());
return null;
});

View File

@@ -126,7 +126,7 @@ At the time of this writing, we provide support for the following Aggregation Op
| `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue`
| Group/Accumulator Aggregation Operators
| `addToSet`, `bottom`, `bottomN`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `firstN`, `last`, `lastN` `max`, `maxN`, `min`, `minN`, `avg`, `push`, `sum`, `top`, `topN`, `count` (+++*+++), `percentile`, `stdDevPop`, `stdDevSamp`
| `addToSet`, `bottom`, `bottomN`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `firstN`, `last`, `lastN` `max`, `maxN`, `min`, `minN`, `avg`, `push`, `sum`, `top`, `topN`, `count` (+++*+++), `stdDevPop`, `stdDevSamp`
| Arithmetic Aggregation Operators
| `abs`, `acos`, `acosh`, `add` (+++*+++ via `plus`), `asin`, `asin`, `atan`, `atan2`, `atanh`, `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc`