Compare commits
24 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 1c10eb3465 | |
| | 1b53059f55 | |
| | 621c34b490 | |
| | 2df697b45b | |
| | 687a2ea68c | |
| | aca91ffec1 | |
| | 5c12963ba2 | |
| | 0b8a3cf774 | |
| | 4d0e2c31d1 | |
| | 90fe6805b9 | |
| | ef7870a3a4 | |
| | e1faf2652f | |
| | a169203cec | |
| | 8b9fd1f1b5 | |
| | a1725ea2f4 | |
| | 06e11ec75d | |
| | 55ab4aa89e | |
| | 7231de434c | |
| | c00f34ad1f | |
| | 91d888aa69 | |
| | 31e949638f | |
| | 5bb73641bd | |
| | e959b0494c | |
| | ffe95ff72d | |
.mvn/wrapper/maven-wrapper.properties (vendored, 4 changes)

@@ -1,2 +1,2 @@
-#Tue Jun 13 08:51:00 CEST 2023
-distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.2/apache-maven-3.9.2-bin.zip
+#Fri Aug 11 14:49:02 EDT 2023
+distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.4/apache-maven-3.9.4-bin.zip
Jenkinsfile (vendored, 43 changes)

@@ -20,25 +20,6 @@ pipeline {
 	stages {
 		stage("Docker images") {
 			parallel {
-				stage('Publish JDK (main) + MongoDB 4.0') {
-					when {
-						anyOf {
-							changeset "ci/openjdk8-mongodb-4.0/**"
-							changeset "ci/pipeline.properties"
-						}
-					}
-					agent { label 'data' }
-					options { timeout(time: 30, unit: 'MINUTES') }
-
-					steps {
-						script {
-							def image = docker.build("springci/spring-data-with-mongodb-4.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.4.0.version']} ci/openjdk8-mongodb-4.0/")
-							docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
-								image.push()
-							}
-						}
-					}
-				}
 				stage('Publish JDK (main) + MongoDB 4.4') {
 					when {
 						anyOf {
@@ -116,7 +97,7 @@ pipeline {
 					}
 					steps {
 						script {
-							docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-4.0:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
+							docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-4.4:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
 								sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
 								sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
 								sh 'sleep 10'
@@ -137,28 +118,6 @@ pipeline {
 					}
 				}
 			parallel {
-				stage("test: mongodb 4.4 (main)") {
-					agent {
-						label 'data'
-					}
-					options { timeout(time: 30, unit: 'MINUTES') }
-					environment {
-						ARTIFACTORY = credentials("${p['artifactory.credentials']}")
-					}
-					steps {
-						script {
-							docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-4.4:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
-								sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
-								sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
-								sh 'sleep 10'
-								sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
-								sh 'sleep 15'
-								sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
-							}
-						}
-					}
-				}
-
 				stage("test: mongodb 5.0 (main)") {
 					agent {
 						label 'data'
@@ -1,22 +0,0 @@
-ARG BASE
-FROM ${BASE}
-# Any ARG statements before FROM are cleared.
-ARG MONGODB
-
-ENV TZ=Etc/UTC
-ENV DEBIAN_FRONTEND=noninteractive
-
-RUN set -eux; \
-	sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
-	sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
-	sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
-	sed -i -e 's/http/https/g' /etc/apt/sources.list && \
-	apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 && \
-	apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 && \
-	echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list && \
-	echo ${TZ} > /etc/timezone
-
-RUN apt-get update && \
-	apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
-	apt-get clean && \
-	rm -rf /var/lib/apt/lists/*
@@ -1,7 +1,7 @@
 # Java versions
-java.main.tag=8u362-b09-jdk-focal
-java.next.tag=11.0.18_10-jdk-focal
-java.lts.tag=17.0.6_10-jdk-focal
+java.main.tag=8u382-b05-jdk-focal
+java.next.tag=20-jdk-jammy
+java.lts.tag=17.0.8_7-jdk-focal
 
 # Docker container images - standard
 docker.java.main.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.main.tag}
@@ -10,14 +10,14 @@ docker.java.lts.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclip
 
 # Supported versions of MongoDB
 docker.mongodb.4.0.version=4.0.28
-docker.mongodb.4.4.version=4.4.18
-docker.mongodb.5.0.version=5.0.14
+docker.mongodb.4.4.version=4.4.23
+docker.mongodb.5.0.version=5.0.19
 
 # Supported versions of Redis
-docker.redis.6.version=6.2.10
+docker.redis.6.version=6.2.13
 
 # Supported versions of Cassandra
-docker.cassandra.3.version=3.11.14
+docker.cassandra.3.version=3.11.15
 
 # Docker environment settings
 docker.java.inside.basic=-v $HOME:/tmp/jenkins-home
pom.xml (6 changes)

@@ -5,7 +5,7 @@
 
 	<groupId>org.springframework.data</groupId>
 	<artifactId>spring-data-mongodb-parent</artifactId>
-	<version>3.4.13</version>
+	<version>3.4.15</version>
 	<packaging>pom</packaging>
 
 	<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
 	<parent>
 		<groupId>org.springframework.data.build</groupId>
 		<artifactId>spring-data-parent</artifactId>
-		<version>2.7.13</version>
+		<version>2.7.15</version>
 	</parent>
 
 	<modules>
@@ -26,7 +26,7 @@
 	<properties>
 		<project.type>multi</project.type>
 		<dist.id>spring-data-mongodb</dist.id>
-		<springdata.commons>2.7.13</springdata.commons>
+		<springdata.commons>2.7.15</springdata.commons>
 		<mongo>4.6.1</mongo>
 		<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
 		<jmh.version>1.19</jmh.version>
@@ -7,7 +7,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.4.13</version>
+		<version>3.4.15</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
 
@@ -14,7 +14,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.4.13</version>
+		<version>3.4.15</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
 
@@ -11,7 +11,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.4.13</version>
+		<version>3.4.15</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
 
@@ -207,8 +207,9 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
 					target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required));
 			}
 		}
-		return targetProperties.size() == 1 ? targetProperties.iterator().next()
+		JsonSchemaProperty schemaProperty = targetProperties.size() == 1 ? targetProperties.iterator().next()
 				: JsonSchemaProperty.merged(targetProperties);
+		return applyEncryptionDataIfNecessary(property, schemaProperty);
 	}
 }
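A hedged usage sketch of the reworked creator: the `WithEncryptedEntityLikeProperty` fixture and the expected `encrypt` output come from the unit test added further down in this compare; the `SomeDomainType` stub and the `main` wrapper are illustrative only.

```java
import org.bson.Document;
import org.springframework.data.mongodb.core.MongoJsonSchemaCreator;
import org.springframework.data.mongodb.core.mapping.Encrypted;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;

class EncryptedSchemaSketch {

	static class SomeDomainType {}

	@Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
	static class WithEncryptedEntityLikeProperty {
		@Encrypted SomeDomainType domainTypeValue;
	}

	public static void main(String[] args) {

		MongoJsonSchema schema = MongoJsonSchemaCreator.create() //
				.filter(MongoJsonSchemaCreator.encryptedOnly()) // keep only encrypted properties
				.createSchemaFor(WithEncryptedEntityLikeProperty.class);

		// with applyEncryptionDataIfNecessary(...) in place, the entity-like property is wrapped
		// in an encrypt spec: { properties: { domainTypeValue: { encrypt: { bsonType: 'object' } } } }
		Document schemaDocument = schema.schemaDocument();
		System.out.println(schemaDocument.toJson());
	}
}
```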
@@ -64,7 +64,7 @@ class AggregationOperationRenderer {
 				contextToUse = new InheritingExposedFieldsAggregationOperationContext(fields, contextToUse);
 			} else {
 				contextToUse = fields.exposesNoFields() ? DEFAULT_CONTEXT
-						: new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), contextToUse);
+						: new ExposedFieldsAggregationOperationContext(fields, contextToUse);
 			}
 		}
 	}
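The practical effect, exercised by the new tests near the end of this compare: a field-exposing stage such as `$unwind` now hands its exposed fields to the context used for the following stage, so `@Field` aliases keep resolving after it. A rough sketch of such a pipeline, assuming an `Item` type whose `itemId` property is aliased to the raw field `item_id` (as the new `AggregationTests` assertions imply):

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;

import java.util.List;

import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.mapping.Field;

class AliasedFieldPipelineSketch {

	static class Item {
		@Field("item_id") String itemId; // alias that must survive the $unwind stage
		List<String> tags;
	}

	static TypedAggregation<Item> aggregation() {
		// the trailing $match references "itemId"; after this change it is still mapped to "item_id"
		return newAggregation(Item.class,
				match(where("itemId").is("1")),
				unwind("tags"),
				match(where("itemId").is("1").and("tags").is("c")));
	}
}
```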
@@ -15,6 +15,7 @@
  */
 package org.springframework.data.mongodb.core.aggregation;
 
+import org.bson.Document;
 import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
 
 /**
@@ -22,6 +23,7 @@ import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldRefe
  * {@link AggregationOperationContext}.
  *
  * @author Mark Paluch
+ * @author Christoph Strobl
  * @since 1.9
  */
 class InheritingExposedFieldsAggregationOperationContext extends ExposedFieldsAggregationOperationContext {
@@ -47,6 +49,11 @@ class InheritingExposedFieldsAggregationOperationContext extends ExposedFieldsAg
 	 * (non-Javadoc)
 	 * @see org.springframework.data.mongodb.core.aggregation.ExposedFieldsAggregationOperationContext#resolveExposedField(org.springframework.data.mongodb.core.aggregation.Field, java.lang.String)
 	 */
+	@Override
+	public Document getMappedObject(Document document) {
+		return previousContext.getMappedObject(document);
+	}
+
 	@Override
 	protected FieldReference resolveExposedField(Field field, String name) {
@@ -1077,7 +1077,7 @@ public class QueryMapper {
 	protected static class MetadataBackedField extends Field {
 
 		private static final Pattern POSITIONAL_PARAMETER_PATTERN = Pattern.compile("\\.\\$(\\[.*?\\])?");
-		private static final Pattern DOT_POSITIONAL_PATTERN = Pattern.compile("\\.\\d+(?!$)");
+		private static final Pattern NUMERIC_SEGMENT = Pattern.compile("\\d+");
 		private static final String INVALID_ASSOCIATION_REFERENCE = "Invalid path reference %s! Associations can only be pointed to directly or via their id property!";
 
 		private final MongoPersistentEntity<?> entity;
@@ -1243,14 +1243,13 @@ public class QueryMapper {
 		private PersistentPropertyPath<MongoPersistentProperty> getPath(String pathExpression,
 				@Nullable MongoPersistentProperty sourceProperty) {
 
-			String rawPath = removePlaceholders(POSITIONAL_OPERATOR,
-					removePlaceholders(DOT_POSITIONAL_PATTERN, pathExpression));
-
 			if (sourceProperty != null && sourceProperty.getOwner().equals(entity)) {
 				return mappingContext.getPersistentPropertyPath(
 						PropertyPath.from(Pattern.quote(sourceProperty.getName()), entity.getTypeInformation()));
 			}
 
+			String rawPath = resolvePath(pathExpression);
+
 			PropertyPath path = forName(rawPath);
 			if (path == null || isPathToJavaLangClassProperty(path)) {
 				return null;
@@ -1345,6 +1344,38 @@ public class QueryMapper {
 			return false;
 		}
 
+		private static String resolvePath(String source) {
+
+			String[] segments = source.split("\\.");
+			if (segments.length == 1) {
+				return source;
+			}
+
+			List<String> path = new ArrayList<>(segments.length);
+
+			/* always start from a property, so we can skip the first segment.
+			   from there remove any position placeholder */
+			for(int i=1; i < segments.length; i++) {
+				String segment = segments[i];
+				if (segment.startsWith("[") && segment.endsWith("]")) {
+					continue;
+				}
+				if (NUMERIC_SEGMENT.matcher(segment).matches()) {
+					continue;
+				}
+				path.add(segment);
+			}
+
+			// when property is followed only by placeholders eg. 'values.0.3.90'
+			// or when there is no difference in the number of segments
+			if (path.isEmpty() || segments.length == path.size() + 1) {
+				return source;
+			}
+
+			path.add(0, segments[0]);
+			return StringUtils.collectionToDelimitedString(path, ".");
+		}
+
 		/**
 		 * Return the {@link Converter} to be used to created the mapped key. Default implementation will use
 		 * {@link PropertyToFieldNameConverter}.
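A rough illustration of what the new `resolvePath`-based handling means for `$set` paths containing numeric map keys; the expected output mirrors the parameterized `UpdateMapperUnitTests` further down, while the wiring here is a simplified sketch rather than the tests' exact setup.

```java
import java.util.Map;

import org.bson.Document;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.query.Update;

class NestedPathMappingSketch {

	// mirrors the EntityWithNestedMap fixture used by the unit tests
	static class EntityWithNestedMap {
		Map<String, Map<String, Map<String, Object>>> levelOne;
	}

	public static void main(String[] args) {

		MongoMappingContext mappingContext = new MongoMappingContext();
		UpdateMapper mapper = new UpdateMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext));

		// GH-4426: multi-digit segments such as "32" are now kept as-is instead of being
		// partially stripped by the old DOT_POSITIONAL_PATTERN
		Update update = new Update().set("levelOne.0.1.32", "4");
		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
				mappingContext.getPersistentEntity(EntityWithNestedMap.class));

		// expected: { "$set" : { "levelOne.0.1.32" : "4" } }
		System.out.println(mappedUpdate.toJson());
	}
}
```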
@@ -36,6 +36,7 @@ import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.Timest
 import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;
 import org.springframework.util.ObjectUtils;
+import org.springframework.util.StringUtils;
 
 /**
  * {@link JsonSchemaProperty} implementation.
@@ -1171,7 +1172,9 @@ public class IdentifiableJsonSchemaProperty<T extends JsonSchemaObject> implemen
 				enc.append("bsonType", type.toBsonType().value()); // TODO: no samples with type -> is it bson type all the way?
 			}
 
-			enc.append("algorithm", algorithm);
+			if (StringUtils.hasText(algorithm)) {
+				enc.append("algorithm", algorithm);
+			}
 
 			propertySpecification.append("encrypt", enc);
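In effect, the generated `encrypt` specification only carries an `algorithm` entry when one is actually set. The algorithm-less shape below is the one asserted by the new schema-creator test; the variant with an algorithm is illustrative.

```java
import org.bson.Document;

class EncryptSpecSketch {

	// property encrypted without an explicit algorithm (e.g. a nested @Encrypted domain type):
	// { "encrypt" : { "bsonType" : "object" } }
	static Document withoutAlgorithm() {
		return new Document("encrypt", new Document("bsonType", "object"));
	}

	// property/entity declaring an algorithm:
	// { "encrypt" : { "bsonType" : "object", "algorithm" : "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" } }
	static Document withAlgorithm() {
		return new Document("encrypt", new Document("bsonType", "object")
				.append("algorithm", "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic"));
	}
}
```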
@@ -271,6 +271,17 @@ class MappingMongoJsonSchemaCreatorUnitTests {
 				.containsEntry("properties.value", new Document("type", "string"));
 	}
 
+	@Test // GH-4454
+	void wrapEncryptedEntityTypeLikeProperty() {
+
+		MongoJsonSchema schema = MongoJsonSchemaCreator.create() //
+				.filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields
+				.createSchemaFor(WithEncryptedEntityLikeProperty.class);
+
+		assertThat(schema.schemaDocument()) //
+				.containsEntry("properties.domainTypeValue", Document.parse("{'encrypt': {'bsonType': 'object' } }"));
+	}
+
 	// --> TYPES AND JSON
 
 	// --> ENUM
@@ -684,4 +695,9 @@ class MappingMongoJsonSchemaCreatorUnitTests {
 	static class PropertyClashWithA {
 		Integer aNonEncrypted;
 	}
 
+	@Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
+	static class WithEncryptedEntityLikeProperty {
+		@Encrypted SomeDomainType domainTypeValue;
+	}
+
 }
@@ -35,8 +35,10 @@ import org.springframework.context.annotation.Configuration;
 import org.springframework.dao.DataIntegrityViolationException;
 import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
 import org.springframework.data.mongodb.core.CollectionOptions.ValidationOptions;
+import org.springframework.data.mongodb.core.mapping.Encrypted;
 import org.springframework.data.mongodb.core.mapping.Field;
 import org.springframework.data.mongodb.core.query.Criteria;
+import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
 import org.springframework.data.mongodb.test.util.Client;
 import org.springframework.data.mongodb.test.util.MongoClientExtension;
 import org.springframework.lang.Nullable;
@@ -48,11 +50,13 @@ import com.mongodb.client.model.ValidationLevel;
 
 /**
  * Integration tests for {@link CollectionOptions#validation(ValidationOptions)} using
- * {@link org.springframework.data.mongodb.core.validation.CriteriaValidator} and
- * {@link org.springframework.data.mongodb.core.validation.DocumentValidator}.
+ * {@link org.springframework.data.mongodb.core.validation.CriteriaValidator},
+ * {@link org.springframework.data.mongodb.core.validation.DocumentValidator} and
+ * {@link org.springframework.data.mongodb.core.validation.JsonSchemaValidator}.
  *
  * @author Andreas Zink
  * @author Christoph Strobl
+ * @author Julia Lee
  */
 @ExtendWith({ MongoClientExtension.class, SpringExtension.class })
 public class MongoTemplateValidationTests {
@@ -188,6 +192,20 @@ public class MongoTemplateValidationTests {
 		assertThat(getValidatorInfo(COLLECTION_NAME)).isEqualTo(new Document("customName", new Document("$type", "bool")));
 	}
 
+	@Test // GH-4454
+	public void failsJsonSchemaValidationForEncryptedDomainEntityProperty() {
+
+		MongoJsonSchema schema = MongoJsonSchemaCreator.create().createSchemaFor(BeanWithEncryptedDomainEntity.class);
+		template.createCollection(COLLECTION_NAME, CollectionOptions.empty().schema(schema));
+
+		BeanWithEncryptedDomainEntity person = new BeanWithEncryptedDomainEntity();
+		person.encryptedDomainEntity = new SimpleBean("some string", 100, null);
+
+		assertThatExceptionOfType(DataIntegrityViolationException.class)
+				.isThrownBy(() -> template.save(person))
+				.withMessageContaining("Document failed validation");
+	}
+
 	private Document getCollectionOptions(String collectionName) {
 		return getCollectionInfo(collectionName).get("options", Document.class);
 	}
@@ -222,4 +240,10 @@ public class MongoTemplateValidationTests {
 		private @Nullable Integer rangedInteger;
 		private @Field("customName") Object customFieldName;
 	}
 
+	@org.springframework.data.mongodb.core.mapping.Document(collection = COLLECTION_NAME)
+	@Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
+	static class BeanWithEncryptedDomainEntity {
+		@Encrypted SimpleBean encryptedDomainEntity;
+	}
 }
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core.aggregation;
+
+import static org.assertj.core.api.Assertions.*;
+import static org.mockito.Mockito.*;
+
+import java.util.Arrays;
+
+import org.assertj.core.api.InstanceOfAssertFactories;
+import org.junit.jupiter.api.Test;
+import org.mockito.ArgumentCaptor;
+import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
+
+/**
+ * @author Christoph Strobl
+ * @author Julia Lee
+ */
+public class AggregationOperationRendererUnitTests {
+
+	@Test // GH-4443
+	void nonFieldsExposingAggregationOperationContinuesWithSameContextForNextStage() {
+
+		AggregationOperationContext rootContext = mock(AggregationOperationContext.class);
+		AggregationOperation stage1 = mock(AggregationOperation.class);
+		AggregationOperation stage2 = mock(AggregationOperation.class);
+
+		AggregationOperationRenderer.toDocument(Arrays.asList(stage1, stage2), rootContext);
+
+		verify(stage1).toPipelineStages(eq(rootContext));
+		verify(stage2).toPipelineStages(eq(rootContext));
+	}
+
+	@Test // GH-4443
+	void fieldsExposingAggregationOperationNotExposingFieldsForcesUseOfDefaultContextForNextStage() {
+
+		AggregationOperationContext rootContext = mock(AggregationOperationContext.class);
+		FieldsExposingAggregationOperation stage1 = mock(FieldsExposingAggregationOperation.class);
+		ExposedFields stage1fields = mock(ExposedFields.class);
+		AggregationOperation stage2 = mock(AggregationOperation.class);
+
+		when(stage1.getFields()).thenReturn(stage1fields);
+		when(stage1fields.exposesNoFields()).thenReturn(true);
+
+		AggregationOperationRenderer.toDocument(Arrays.asList(stage1, stage2), rootContext);
+
+		verify(stage1).toPipelineStages(eq(rootContext));
+		verify(stage2).toPipelineStages(eq(AggregationOperationRenderer.DEFAULT_CONTEXT));
+	}
+
+	@Test // GH-4443
+	void fieldsExposingAggregationOperationForcesNewContextForNextStage() {
+
+		AggregationOperationContext rootContext = mock(AggregationOperationContext.class);
+		FieldsExposingAggregationOperation stage1 = mock(FieldsExposingAggregationOperation.class);
+		ExposedFields stage1fields = mock(ExposedFields.class);
+		AggregationOperation stage2 = mock(AggregationOperation.class);
+
+		when(stage1.getFields()).thenReturn(stage1fields);
+		when(stage1fields.exposesNoFields()).thenReturn(false);
+
+		ArgumentCaptor<AggregationOperationContext> captor = ArgumentCaptor.forClass(AggregationOperationContext.class);
+
+		AggregationOperationRenderer.toDocument(Arrays.asList(stage1, stage2), rootContext);
+
+		verify(stage1).toPipelineStages(eq(rootContext));
+		verify(stage2).toPipelineStages(captor.capture());
+
+		assertThat(captor.getValue()).isInstanceOf(ExposedFieldsAggregationOperationContext.class)
+				.isNotInstanceOf(InheritingExposedFieldsAggregationOperationContext.class);
+	}
+
+	@Test // GH-4443
+	void inheritingFieldsExposingAggregationOperationForcesNewContextForNextStageKeepingReferenceToPreviousContext() {
+
+		AggregationOperationContext rootContext = mock(AggregationOperationContext.class);
+		InheritsFieldsAggregationOperation stage1 = mock(InheritsFieldsAggregationOperation.class);
+		InheritsFieldsAggregationOperation stage2 = mock(InheritsFieldsAggregationOperation.class);
+		InheritsFieldsAggregationOperation stage3 = mock(InheritsFieldsAggregationOperation.class);
+
+		ExposedFields exposedFields = mock(ExposedFields.class);
+		when(exposedFields.exposesNoFields()).thenReturn(false);
+		when(stage1.getFields()).thenReturn(exposedFields);
+		when(stage2.getFields()).thenReturn(exposedFields);
+		when(stage3.getFields()).thenReturn(exposedFields);
+
+		ArgumentCaptor<AggregationOperationContext> captor = ArgumentCaptor.forClass(AggregationOperationContext.class);
+
+		AggregationOperationRenderer.toDocument(Arrays.asList(stage1, stage2, stage3), rootContext);
+
+		verify(stage1).toPipelineStages(captor.capture());
+		verify(stage2).toPipelineStages(captor.capture());
+		verify(stage3).toPipelineStages(captor.capture());
+
+		assertThat(captor.getAllValues().get(0)).isEqualTo(rootContext);
+
+		assertThat(captor.getAllValues().get(1))
+				.asInstanceOf(InstanceOfAssertFactories.type(InheritingExposedFieldsAggregationOperationContext.class))
+				.extracting("previousContext").isSameAs(captor.getAllValues().get(0));
+
+		assertThat(captor.getAllValues().get(2))
+				.asInstanceOf(InstanceOfAssertFactories.type(InheritingExposedFieldsAggregationOperationContext.class))
+				.extracting("previousContext").isSameAs(captor.getAllValues().get(1));
+	}
+
+}
@@ -87,6 +87,7 @@ import com.mongodb.client.MongoCollection;
  * @author Maninder Singh
  * @author Sergey Shcherbakov
  * @author Minsu Kim
+ * @author Julia Lee
  */
 @ExtendWith(MongoTemplateExtension.class)
 public class AggregationTests {
@@ -116,7 +117,7 @@ public class AggregationTests {
 
 		mongoTemplate.flush(Product.class, UserWithLikes.class, DATAMONGO753.class, Data.class, DATAMONGO788.class,
 				User.class, Person.class, Reservation.class, Venue.class, MeterData.class, LineItem.class, InventoryItem.class,
-				Sales.class, Sales2.class, Employee.class, Art.class, Venue.class);
+				Sales.class, Sales2.class, Employee.class, Art.class, Venue.class, Item.class);
 
 		mongoTemplate.dropCollection(INPUT_COLLECTION);
 		mongoTemplate.dropCollection("personQueryTemp");
@@ -1962,6 +1963,42 @@ public class AggregationTests {
 		assertThat(aggregate.getMappedResults()).contains(widget);
 	}
 
+	@Test // GH-4443
+	void shouldHonorFieldAliasesForFieldReferencesUsingFieldExposingOperation() {
+
+		Item item1 = Item.builder().itemId("1").tags(Arrays.asList("a", "b")).build();
+		Item item2 = Item.builder().itemId("1").tags(Arrays.asList("a", "c")).build();
+		mongoTemplate.insert(Arrays.asList(item1, item2), Item.class);
+
+		TypedAggregation<Item> aggregation = newAggregation(Item.class,
+				match(where("itemId").is("1")),
+				unwind("tags"),
+				match(where("itemId").is("1").and("tags").is("c")));
+		AggregationResults<Document> results = mongoTemplate.aggregate(aggregation, Document.class);
+		List<Document> mappedResults = results.getMappedResults();
+		assertThat(mappedResults).hasSize(1);
+		assertThat(mappedResults.get(0)).containsEntry("item_id", "1");
+	}
+
+	@Test // GH-4443
+	void projectShouldResetContextToAvoidMappingFieldsAgainstANoLongerExistingTarget() {
+
+		Item item1 = Item.builder().itemId("1").tags(Arrays.asList("a", "b")).build();
+		Item item2 = Item.builder().itemId("1").tags(Arrays.asList("a", "c")).build();
+		mongoTemplate.insert(Arrays.asList(item1, item2), Item.class);
+
+		TypedAggregation<Item> aggregation = newAggregation(Item.class,
+				match(where("itemId").is("1")),
+				unwind("tags"),
+				project().and("itemId").as("itemId").and("tags").as("tags"),
+				match(where("itemId").is("1").and("tags").is("c")));
+
+		AggregationResults<Document> results = mongoTemplate.aggregate(aggregation, Document.class);
+		List<Document> mappedResults = results.getMappedResults();
+		assertThat(mappedResults).hasSize(1);
+		assertThat(mappedResults.get(0)).containsEntry("itemId", "1");
+	}
+
 	private void createUsersWithReferencedPersons() {
 
 		mongoTemplate.dropCollection(User.class);
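For reference, a sketch of the stages the first new test should now render, assuming `Item.itemId` is aliased to the raw field name `item_id` (which is what the `item_id` assertion implies); the literal rendered documents are not part of this diff.

```java
import java.util.List;

import org.bson.Document;

class ExpectedPipelineSketch {

	// { $match: { item_id: "1" } }, { $unwind: "$tags" }, { $match: { item_id: "1", tags: "c" } }
	static List<Document> expectedStages() {
		return List.of(
				new Document("$match", new Document("item_id", "1")),
				new Document("$unwind", "$tags"),
				new Document("$match", new Document("item_id", "1").append("tags", "c")));
	}
}
```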
@@ -2220,7 +2257,7 @@ public class AggregationTests {
 		List<Item> items;
 	}
 
-	// DATAMONGO-1491
+	// DATAMONGO-1491, GH-4443
 	@lombok.Data
 	@Builder
 	static class Item {
@@ -2229,6 +2266,7 @@ public class AggregationTests {
 		String itemId;
 		Integer quantity;
 		Long price;
+		List<String> tags = new ArrayList<>();
 	}
 
 	// DATAMONGO-1538
@@ -44,6 +44,7 @@ import org.springframework.data.mongodb.core.query.Criteria;
  * @author Thomas Darimont
  * @author Christoph Strobl
  * @author Mark Paluch
+ * @author Julia Lee
  */
 public class AggregationUnitTests {
 
@@ -607,7 +608,23 @@ public class AggregationUnitTests {
 		RelaxedTypeBasedAggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(WithRetypedIdField.class, mappingContext,
 				new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)));
 		Document document = project(WithRetypedIdField.class).toDocument(context);
-		assertThat(document).isEqualTo(new Document("$project", new Document("_id", 1).append("renamed-field", 1)));
+		assertThat(document).isEqualTo(new Document("$project", new Document("_id", 1).append("renamed-field", 1).append("entries", 1)));
 	}
 
+	@Test // GH-4443
+	void fieldsExposingContextShouldUseCustomFieldNameFromRelaxedRootContext() {
+
+		MongoMappingContext mappingContext = new MongoMappingContext();
+		RelaxedTypeBasedAggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(
+				WithRetypedIdField.class, mappingContext,
+				new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)));
+
+		TypedAggregation<WithRetypedIdField> agg = newAggregation(WithRetypedIdField.class,
+				unwind("entries"), match(where("foo").is("value 2")));
+		List<Document> pipeline = agg.toPipeline(context);
+
+		Document fields = getAsDocument(pipeline.get(1), "$match");
+		assertThat(fields.get("renamed-field")).isEqualTo("value 2");
+	}
+
 	private Document extractPipelineElement(Document agg, int index, String operation) {
@@ -625,5 +642,7 @@ public class AggregationUnitTests {
 		@org.springframework.data.mongodb.core.mapping.Field("renamed-field")
 		private String foo;
+
+		private List<String> entries = new ArrayList<>();
 	}
 }
@@ -35,6 +35,8 @@ import org.bson.types.ObjectId;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
 import org.mockito.Mockito;
 import org.mockito.junit.jupiter.MockitoExtension;
 
@@ -1212,24 +1214,26 @@ class UpdateMapperUnitTests {
 		assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("levelOne.a.b.d", "e")));
 	}
 
-	@Test // GH-3775
-	void mapNestedIntegerFieldCorrectly() {
+	@ParameterizedTest // GH-3775, GH-4426
+	@ValueSource(strings = {"levelOne.0.1.3", "levelOne.0.1.32", "levelOne2.0.1.32", "levelOne2.0.1.320"})
+	void mapNestedIntegerFieldCorrectly(String path) {
 
-		Update update = new Update().set("levelOne.0.1.3", "4");
+		Update update = new Update().set(path, "4");
 		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
 				context.getPersistentEntity(EntityWithNestedMap.class));
 
-		assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("levelOne.0.1.3", "4")));
+		assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document(path, "4")));
 	}
 
-	@Test // GH-3775
-	void mapNestedMixedStringIntegerFieldCorrectly() {
+	@ParameterizedTest // GH-3775, GH-4426
+	@ValueSource(strings = {"levelOne.0.1.c", "levelOne.0.1.c.32", "levelOne2.0.1.32.c", "levelOne2.0.1.c.320"})
+	void mapNestedMixedStringIntegerFieldCorrectly(String path) {
 
-		Update update = new Update().set("levelOne.0.1.c", "4");
+		Update update = new Update().set(path, "4");
 		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
 				context.getPersistentEntity(EntityWithNestedMap.class));
 
-		assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("levelOne.0.1.c", "4")));
+		assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document(path, "4")));
 	}
 
 	@Test // GH-3775
@@ -1719,6 +1723,7 @@ class UpdateMapperUnitTests {
 
 	static class EntityWithNestedMap {
 		Map<String, Map<String, Map<String, Object>>> levelOne;
+		Map<String, Map<String, Map<String, Object>>> levelOne2;
 	}
 
 	static class Customer {
@@ -0,0 +1 @@
+mock-maker-inline
@@ -88,7 +88,7 @@ class Person {
 ----
 Account account = …
 
-tempate.insert(account);                                            <2>
+template.insert(account);                                           <2>
 
 template.update(Person.class)
 	.matching(where("id").is(…))
@@ -441,7 +441,7 @@ class Entity {
   "lastname" : "Long",                                      <2>
 }
 ----
-<1> Read/wirte the keys `fn` & `ln` from/to the linkage document based on the lookup query.
+<1> Read/write the keys `fn` & `ln` from/to the linkage document based on the lookup query.
<2> Use non _id_ fields for the lookup of the target documents.
 ====
@@ -477,7 +477,7 @@ class ToDocumentPointerConverter implements Converter<ReferencedObject, Document
 	}
 }
 ----
-<1> Read/wirte the keys `_id` from/to the reference document to use them in the lookup query.
+<1> Read/write the keys `_id` from/to the reference document to use them in the lookup query.
<2> The collection name can be read from the reference document using its key.
 ====
@@ -33,7 +33,7 @@ embedded schema objects that describe properties and subdocuments.
 <2> `required` is a property that describes which properties are required in a document. It can be specified optionally, along with other
 schema constraints. See MongoDB's documentation on https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#available-keywords[available keywords].
 <3> `properties` is related to a schema object that describes an `object` type. It contains property-specific schema constraints.
-<4> `firstname` specifies constraints for the `firsname` field inside the document. Here, it is a string-based `properties` element declaring
+<4> `firstname` specifies constraints for the `firstname` field inside the document. Here, it is a string-based `properties` element declaring
 possible field values.
 <5> `address` is a subdocument defining a schema for values in its `postCode` field.
 ====
@@ -71,7 +71,7 @@ Therefore, the `Sort` properties are mapped against the methods return type `Per
 <4> `$skip`, `$limit` and `$sort` can be passed on via a `Pageable` argument. Same as in <2>, the operators are appended to the pipeline definition. Methods accepting `Pageable` can return `Slice` for easier pagination.
 <5> Aggregation methods can return `Stream` to consume results directly from an underlying cursor. Make sure to close the stream after consuming it to release the server-side cursor by either calling `close()` or through `try-with-resources`.
 <6> Map the result of an aggregation returning a single `Document` to an instance of a desired `SumValue` target type.
-<7> Aggregations resulting in single document holding just an accumulation result like eg. `$sum` can be extracted directly from the result `Document`.
+<7> Aggregations resulting in single document holding just an accumulation result like e.g. `$sum` can be extracted directly from the result `Document`.
 To gain more control, you might consider `AggregationResult` as method return type as shown in <7>.
 <8> Obtain the raw `AggregationResults` mapped to the generic target wrapper type `SumValue` or `org.bson.Document`.
 <9> Like in <6>, a single value can be directly obtained from multiple result ``Document``s.
@@ -1,4 +1,4 @@
-Spring Data MongoDB 3.4.13 (2021.2.13)
+Spring Data MongoDB 3.4.15 (2021.2.15)
 Copyright (c) [2010-2019] Pivotal Software, Inc.
 
 This product is licensed to you under the Apache License, Version 2.0 (the "License").
@@ -48,6 +48,8 @@ conditions of the subcomponent's license, as noted in the LICENSE file.