Compare commits
16 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | c8432ef71a |  |
|  | ad3cfda587 |  |
|  | 9a68ee4e7f |  |
|  | 7517527bda |  |
|  | 0bfb32a109 |  |
|  | ca44302a9f |  |
|  | 9ebbbde47e |  |
|  | 24b0119f10 |  |
|  | 8a97ea5a5e |  |
|  | d7a8206000 |  |
|  | 5c99c20ee2 |  |
|  | 7946e2b7c9 |  |
|  | adea4ba0a9 |  |
|  | 529a0c7b15 |  |
|  | 77a96229c8 |  |
|  | 4077224a04 |  |
.mvn/wrapper/maven-wrapper.properties (vendored, 4 changes)

@@ -1,2 +1,2 @@
-#Mon Jul 03 09:48:21 CEST 2023
-distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.3/apache-maven-3.9.3-bin.zip
+#Mon Aug 14 07:55:26 EDT 2023
+distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.4/apache-maven-3.9.4-bin.zip
@@ -1,5 +1,5 @@
 # Java versions
-java.main.tag=17.0.7_7-jdk-focal
+java.main.tag=17.0.8_7-jdk-focal
 java.next.tag=20-jdk-jammy
 
 # Docker container images - standard
@@ -7,12 +7,12 @@ docker.java.main.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/ecli
 docker.java.next.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.next.tag}
 
 # Supported versions of MongoDB
-docker.mongodb.4.4.version=4.4.22
-docker.mongodb.5.0.version=5.0.18
-docker.mongodb.6.0.version=6.0.7
+docker.mongodb.4.4.version=4.4.23
+docker.mongodb.5.0.version=5.0.19
+docker.mongodb.6.0.version=6.0.8
 
 # Supported versions of Redis
-docker.redis.6.version=6.2.12
+docker.redis.6.version=6.2.13
 
 # Supported versions of Cassandra
 docker.cassandra.3.version=3.11.15
 
pom.xml (6 changes)

@@ -5,7 +5,7 @@
 
 	<groupId>org.springframework.data</groupId>
 	<artifactId>spring-data-mongodb-parent</artifactId>
-	<version>4.1.2</version>
+	<version>4.1.3</version>
 	<packaging>pom</packaging>
 
 	<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
 	<parent>
 		<groupId>org.springframework.data.build</groupId>
 		<artifactId>spring-data-parent</artifactId>
-		<version>3.1.2</version>
+		<version>3.1.3</version>
 	</parent>
 
 	<modules>
@@ -26,7 +26,7 @@
 	<properties>
 		<project.type>multi</project.type>
 		<dist.id>spring-data-mongodb</dist.id>
-		<springdata.commons>3.1.2</springdata.commons>
+		<springdata.commons>3.1.3</springdata.commons>
 		<mongo>4.9.1</mongo>
 		<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
 		<jmh.version>1.19</jmh.version>
@@ -7,7 +7,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>4.1.2</version>
+		<version>4.1.3</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
 
@@ -15,7 +15,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>4.1.2</version>
+		<version>4.1.3</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
 
@@ -13,7 +13,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>4.1.2</version>
+		<version>4.1.3</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
 
@@ -203,8 +203,9 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
 					target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required));
 			}
 		}
-		return targetProperties.size() == 1 ? targetProperties.iterator().next()
+		JsonSchemaProperty schemaProperty = targetProperties.size() == 1 ? targetProperties.iterator().next()
 				: JsonSchemaProperty.merged(targetProperties);
+		return applyEncryptionDataIfNecessary(property, schemaProperty);
 	}
 }
@@ -63,7 +63,7 @@ class AggregationOperationRenderer {
 				contextToUse = new InheritingExposedFieldsAggregationOperationContext(fields, contextToUse);
 			} else {
 				contextToUse = fields.exposesNoFields() ? DEFAULT_CONTEXT
-						: new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), contextToUse);
+						: new ExposedFieldsAggregationOperationContext(fields, contextToUse);
 			}
 		}
 	}
@@ -0,0 +1,133 @@
/*
 * Copyright 2022-2024 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;

/**
 * A special field that points to a variable {@code $$} expression.
 *
 * @author Christoph Strobl
 * @since 4.1.3
 */
public interface AggregationVariable extends Field {

	String PREFIX = "$$";

	/**
	 * @return {@literal true} if the fields {@link #getName() name} does not match the defined {@link #getTarget()
	 *         target}.
	 */
	@Override
	default boolean isAliased() {
		return !ObjectUtils.nullSafeEquals(getName(), getTarget());
	}

	@Override
	default String getName() {
		return getTarget();
	}

	@Override
	default boolean isInternal() {
		return false;
	}

	/**
	 * Create a new {@link AggregationVariable} for the given name.
	 * <p>
	 * Variables start with {@code $$}. If not, the given value gets prefixed with {@code $$}.
	 *
	 * @param value must not be {@literal null}.
	 * @return new instance of {@link AggregationVariable}.
	 * @throws IllegalArgumentException if given value is {@literal null}.
	 */
	static AggregationVariable variable(String value) {

		Assert.notNull(value, "Value must not be null");
		return new AggregationVariable() {

			private final String val = AggregationVariable.prefixVariable(value);

			@Override
			public String getTarget() {
				return val;
			}
		};
	}

	/**
	 * Create a new {@link #isInternal() local} {@link AggregationVariable} for the given name.
	 * <p>
	 * Variables start with {@code $$}. If not, the given value gets prefixed with {@code $$}.
	 *
	 * @param value must not be {@literal null}.
	 * @return new instance of {@link AggregationVariable}.
	 * @throws IllegalArgumentException if given value is {@literal null}.
	 */
	static AggregationVariable localVariable(String value) {

		Assert.notNull(value, "Value must not be null");
		return new AggregationVariable() {

			private final String val = AggregationVariable.prefixVariable(value);

			@Override
			public String getTarget() {
				return val;
			}

			@Override
			public boolean isInternal() {
				return true;
			}
		};
	}

	/**
	 * Check if the given field name reference may be variable.
	 *
	 * @param fieldRef can be {@literal null}.
	 * @return true if given value matches the variable identification pattern.
	 */
	static boolean isVariable(@Nullable String fieldRef) {
		return fieldRef != null && fieldRef.stripLeading().matches("^\\$\\$\\w.*");
	}

	/**
	 * Check if the given field may be variable.
	 *
	 * @param field can be {@literal null}.
	 * @return true if given {@link Field field} is an {@link AggregationVariable} or if its value is a
	 *         {@link #isVariable(String) variable}.
	 */
	static boolean isVariable(Field field) {

		if (field instanceof AggregationVariable) {
			return true;
		}
		return isVariable(field.getTarget());
	}

	private static String prefixVariable(String variable) {

		var trimmed = variable.stripLeading();
		return trimmed.startsWith(PREFIX) ? trimmed : (PREFIX + trimmed);
	}

}
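The new `AggregationVariable` contract above is small enough to illustrate directly. The following is a minimal sketch, not part of the diff, showing what the two factory methods and the `isVariable` checks yield according to the interface defaults shown above:

```java
// Illustration only, derived from the AggregationVariable interface introduced in this change.
import org.springframework.data.mongodb.core.aggregation.AggregationVariable;

class AggregationVariableExamples {

	static void examples() {

		AggregationVariable now = AggregationVariable.variable("now");          // target "$$now", isInternal() == false
		AggregationVariable value = AggregationVariable.localVariable("value"); // target "$$value", isInternal() == true

		boolean matches = AggregationVariable.isVariable("$$this.name"); // true: matches the "$$" variable pattern
		boolean plain = AggregationVariable.isVariable("$name");         // false: a plain field reference
	}
}
```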
@@ -1515,24 +1515,15 @@ public class ArrayOperators {
 			}
 		}
 
-		public enum Variable implements Field {
+		public enum Variable implements AggregationVariable {
 
 			THIS {
-				@Override
-				public String getName() {
-					return "$$this";
-				}
-
 				@Override
 				public String getTarget() {
 					return "$$this";
 				}
 
-				@Override
-				public boolean isAliased() {
-					return false;
-				}
-
 				@Override
 				public String toString() {
 					return getName();
@@ -1540,27 +1531,23 @@ public class ArrayOperators {
 			},
 
 			VALUE {
-				@Override
-				public String getName() {
-					return "$$value";
-				}
-
 				@Override
 				public String getTarget() {
 					return "$$value";
 				}
 
-				@Override
-				public boolean isAliased() {
-					return false;
-				}
-
 				@Override
 				public String toString() {
 					return getName();
 				}
 			};
 
+			@Override
+			public boolean isInternal() {
+				return true;
+			}
+
 			/**
 			 * Create a {@link Field} reference to a given {@literal property} prefixed with the {@link Variable} identifier.
 			 * eg. {@code $$value.product}
@@ -1592,6 +1579,16 @@ public class ArrayOperators {
 					}
 				};
 			}
 
+			public static boolean isVariable(Field field) {
+
+				for (Variable var : values()) {
+					if (field.getTarget().startsWith(var.getTarget())) {
+						return true;
+					}
+				}
+				return false;
+			}
 		}
 	}
@@ -67,7 +67,7 @@ public final class Fields implements Iterable<Field> {
 
 		Assert.notNull(names, "Field names must not be null");
 
-		List<Field> fields = new ArrayList<Field>();
+		List<Field> fields = new ArrayList<>();
 
 		for (String name : names) {
 			fields.add(field(name));
@@ -114,7 +114,7 @@ public final class Fields implements Iterable<Field> {
 
 	private static List<Field> verify(List<Field> fields) {
 
-		Map<String, Field> reference = new HashMap<String, Field>();
+		Map<String, Field> reference = new HashMap<>();
 
 		for (Field field : fields) {
 
@@ -133,7 +133,7 @@ public final class Fields implements Iterable<Field> {
 
 	private Fields(Fields existing, Field tail) {
 
-		this.fields = new ArrayList<Field>(existing.fields.size() + 1);
+		this.fields = new ArrayList<>(existing.fields.size() + 1);
 		this.fields.addAll(existing.fields);
 		this.fields.add(tail);
 	}
@@ -245,7 +245,7 @@ public final class Fields implements Iterable<Field> {
 
 		private static String cleanUp(String source) {
 
-			if (SystemVariable.isReferingToSystemVariable(source)) {
+			if (AggregationVariable.isVariable(source)) {
 				return source;
 			}
 
@@ -253,10 +253,12 @@ public final class Fields implements Iterable<Field> {
 			return dollarIndex == -1 ? source : source.substring(dollarIndex + 1);
 		}
 
+		@Override
 		public String getName() {
 			return name;
 		}
 
+		@Override
 		public String getTarget() {
 
 			if (isLocalVar() || pointsToDBRefId()) {
@@ -15,6 +15,7 @@
  */
 package org.springframework.data.mongodb.core.aggregation;
 
+import org.bson.Document;
 import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
 
 /**
@@ -22,6 +23,7 @@ import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldRefe
  * {@link AggregationOperationContext}.
  *
  * @author Mark Paluch
+ * @author Christoph Strobl
  * @since 1.9
  */
 class InheritingExposedFieldsAggregationOperationContext extends ExposedFieldsAggregationOperationContext {
@@ -43,6 +45,11 @@ class InheritingExposedFieldsAggregationOperationContext extends ExposedFieldsAg
 		this.previousContext = previousContext;
 	}
 
+	@Override
+	public Document getMappedObject(Document document) {
+		return previousContext.getMappedObject(document);
+	}
+
 	@Override
 	protected FieldReference resolveExposedField(Field field, String name) {
 
@@ -21,7 +21,6 @@ import java.util.Collections;
 import java.util.List;
 
 import org.bson.Document;
 
 import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
-import org.springframework.expression.spel.ast.Projection;
 import org.springframework.util.Assert;
@@ -431,6 +430,7 @@ public class ReplaceRootOperation implements FieldsExposingAggregationOperation
 		 * @param context will never be {@literal null}.
 		 * @return never {@literal null}.
 		 */
+		@Override
 		Document toDocument(AggregationOperationContext context);
 	}
 
@@ -24,7 +24,7 @@ import org.springframework.lang.Nullable;
 * @author Christoph Strobl
 * @see <a href="https://docs.mongodb.com/manual/reference/aggregation-variables">Aggregation Variables</a>.
 */
-public enum SystemVariable {
+public enum SystemVariable implements AggregationVariable {
 
 	/**
 	 * Variable for the current datetime.
@@ -82,8 +82,6 @@ public enum SystemVariable {
 	 */
 	SEARCH_META;
 
-	private static final String PREFIX = "$$";
-
 	/**
 	 * Return {@literal true} if the given {@code fieldRef} denotes a well-known system variable, {@literal false}
 	 * otherwise.
@@ -93,13 +91,12 @@
 	 */
 	public static boolean isReferingToSystemVariable(@Nullable String fieldRef) {
 
-		if (fieldRef == null || !fieldRef.startsWith(PREFIX) || fieldRef.length() <= 2) {
+		String candidate = variableNameFrom(fieldRef);
+		if (candidate == null) {
 			return false;
 		}
 
-		int indexOfFirstDot = fieldRef.indexOf('.');
-		String candidate = fieldRef.substring(2, indexOfFirstDot == -1 ? fieldRef.length() : indexOfFirstDot);
-
+		candidate = candidate.startsWith(PREFIX) ? candidate.substring(2) : candidate;
 		for (SystemVariable value : values()) {
 			if (value.name().equals(candidate)) {
 				return true;
@@ -113,4 +110,20 @@
 	public String toString() {
 		return PREFIX.concat(name());
 	}
+
+	@Override
+	public String getTarget() {
+		return toString();
+	}
+
+	@Nullable
+	static String variableNameFrom(@Nullable String fieldRef) {
+
+		if (fieldRef == null || !fieldRef.startsWith(PREFIX) || fieldRef.length() <= 2) {
+			return null;
+		}
+
+		int indexOfFirstDot = fieldRef.indexOf('.');
+		return indexOfFirstDot == -1 ? fieldRef : fieldRef.substring(2, indexOfFirstDot);
+	}
 }
@@ -133,7 +133,7 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
 
 	protected FieldReference getReferenceFor(Field field) {
 
-		if(entity.getNullable() == null) {
+		if(entity.getNullable() == null || AggregationVariable.isVariable(field)) {
 			return new DirectFieldReference(new ExposedField(field, true));
 		}
 
@@ -36,6 +36,7 @@ import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.Timest
 import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;
 import org.springframework.util.ObjectUtils;
+import org.springframework.util.StringUtils;
 
 /**
  * {@link JsonSchemaProperty} implementation.
@@ -1139,7 +1140,9 @@ public class IdentifiableJsonSchemaProperty<T extends JsonSchemaObject> implemen
 				enc.append("bsonType", type.toBsonType().value()); // TODO: no samples with type -> is it bson type all the way?
 			}
 
-			enc.append("algorithm", algorithm);
+			if (StringUtils.hasText(algorithm)) {
+				enc.append("algorithm", algorithm);
+			}
 
 			propertySpecification.append("encrypt", enc);
 
@@ -271,6 +271,17 @@ class MappingMongoJsonSchemaCreatorUnitTests {
 				.containsEntry("properties.value", new Document("type", "string"));
 	}
 
+	@Test // GH-4454
+	void wrapEncryptedEntityTypeLikeProperty() {
+
+		MongoJsonSchema schema = MongoJsonSchemaCreator.create() //
+				.filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields
+				.createSchemaFor(WithEncryptedEntityLikeProperty.class);
+
+		assertThat(schema.schemaDocument()) //
+				.containsEntry("properties.domainTypeValue", Document.parse("{'encrypt': {'bsonType': 'object' } }"));
+	}
+
 	// --> TYPES AND JSON
 
 	// --> ENUM
@@ -676,4 +687,9 @@ class MappingMongoJsonSchemaCreatorUnitTests {
 	static class PropertyClashWithA {
 		Integer aNonEncrypted;
 	}
+
+	@Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
+	static class WithEncryptedEntityLikeProperty {
+		@Encrypted SomeDomainType domainTypeValue;
+	}
 }
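Read together with the `MappingMongoJsonSchemaCreator` and `IdentifiableJsonSchemaProperty` changes earlier in this compare, the GH-4454 test above pins down the intended output: an `@Encrypted` property whose type is annotated but declares no algorithm of its own now renders only `bsonType` under `encrypt`. A small sketch of that expected schema fragment, mirroring the assertion in `wrapEncryptedEntityTypeLikeProperty()`:

```java
// Illustration only: the schema fragment asserted in wrapEncryptedEntityTypeLikeProperty(),
// built with org.bson.Document for readability.
import org.bson.Document;

class EncryptedSchemaFragmentExample {

	static Document expectedFragment() {
		// {"domainTypeValue": {"encrypt": {"bsonType": "object"}}}
		return new Document("domainTypeValue",
				new Document("encrypt", new Document("bsonType", "object")));
	}
}
```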
@@ -35,8 +35,10 @@ import org.springframework.context.annotation.Configuration;
 import org.springframework.dao.DataIntegrityViolationException;
 import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
 import org.springframework.data.mongodb.core.CollectionOptions.ValidationOptions;
+import org.springframework.data.mongodb.core.mapping.Encrypted;
 import org.springframework.data.mongodb.core.mapping.Field;
 import org.springframework.data.mongodb.core.query.Criteria;
+import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
 import org.springframework.data.mongodb.test.util.Client;
 import org.springframework.data.mongodb.test.util.MongoClientExtension;
 import org.springframework.lang.Nullable;
@@ -48,11 +50,13 @@ import com.mongodb.client.model.ValidationLevel;
 
 /**
  * Integration tests for {@link CollectionOptions#validation(ValidationOptions)} using
- * {@link org.springframework.data.mongodb.core.validation.CriteriaValidator} and
- * {@link org.springframework.data.mongodb.core.validation.DocumentValidator}.
+ * {@link org.springframework.data.mongodb.core.validation.CriteriaValidator},
+ * {@link org.springframework.data.mongodb.core.validation.DocumentValidator} and
+ * {@link org.springframework.data.mongodb.core.validation.JsonSchemaValidator}.
 *
 * @author Andreas Zink
 * @author Christoph Strobl
+ * @author Julia Lee
 */
@ExtendWith({ MongoClientExtension.class, SpringExtension.class })
public class MongoTemplateValidationTests {
@@ -188,6 +192,20 @@ public class MongoTemplateValidationTests {
		assertThat(getValidatorInfo(COLLECTION_NAME)).isEqualTo(new Document("customName", new Document("$type", "bool")));
	}

+	@Test // GH-4454
+	public void failsJsonSchemaValidationForEncryptedDomainEntityProperty() {
+
+		MongoJsonSchema schema = MongoJsonSchemaCreator.create().createSchemaFor(BeanWithEncryptedDomainEntity.class);
+		template.createCollection(COLLECTION_NAME, CollectionOptions.empty().schema(schema));
+
+		BeanWithEncryptedDomainEntity person = new BeanWithEncryptedDomainEntity();
+		person.encryptedDomainEntity = new SimpleBean("some string", 100, null);
+
+		assertThatExceptionOfType(DataIntegrityViolationException.class)
+				.isThrownBy(() -> template.save(person))
+				.withMessageContaining("Document failed validation");
+	}
+
	private Document getCollectionOptions(String collectionName) {
		return getCollectionInfo(collectionName).get("options", Document.class);
	}
@@ -222,4 +240,10 @@ public class MongoTemplateValidationTests {
		private @Nullable Integer rangedInteger;
		private @Field("customName") Object customFieldName;
	}
+
+	@org.springframework.data.mongodb.core.mapping.Document(collection = COLLECTION_NAME)
+	@Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
+	static class BeanWithEncryptedDomainEntity {
+		@Encrypted SimpleBean encryptedDomainEntity;
+	}
 }
@@ -0,0 +1,118 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

import static org.assertj.core.api.Assertions.*;
import static org.mockito.Mockito.*;

import java.util.List;

import org.assertj.core.api.InstanceOfAssertFactories;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;

/**
 * @author Christoph Strobl
 */
public class AggregationOperationRendererUnitTests {

	@Test // GH-4443
	void nonFieldsExposingAggregationOperationContinuesWithSameContextForNextStage() {

		AggregationOperationContext rootContext = mock(AggregationOperationContext.class);
		AggregationOperation stage1 = mock(AggregationOperation.class);
		AggregationOperation stage2 = mock(AggregationOperation.class);

		AggregationOperationRenderer.toDocument(List.of(stage1, stage2), rootContext);

		verify(stage1).toPipelineStages(eq(rootContext));
		verify(stage2).toPipelineStages(eq(rootContext));
	}

	@Test // GH-4443
	void fieldsExposingAggregationOperationNotExposingFieldsForcesUseOfDefaultContextForNextStage() {

		AggregationOperationContext rootContext = mock(AggregationOperationContext.class);
		FieldsExposingAggregationOperation stage1 = mock(FieldsExposingAggregationOperation.class);
		ExposedFields stage1fields = mock(ExposedFields.class);
		AggregationOperation stage2 = mock(AggregationOperation.class);

		when(stage1.getFields()).thenReturn(stage1fields);
		when(stage1fields.exposesNoFields()).thenReturn(true);

		AggregationOperationRenderer.toDocument(List.of(stage1, stage2), rootContext);

		verify(stage1).toPipelineStages(eq(rootContext));
		verify(stage2).toPipelineStages(eq(AggregationOperationRenderer.DEFAULT_CONTEXT));
	}

	@Test // GH-4443
	void fieldsExposingAggregationOperationForcesNewContextForNextStage() {

		AggregationOperationContext rootContext = mock(AggregationOperationContext.class);
		FieldsExposingAggregationOperation stage1 = mock(FieldsExposingAggregationOperation.class);
		ExposedFields stage1fields = mock(ExposedFields.class);
		AggregationOperation stage2 = mock(AggregationOperation.class);

		when(stage1.getFields()).thenReturn(stage1fields);
		when(stage1fields.exposesNoFields()).thenReturn(false);

		ArgumentCaptor<AggregationOperationContext> captor = ArgumentCaptor.forClass(AggregationOperationContext.class);

		AggregationOperationRenderer.toDocument(List.of(stage1, stage2), rootContext);

		verify(stage1).toPipelineStages(eq(rootContext));
		verify(stage2).toPipelineStages(captor.capture());

		assertThat(captor.getValue()).isInstanceOf(ExposedFieldsAggregationOperationContext.class)
				.isNotInstanceOf(InheritingExposedFieldsAggregationOperationContext.class);
	}

	@Test // GH-4443
	void inheritingFieldsExposingAggregationOperationForcesNewContextForNextStageKeepingReferenceToPreviousContext() {

		AggregationOperationContext rootContext = mock(AggregationOperationContext.class);
		InheritsFieldsAggregationOperation stage1 = mock(InheritsFieldsAggregationOperation.class);
		InheritsFieldsAggregationOperation stage2 = mock(InheritsFieldsAggregationOperation.class);
		InheritsFieldsAggregationOperation stage3 = mock(InheritsFieldsAggregationOperation.class);

		ExposedFields exposedFields = mock(ExposedFields.class);
		when(exposedFields.exposesNoFields()).thenReturn(false);
		when(stage1.getFields()).thenReturn(exposedFields);
		when(stage2.getFields()).thenReturn(exposedFields);
		when(stage3.getFields()).thenReturn(exposedFields);

		ArgumentCaptor<AggregationOperationContext> captor = ArgumentCaptor.forClass(AggregationOperationContext.class);

		AggregationOperationRenderer.toDocument(List.of(stage1, stage2, stage3), rootContext);

		verify(stage1).toPipelineStages(captor.capture());
		verify(stage2).toPipelineStages(captor.capture());
		verify(stage3).toPipelineStages(captor.capture());

		assertThat(captor.getAllValues().get(0)).isEqualTo(rootContext);

		assertThat(captor.getAllValues().get(1))
				.asInstanceOf(InstanceOfAssertFactories.type(InheritingExposedFieldsAggregationOperationContext.class))
				.extracting("previousContext").isSameAs(captor.getAllValues().get(0));

		assertThat(captor.getAllValues().get(2))
				.asInstanceOf(InstanceOfAssertFactories.type(InheritingExposedFieldsAggregationOperationContext.class))
				.extracting("previousContext").isSameAs(captor.getAllValues().get(1));
	}

}
@@ -91,6 +91,7 @@ import com.mongodb.client.MongoCollection;
 * @author Sergey Shcherbakov
 * @author Minsu Kim
 * @author Sangyong Choi
+ * @author Julia Lee
 */
@ExtendWith(MongoTemplateExtension.class)
public class AggregationTests {
@@ -119,7 +120,7 @@ public class AggregationTests {

		mongoTemplate.flush(Product.class, UserWithLikes.class, DATAMONGO753.class, Data.class, DATAMONGO788.class,
				User.class, Person.class, Reservation.class, Venue.class, MeterData.class, LineItem.class, InventoryItem.class,
-				Sales.class, Sales2.class, Employee.class, Art.class, Venue.class);
+				Sales.class, Sales2.class, Employee.class, Art.class, Venue.class, Item.class);

		mongoTemplate.dropCollection(INPUT_COLLECTION);
		mongoTemplate.dropCollection("personQueryTemp");
@@ -1992,6 +1993,42 @@
		assertThat(aggregate.getMappedResults()).contains(widget);
	}

+	@Test // GH-4443
+	void shouldHonorFieldAliasesForFieldReferencesUsingFieldExposingOperation() {
+
+		Item item1 = Item.builder().itemId("1").tags(Arrays.asList("a", "b")).build();
+		Item item2 = Item.builder().itemId("1").tags(Arrays.asList("a", "c")).build();
+		mongoTemplate.insert(Arrays.asList(item1, item2), Item.class);
+
+		TypedAggregation<Item> aggregation = newAggregation(Item.class,
+				match(where("itemId").is("1")),
+				unwind("tags"),
+				match(where("itemId").is("1").and("tags").is("c")));
+		AggregationResults<Document> results = mongoTemplate.aggregate(aggregation, Document.class);
+		List<Document> mappedResults = results.getMappedResults();
+		assertThat(mappedResults).hasSize(1);
+		assertThat(mappedResults.get(0)).containsEntry("item_id", "1");
+	}
+
+	@Test // GH-4443
+	void projectShouldResetContextToAvoidMappingFieldsAgainstANoLongerExistingTarget() {
+
+		Item item1 = Item.builder().itemId("1").tags(Arrays.asList("a", "b")).build();
+		Item item2 = Item.builder().itemId("1").tags(Arrays.asList("a", "c")).build();
+		mongoTemplate.insert(Arrays.asList(item1, item2), Item.class);
+
+		TypedAggregation<Item> aggregation = newAggregation(Item.class,
+				match(where("itemId").is("1")),
+				unwind("tags"),
+				project().and("itemId").as("itemId").and("tags").as("tags"),
+				match(where("itemId").is("1").and("tags").is("c")));
+
+		AggregationResults<Document> results = mongoTemplate.aggregate(aggregation, Document.class);
+		List<Document> mappedResults = results.getMappedResults();
+		assertThat(mappedResults).hasSize(1);
+		assertThat(mappedResults.get(0)).containsEntry("itemId", "1");
+	}
+
	private void createUsersWithReferencedPersons() {

		mongoTemplate.dropCollection(User.class);
@@ -2244,7 +2281,7 @@
		List<Item> items;
	}

-	// DATAMONGO-1491
+	// DATAMONGO-1491, GH-4443
	@lombok.Data
	@Builder
	static class Item {
@@ -2253,6 +2290,7 @@
		String itemId;
		Integer quantity;
		Long price;
+		List<String> tags = new ArrayList<>();
	}

	// DATAMONGO-1538
@@ -49,6 +49,7 @@ import com.mongodb.client.model.Projections;
 * @author Thomas Darimont
 * @author Christoph Strobl
 * @author Mark Paluch
+ * @author Julia Lee
 */
public class AggregationUnitTests {

@@ -612,7 +613,7 @@
				WithRetypedIdField.class, mappingContext,
				new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)));
		Document document = project(WithRetypedIdField.class).toDocument(context);
-		assertThat(document).isEqualTo(new Document("$project", new Document("_id", 1).append("renamed-field", 1)));
+		assertThat(document).isEqualTo(new Document("$project", new Document("_id", 1).append("renamed-field", 1).append("entries", 1)));
	}

	@Test // GH-4038
@@ -653,6 +654,22 @@
		assertThat(documents.get(2)).isEqualTo("{ $sort : { 'serial_number' : -1, 'label_name' : -1 } }");
	}

+	@Test // GH-4443
+	void fieldsExposingContextShouldUseCustomFieldNameFromRelaxedRootContext() {
+
+		MongoMappingContext mappingContext = new MongoMappingContext();
+		RelaxedTypeBasedAggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(
+				WithRetypedIdField.class, mappingContext,
+				new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)));
+
+		TypedAggregation<WithRetypedIdField> agg = newAggregation(WithRetypedIdField.class,
+				unwind("entries"), match(where("foo").is("value 2")));
+		List<Document> pipeline = agg.toPipeline(context);
+
+		Document fields = getAsDocument(pipeline.get(1), "$match");
+		assertThat(fields.get("renamed-field")).isEqualTo("value 2");
+	}
+
	private Document extractPipelineElement(Document agg, int index, String operation) {

		List<Document> pipeline = (List<Document>) agg.get("pipeline");
@@ -672,5 +689,7 @@

		@org.springframework.data.mongodb.core.mapping.Field("renamed-field") private String foo;

+		private List<String> entries = new ArrayList<>();
+
	}
}
@@ -0,0 +1,97 @@
/*
 * Copyright 2022-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

import static org.assertj.core.api.Assertions.*;

import org.junit.jupiter.api.Test;
import org.mockito.Mockito;

/**
 * Unit tests for {@link AggregationVariable}.
 *
 * @author Christoph Strobl
 */
class AggregationVariableUnitTests {

	@Test // GH-4070
	void variableErrorsOnNullValue() {
		assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> AggregationVariable.variable(null));
	}

	@Test // GH-4070
	void createsVariable() {

		var variable = AggregationVariable.variable("$$now");

		assertThat(variable.getTarget()).isEqualTo("$$now");
		assertThat(variable.isInternal()).isFalse();
	}

	@Test // GH-4070
	void prefixesVariableIfNeeded() {

		var variable = AggregationVariable.variable("this");

		assertThat(variable.getTarget()).isEqualTo("$$this");
	}

	@Test // GH-4070
	void localVariableErrorsOnNullValue() {
		assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> AggregationVariable.localVariable(null));
	}

	@Test // GH-4070
	void localVariable() {

		var variable = AggregationVariable.localVariable("$$this");

		assertThat(variable.getTarget()).isEqualTo("$$this");
		assertThat(variable.isInternal()).isTrue();
	}

	@Test // GH-4070
	void prefixesLocalVariableIfNeeded() {

		var variable = AggregationVariable.localVariable("this");

		assertThat(variable.getTarget()).isEqualTo("$$this");
	}

	@Test // GH-4070
	void isVariableReturnsTrueForAggregationVariableTypes() {

		var variable = Mockito.mock(AggregationVariable.class);

		assertThat(AggregationVariable.isVariable(variable)).isTrue();
	}

	@Test // GH-4070
	void isVariableReturnsTrueForFieldThatTargetsVariable() {

		var variable = Fields.field("value", "$$this");

		assertThat(AggregationVariable.isVariable(variable)).isTrue();
	}

	@Test // GH-4070
	void isVariableReturnsFalseForFieldThatDontTargetsVariable() {

		var variable = Fields.field("value", "$this");

		assertThat(AggregationVariable.isVariable(variable)).isFalse();
	}
}
@@ -20,8 +20,6 @@ import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
 import static org.springframework.data.mongodb.core.aggregation.Fields.*;
 import static org.springframework.data.mongodb.test.util.Assertions.*;
 
-import lombok.AllArgsConstructor;
-
 import java.util.Arrays;
 import java.util.List;
 
@@ -39,8 +37,11 @@ import org.springframework.data.convert.CustomConversions;
 import org.springframework.data.domain.Sort;
 import org.springframework.data.domain.Sort.Direction;
 import org.springframework.data.mapping.MappingException;
+import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce;
+import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce.Variable;
 import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference;
 import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
+import org.springframework.data.mongodb.core.aggregation.SetOperators.SetUnion;
 import org.springframework.data.mongodb.core.convert.DbRefResolver;
 import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
 import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
@@ -455,14 +456,44 @@ public class TypeBasedAggregationOperationContextUnitTests {
				.isEqualTo(new Document("val", "$withUnwrapped.prefix-with-at-field-annotation"));
	}

+	@Test // GH-4070
+	void rendersLocalVariables() {
+
+		AggregationOperationContext context = getContext(WithLists.class);
+
+		Document agg = newAggregation(WithLists.class,
+				project()
+						.and(Reduce.arrayOf("listOfListOfString").withInitialValue(field("listOfString"))
+								.reduce(SetUnion.arrayAsSet(Variable.VALUE.getTarget()).union(Variable.THIS.getTarget())))
+						.as("listOfString")).toDocument("collection", context);
+
+		assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")).isEqualTo(Document.parse("""
+				{
+					"listOfString" : {
+						"$reduce" : {
+							"in" : { "$setUnion" : ["$$value", "$$this"] },
+							"initialValue" : "$listOfString",
+							"input" : "$listOfListOfString"
+						}
+					}
+				}
+				"""));
+	}
+
	@org.springframework.data.mongodb.core.mapping.Document(collection = "person")
-	@AllArgsConstructor
	public static class FooPerson {

		final ObjectId id;
		final String name;
		@org.springframework.data.mongodb.core.mapping.Field("last_name") final String lastName;
		final Age age;

+		public FooPerson(ObjectId id, String name, String lastName, Age age) {
+			this.id = id;
+			this.name = name;
+			this.lastName = lastName;
+			this.age = age;
+		}
	}

	public static class Age {
@@ -553,4 +584,9 @@ public class TypeBasedAggregationOperationContextUnitTests {
		@org.springframework.data.mongodb.core.mapping.Field("with-at-field-annotation") //
		String atFieldAnnotatedValue;
	}
+
+	static class WithLists {
+		public List<String> listOfString;
+		public List<List<String>> listOfListOfString;
+	}
 }
@@ -30,7 +30,6 @@ import java.math.BigInteger;
 import java.net.URL;
 import java.time.LocalDate;
 import java.time.LocalDateTime;
-import java.time.ZoneOffset;
 import java.time.temporal.ChronoUnit;
 import java.util.*;
 
@@ -110,6 +109,7 @@ import com.mongodb.DBRef;
 * @author Mark Paluch
 * @author Roman Puchkovskiy
 * @author Heesu Jung
+ * @author Julia Lee
 */
@ExtendWith(MockitoExtension.class)
class MappingMongoConverterUnitTests {
@@ -2623,7 +2623,7 @@
	void projectShouldReadSimpleInterfaceProjection() {

		org.bson.Document source = new org.bson.Document("birthDate",
-				Date.from(LocalDate.of(1999, 12, 1).atStartOfDay().toInstant(ZoneOffset.UTC))).append("foo", "Walter");
+				Date.from(LocalDate.of(1999, 12, 1).atStartOfDay(systemDefault()).toInstant())).append("foo", "Walter");

		EntityProjectionIntrospector discoverer = EntityProjectionIntrospector.create(converter.getProjectionFactory(),
				EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy()
@@ -2641,7 +2641,7 @@
	void projectShouldReadSimpleDtoProjection() {

		org.bson.Document source = new org.bson.Document("birthDate",
-				Date.from(LocalDate.of(1999, 12, 1).atStartOfDay().toInstant(ZoneOffset.UTC))).append("foo", "Walter");
+				Date.from(LocalDate.of(1999, 12, 1).atStartOfDay(systemDefault()).toInstant())).append("foo", "Walter");

		EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(),
				EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy()
@@ -70,6 +70,7 @@ import com.mongodb.client.vault.ClientEncryptions;
 
 /**
  * @author Christoph Strobl
+ * @author Julia Lee
  */
 public abstract class AbstractEncryptionTestBase {
 
@@ -450,7 +451,8 @@
		protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) {

			converterConfigurationAdapter
-					.registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext));
+					.registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext))
+					.useNativeDriverJavaTimeCodecs();
		}

		@Bean
@@ -45,6 +45,7 @@ import com.mongodb.client.vault.ClientEncryptions;
 * Encryption tests for client having {@link AutoEncryptionSettings#isBypassAutoEncryption()}.
 *
 * @author Christoph Strobl
+ * @author Julia Lee
 */
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = Config.class)
@@ -78,7 +79,8 @@ public class BypassAutoEncryptionTest extends AbstractEncryptionTestBase {
	protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) {

		converterConfigurationAdapter
-				.registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext));
+				.registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext))
+				.useNativeDriverJavaTimeCodecs();
	}

	@Bean
@@ -57,6 +57,7 @@ import com.mongodb.client.vault.ClientEncryptions;
 
 /**
  * @author Christoph Strobl
+ * @author Julia Lee
  */
 @ExtendWith(SpringExtension.class)
 @ContextConfiguration(classes = Config.class)
@@ -82,7 +83,8 @@ public class EncryptionTests extends AbstractEncryptionTestBase {
	protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) {

		converterConfigurationAdapter
-				.registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext));
+				.registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext))
+				.useNativeDriverJavaTimeCodecs();
	}

	@Bean
@@ -88,7 +88,7 @@ class Person {
 ----
 Account account = …
 
-tempate.insert(account); <2>
+template.insert(account); <2>
 
 template.update(Person.class)
     .matching(where("id").is(…))
@@ -441,7 +441,7 @@ class Entity {
   "lastname" : "Long", <2>
 }
 ----
-<1> Read/wirte the keys `fn` & `ln` from/to the linkage document based on the lookup query.
+<1> Read/write the keys `fn` & `ln` from/to the linkage document based on the lookup query.
 <2> Use non _id_ fields for the lookup of the target documents.
 ====
@@ -477,7 +477,7 @@ class ToDocumentPointerConverter implements Converter<ReferencedObject, Document
     }
 }
 ----
-<1> Read/wirte the keys `_id` from/to the reference document to use them in the lookup query.
+<1> Read/write the keys `_id` from/to the reference document to use them in the lookup query.
 <2> The collection name can be read from the reference document using its key.
 ====
@@ -350,6 +350,14 @@ You can add additional converters to the converter by overriding the `customConv
 MongoDB's native JSR-310 support can be enabled through `MongoConverterConfigurationAdapter.useNativeDriverJavaTimeCodecs()`.
 Also shown in the preceding example is a `LoggingEventListener`, which logs `MongoMappingEvent` instances that are posted onto Spring's `ApplicationContextEvent` infrastructure.
 
+[TIP]
+====
+.Java Time Types
+
+We recommend using MongoDB's native JSR-310 support via `MongoConverterConfigurationAdapter.useNativeDriverJavaTimeCodecs()` as described above as it is using an `UTC` based approach.
+The default JSR-310 support for `java.time` types inherited from Spring Data Commons uses the local machine timezone as reference and should only be used for backwards compatibility.
+====
+
 NOTE: `AbstractMongoClientConfiguration` creates a `MongoTemplate` instance and registers it with the container under the name `mongoTemplate`.
 
 The `base-package` property tells it where to scan for classes annotated with the `@org.springframework.data.mongodb.core.mapping.Document` annotation.
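The documentation hunk above recommends `MongoConverterConfigurationAdapter.useNativeDriverJavaTimeCodecs()`, and the encryption test configurations earlier in this compare show the call being added inside `configureConverters(...)`. As a minimal sketch, with a hypothetical class and database name, enabling the native JSR-310 codecs in Java config could look like this:

```java
// Sketch only: MyMongoConfig and the database name are illustrative, not taken from the diff.
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;

class MyMongoConfig extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "test"; // hypothetical database name
	}

	@Override
	protected void configureConverters(MongoConverterConfigurationAdapter adapter) {
		adapter.useNativeDriverJavaTimeCodecs(); // UTC-based java.time codecs provided by the MongoDB driver
	}
}
```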
@@ -33,7 +33,7 @@ embedded schema objects that describe properties and subdocuments.
 <2> `required` is a property that describes which properties are required in a document. It can be specified optionally, along with other
 schema constraints. See MongoDB's documentation on https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#available-keywords[available keywords].
 <3> `properties` is related to a schema object that describes an `object` type. It contains property-specific schema constraints.
-<4> `firstname` specifies constraints for the `firsname` field inside the document. Here, it is a string-based `properties` element declaring
+<4> `firstname` specifies constraints for the `firstname` field inside the document. Here, it is a string-based `properties` element declaring
 possible field values.
 <5> `address` is a subdocument defining a schema for values in its `postCode` field.
 ====
@@ -77,7 +77,7 @@ Therefore, the `Sort` properties are mapped against the methods return type `Per
 <4> `$skip`, `$limit` and `$sort` can be passed on via a `Pageable` argument. Same as in <2>, the operators are appended to the pipeline definition. Methods accepting `Pageable` can return `Slice` for easier pagination.
 <5> Aggregation methods can return `Stream` to consume results directly from an underlying cursor. Make sure to close the stream after consuming it to release the server-side cursor by either calling `close()` or through `try-with-resources`.
 <6> Map the result of an aggregation returning a single `Document` to an instance of a desired `SumValue` target type.
-<7> Aggregations resulting in single document holding just an accumulation result like eg. `$sum` can be extracted directly from the result `Document`.
+<7> Aggregations resulting in single document holding just an accumulation result like e.g. `$sum` can be extracted directly from the result `Document`.
 To gain more control, you might consider `AggregationResult` as method return type as shown in <7>.
 <8> Obtain the raw `AggregationResults` mapped to the generic target wrapper type `SumValue` or `org.bson.Document`.
 <9> Like in <6>, a single value can be directly obtained from multiple result ``Document``s.
@@ -1,4 +1,4 @@
-Spring Data MongoDB 4.1.2 (2023.0.2)
+Spring Data MongoDB 4.1.3 (2023.0.3)
 Copyright (c) [2010-2019] Pivotal Software, Inc.
 
 This product is licensed to you under the Apache License, Version 2.0 (the "License").
@@ -47,5 +47,6 @@ conditions of the subcomponent's license, as noted in the LICENSE file.
 
 
 
 
+