Compare commits

...

25 Commits

Author SHA1 Message Date
Mark Paluch
fd175af09c Release version 4.1.2 (2023.0.2).
See #4420
2023-07-14 13:54:07 +02:00
Mark Paluch
a6372e6629 Prepare 4.1.2 (2023.0.2).
See #4420
2023-07-14 13:52:53 +02:00
Mark Paluch
0b6c9978e6 Polishing.
Reformat code, replace known unsupported constructor with UnsupportedOperationException.

See #4432
Original pull request: #4439
2023-07-10 11:03:30 +02:00
Christoph Strobl
edeb423b0e Fix encryption of java.time types.
This commit makes sure to convert java.time types into their BsonValue representation before encrypting.

See #4432
Original pull request: #4439
2023-07-10 10:49:23 +02:00
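A minimal sketch (assumed names, not part of the change) of the conversion this fix relies on: java.time values are turned into their BsonDateTime representation before being handed to the encryption machinery, mirroring the BsonUtils additions further down in this diff.

import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneOffset;

import org.bson.BsonDateTime;
import org.bson.BsonValue;

// Illustrative helper only; the actual change lives in BsonUtils and the encryption converter.
class JavaTimeToBsonSketch {

    static BsonValue toBsonValue(Object source) {
        if (source instanceof Instant instant) {
            return new BsonDateTime(instant.toEpochMilli());
        }
        if (source instanceof LocalDateTime localDateTime) {
            return new BsonDateTime(localDateTime.toInstant(ZoneOffset.UTC).toEpochMilli());
        }
        if (source instanceof LocalDate localDate) {
            // LocalDate has no time component; anchor it at UTC start of day.
            return new BsonDateTime(localDate.atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli());
        }
        throw new IllegalArgumentException("Unsupported type: " + source.getClass());
    }
}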
Christoph Strobl
6cee61d807 Fix decryption when client is using AutoEncryptionSettings#isBypassAutoEncryption().
This commit makes sure that entries already decrypted by the driver are converted correctly when the client is configured with encryption settings.

Closes #4432
Original pull request: #4439
2023-07-10 10:49:21 +02:00
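For context, a hedged sketch of the client setup this fix targets; the key vault namespace and the local KMS key are placeholders modelled on the test configuration at the end of this diff. With bypassAutoEncryption the driver still decrypts values it reads but no longer encrypts writes automatically, which is why the converter must now cope with already decrypted entries.

import java.security.SecureRandom;
import java.util.Map;

import com.mongodb.AutoEncryptionSettings;
import com.mongodb.MongoClientSettings;

// Hypothetical configuration sketch; names and key material are placeholders.
class BypassAutoEncryptionSketch {

    static MongoClientSettings clientSettings() {
        byte[] localMasterKey = new byte[96];
        new SecureRandom().nextBytes(localMasterKey);
        Map<String, Map<String, Object>> kmsProviders = Map.of("local", Map.of("key", localMasterKey));

        AutoEncryptionSettings autoEncryption = AutoEncryptionSettings.builder()
                .keyVaultNamespace("encryption.testKeyVault") // placeholder namespace
                .kmsProviders(kmsProviders)
                .bypassAutoEncryption(true) // reads are decrypted by the driver, writes are not auto-encrypted
                .build();

        return MongoClientSettings.builder().autoEncryptionSettings(autoEncryption).build();
    }
}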
Mark Paluch
70530d6ee4 Update CI properties.
See #4420
2023-07-03 09:48:57 +02:00
Mark Paluch
9cfc46e589 Upgrade to Maven Wrapper 3.9.3.
See #4435
2023-07-03 09:48:21 +02:00
Christoph Strobl
7cb27c7465 Polishing.
Update tests to make use of ValueSource.
Replace regex-based path inspection with segment-by-segment analysis.

Original Pull Request: #4427
2023-06-28 13:30:50 +02:00
lijixue
0cd082e7fa Fix QueryMapper property path resolution for nested paths containing numeric values.
Prior to this fix, a path containing numeric values used as positional parameters was stripped in a way that left the last digit behind. This could lead to wrong path resolution when the incorrectly constructed property name accidentally matched an existing one.

Closes: #4426
Original Pull Request: #4427
2023-06-28 13:30:40 +02:00
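A small, self-contained illustration (not part of the change) of the pre-fix behavior, using the regex that the QueryMapper diff below removes:

import java.util.regex.Pattern;

// The pattern removed by this fix. For a trailing multi-digit segment such as ".32" it
// strips ".3" but leaves the "2" attached, producing a bogus property name like "levelOne2".
class PositionalPathStripDemo {

    private static final Pattern DOT_POSITIONAL_PATTERN = Pattern.compile("\\.\\d+(?!$)");

    public static void main(String[] args) {
        System.out.println(DOT_POSITIONAL_PATTERN.matcher("levelOne.0.1.32").replaceAll("")); // levelOne2 (wrong)
        System.out.println(DOT_POSITIONAL_PATTERN.matcher("levelOne.0.1.3").replaceAll(""));  // levelOne.3
    }
}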
John Blum
56e763c9c0 After release cleanups.
See #4386
2023-06-16 08:13:30 -07:00
John Blum
489f593395 Prepare next development iteration.
See #4386
2023-06-16 08:13:28 -07:00
John Blum
4f560f2ec3 Release version 4.1.1 (2023.0.1).
See #4386
2023-06-16 08:05:36 -07:00
John Blum
752d8c821e Prepare 4.1.1 (2023.0.1).
See #4386
2023-06-16 08:05:06 -07:00
Mark Paluch
b478e7068b Retain scroll direction across keyset scroll requests.
Closes #4413
2023-06-15 15:22:17 +02:00
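A usage sketch, assuming a Person document like the one in the new MongoTemplateScrollTests case further down: with this change, positions obtained from the returned Window keep scrolling backward instead of flipping to forward.

import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.domain.ScrollPosition;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Window;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Query;

class BackwardScrollSketch {

    // Placeholder for the mapped Person document used in the tests.
    record Person(String firstName, int age) {}

    Window<Person> firstBackwardPage(MongoTemplate template) {
        Query query = new Query(where("firstName").regex("J.*")).with(Sort.by("firstName", "age"));
        query.with(ScrollPosition.keyset().backward()).limit(3);

        // window.positionAt(i) now yields backward keyset positions as well.
        return template.scroll(query, Person.class);
    }
}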
Christoph Strobl
02fe73d052 Accept expression as input for filter aggregation operator.
Closes #4394
Original pull request: #4395
2023-06-14 14:19:34 +02:00
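A hedged usage example mirroring the new FilterExpressionUnitTests case further down; the field names are illustrative only.

import org.bson.Document;

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.ArrayOperators;
import org.springframework.data.mongodb.core.aggregation.ComparisonOperators;
import org.springframework.data.mongodb.core.aggregation.ObjectOperators;

class FilterByExpressionSketch {

    Document filterDocument() {
        // $objectToArray turns the document into an array that $filter can then process.
        return ArrayOperators.arrayOf(ObjectOperators.valueOf("data.metadata").toArray())
                .filter().as("item")
                .by(ComparisonOperators.valueOf("item.price").greaterThan("field-1"))
                .toDocument(Aggregation.DEFAULT_CONTEXT);
        // renders: { $filter : { input: { $objectToArray: "$data.metadata" },
        //                        as: "item", cond: { $gt: [ "$$item.price", "$field-1" ] } } }
    }
}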
Christoph Strobl
29021d132f Fix converter registration when using driver native time codec.
This commit prevents the date/time converters from being picked up as writing converters, which caused asymmetric write/read operations.

Closes #4390
Original pull request: #4392
2023-06-14 11:03:49 +02:00
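For reference, a minimal configuration sketch (matching the new MongoCustomConversionsUnitTests and MongoTemplateTests cases below) that opts into the driver's native java.time codecs; after this fix the Date-to-java.time converters are marked as reading converters only, so no write target is registered for Date and writes and reads stay symmetric.

import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;

class NativeJavaTimeCodecSketch {

    MongoCustomConversions conversions() {
        // Delegate java.time handling to the MongoDB driver's codecs instead of Spring converters.
        return MongoCustomConversions.create(MongoConverterConfigurationAdapter::useNativeDriverJavaTimeCodecs);
    }
}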
Mark Paluch
4d51d27cda Polishing.
Use extended switch syntax.

See #4404
Original pull request: #4412
2023-06-14 10:00:25 +02:00
Christoph Strobl
e2dc76eea3 Polishing.
Mark method potentially returning null as such and remove unused imports.

See #4404
Original pull request: #4412
2023-06-14 10:00:24 +02:00
Christoph Strobl
aecfd45968 Use exact matching for IN clause with ignore case.
Prior to this change, the generated pattern would have matched more entries than it should have. The behavior is now aligned with the counterpart that does not use the IgnoreCase flag.

Closes #4404
Original pull request: #4412
2023-06-14 10:00:24 +02:00
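A sketch of the pattern now produced for a case-insensitive IN value, based on the MongoRegexCreator and MongoQueryCreator changes below; the sample value is made up.

import org.bson.BsonRegularExpression;

import org.springframework.data.mongodb.core.query.MongoRegexCreator;
import org.springframework.data.mongodb.core.query.MongoRegexCreator.MatchMode;

class IgnoreCaseInSketch {

    BsonRegularExpression exactIgnoreCase(String value) {
        // EXACT anchors the whole value, e.g. "luke" becomes ^luke$ with the "i" option,
        // instead of the broader substring match generated before this change.
        String pattern = MongoRegexCreator.INSTANCE.toRegularExpression(value, MatchMode.EXACT);
        return new BsonRegularExpression(pattern, "i");
    }
}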
Mark Paluch
0c36929833 Upgrade to Maven Wrapper 3.9.2.
See #4409
2023-06-13 08:53:53 +02:00
Mark Paluch
bedd94fe17 Use snapshot and milestone repositories instead of libs-snapshot and libs-milestone.
Closes #4401
2023-06-06 09:47:17 +02:00
Mark Paluch
b85b53443b Polishing.
Add missing Override annotations.
2023-05-26 14:49:41 +02:00
Mark Paluch
99070162bb Update Jenkins triggers after GA release.
See #4369
2023-05-12 14:48:20 +02:00
Christoph Strobl
9218b22d12 After release cleanups.
See #4369
2023-05-12 14:19:11 +02:00
Christoph Strobl
e5aab51add Prepare next development iteration.
See #4369
2023-05-12 14:19:09 +02:00
30 changed files with 1271 additions and 579 deletions

View File

@@ -1,2 +1,2 @@
#Thu Apr 06 16:16:28 CEST 2023
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.1/apache-maven-3.9.1-bin.zip
#Mon Jul 03 09:48:21 CEST 2023
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.3/apache-maven-3.9.3-bin.zip

2
Jenkinsfile vendored
View File

@@ -9,7 +9,7 @@ pipeline {
triggers {
pollSCM 'H/10 * * * *'
upstream(upstreamProjects: "spring-data-commons/main", threshold: hudson.model.Result.SUCCESS)
upstream(upstreamProjects: "spring-data-commons/3.1.x", threshold: hudson.model.Result.SUCCESS)
}
options {

View File

@@ -1,5 +1,5 @@
# Java versions
java.main.tag=17.0.6_10-jdk-focal
java.main.tag=17.0.7_7-jdk-focal
java.next.tag=20-jdk-jammy
# Docker container images - standard
@@ -7,15 +7,15 @@ docker.java.main.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/ecli
docker.java.next.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.next.tag}
# Supported versions of MongoDB
docker.mongodb.4.4.version=4.4.18
docker.mongodb.5.0.version=5.0.14
docker.mongodb.6.0.version=6.0.4
docker.mongodb.4.4.version=4.4.22
docker.mongodb.5.0.version=5.0.18
docker.mongodb.6.0.version=6.0.7
# Supported versions of Redis
docker.redis.6.version=6.2.10
docker.redis.6.version=6.2.12
# Supported versions of Cassandra
docker.cassandra.3.version=3.11.14
docker.cassandra.3.version=3.11.15
# Docker environment settings
docker.java.inside.basic=-v $HOME:/tmp/jenkins-home

36
pom.xml
View File

@@ -5,7 +5,7 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.0</version>
<version>4.1.2</version>
<packaging>pom</packaging>
<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>3.1.0</version>
<version>3.1.2</version>
</parent>
<modules>
@@ -26,7 +26,7 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>3.1.0</springdata.commons>
<springdata.commons>3.1.2</springdata.commons>
<mongo>4.9.1</mongo>
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
<jmh.version>1.19</jmh.version>
@@ -144,34 +144,8 @@
</dependencies>
<repositories>
<repository>
<id>spring-libs-release</id>
<url>https://repo.spring.io/libs-release</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
<repository>
<id>sonatype-libs-snapshot</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
<releases>
<enabled>false</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>spring-plugins-release</id>
<url>https://repo.spring.io/plugins-release</url>
</pluginRepository>
<pluginRepository>
<id>spring-libs-milestone</id>
<url>https://repo.spring.io/libs-milestone</url>
</pluginRepository>
</pluginRepositories>
</project>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.0</version>
<version>4.1.2</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.0</version>
<version>4.1.2</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -80,15 +80,4 @@
</build>
<pluginRepositories>
<pluginRepository>
<id>spring-plugins-release</id>
<url>https://repo.spring.io/plugins-release</url>
</pluginRepository>
<pluginRepository>
<id>spring-plugins-snapshot</id>
<url>https://repo.spring.io/libs-snapshot</url>
</pluginRepository>
</pluginRepositories>
</project>

View File

@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.0</version>
<version>4.1.2</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -61,7 +61,8 @@ class ScrollUtils {
Document sortObject = query.getSortObject();
KeysetScrollPosition keyset = query.getKeyset();
KeysetScrollDirector director = KeysetScrollDirector.of(keyset.getDirection());
Direction direction = keyset.getDirection();
KeysetScrollDirector director = KeysetScrollDirector.of(direction);
List<T> resultsToUse = director.postPostProcessResults(result, query.getLimit());
@@ -71,7 +72,7 @@ class ScrollUtils {
Entity<T> entity = operations.forEntity(last);
Map<String, Object> keys = entity.extractKeys(sortObject, sourceType);
return ScrollPosition.forward(keys);
return ScrollPosition.of(keys, direction);
};
return Window.from(resultsToUse, positionFunction, hasMoreElements(result, query.getLimit()));

View File

@@ -79,7 +79,7 @@ public class ArrayOperators {
private final @Nullable String fieldReference;
private final @Nullable AggregationExpression expression;
private final @Nullable Collection values;
private final @Nullable Collection<?> values;
/**
* Creates new {@link ArrayOperatorFactory} for given {@literal fieldReference}.
@@ -214,6 +214,10 @@ public class ArrayOperators {
return Filter.filter(fieldReference);
}
if (usesExpression()) {
return Filter.filter(expression);
}
Assert.state(values != null, "Values must not be null");
return Filter.filter(new ArrayList<>(values));
}
@@ -317,7 +321,8 @@ public class ArrayOperators {
}
/**
* Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given {@link Sort order}.
* Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given {@link Sort
* order}.
*
* @return new instance of {@link SortArray}.
* @since 4.0
@@ -397,8 +402,8 @@ public class ArrayOperators {
}
/**
* Creates new {@link AggregationExpression} that return the last element in the given array.
* <strong>NOTE:</strong> Requires MongoDB 4.4 or later.
* Creates new {@link AggregationExpression} that return the last element in the given array. <strong>NOTE:</strong>
* Requires MongoDB 4.4 or later.
*
* @return new instance of {@link Last}.
* @since 3.4
@@ -649,6 +654,19 @@ public class ArrayOperators {
return new FilterExpressionBuilder().filter(field);
}
/**
* Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to.
*
* @param expression must not be {@literal null}.
* @return never {@literal null}.
* @since 4.2
*/
public static AsBuilder filter(AggregationExpression expression) {
Assert.notNull(expression, "Expression must not be null");
return new FilterExpressionBuilder().filter(expression);
}
/**
* Set the {@literal values} to apply the {@code $filter} to.
*
@@ -681,7 +699,16 @@ public class ArrayOperators {
}
private Object getMappedInput(AggregationOperationContext context) {
return input instanceof Field field ? context.getReference(field).toString() : input;
if (input instanceof Field field) {
return context.getReference(field).toString();
}
if (input instanceof AggregationExpression expression) {
return expression.toDocument(context);
}
return input;
}
private Object getMappedCondition(AggregationOperationContext context) {
@@ -715,6 +742,15 @@ public class ArrayOperators {
* @return
*/
AsBuilder filter(Field field);
/**
* Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to.
*
* @param expression must not be {@literal null}.
* @return
* @since 4.1.1
*/
AsBuilder filter(AggregationExpression expression);
}
/**
@@ -797,6 +833,14 @@ public class ArrayOperators {
return this;
}
@Override
public AsBuilder filter(AggregationExpression expression) {
Assert.notNull(expression, "Expression must not be null");
filter.input = expression;
return this;
}
@Override
public ConditionBuilder as(String variableName) {
@@ -1333,7 +1377,7 @@ public class ArrayOperators {
Assert.notNull(expressions, "PropertyExpressions must not be null");
return new Reduce(Fields.field(fieldReference), initialValue,
Arrays.<AggregationExpression>asList(expressions));
Arrays.<AggregationExpression> asList(expressions));
}
};
}
@@ -1690,7 +1734,7 @@ public class ArrayOperators {
* @author Christoph Strobl
* @author Shashank Sharma
* @see <a href=
* "https://docs.mongodb.com/manual/reference/operator/aggregation/in/">https://docs.mongodb.com/manual/reference/operator/aggregation/in/</a>
* "https://docs.mongodb.com/manual/reference/operator/aggregation/in/">https://docs.mongodb.com/manual/reference/operator/aggregation/in/</a>
* @since 2.2
*/
public static class In extends AbstractAggregationExpression {
@@ -1779,7 +1823,7 @@ public class ArrayOperators {
*
* @author Christoph Strobl
* @see <a href=
* "https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/">https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/</a>
* "https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/">https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/</a>
* @since 2.1
*/
public static class ArrayToObject extends AbstractAggregationExpression {
@@ -1976,7 +2020,7 @@ public class ArrayOperators {
/**
* Set the order to put elements in.
*
*
* @param sort must not be {@literal null}.
* @return new instance of {@link SortArray}.
*/

View File

@@ -41,6 +41,7 @@ import org.springframework.data.convert.PropertyValueConversions;
import org.springframework.data.convert.PropertyValueConverter;
import org.springframework.data.convert.PropertyValueConverterFactory;
import org.springframework.data.convert.PropertyValueConverterRegistrar;
import org.springframework.data.convert.ReadingConverter;
import org.springframework.data.convert.SimplePropertyValueConversions;
import org.springframework.data.convert.WritingConverter;
import org.springframework.data.mapping.model.SimpleTypeHolder;
@@ -361,6 +362,7 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
}, this.propertyValueConversions);
}
@ReadingConverter
private enum DateToUtcLocalDateTimeConverter implements Converter<Date, LocalDateTime> {
INSTANCE;
@@ -370,6 +372,7 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
}
}
@ReadingConverter
private enum DateToUtcLocalTimeConverter implements Converter<Date, LocalTime> {
INSTANCE;
@@ -379,6 +382,7 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
}
}
@ReadingConverter
private enum DateToUtcLocalDateConverter implements Converter<Date, LocalDate> {
INSTANCE;

View File

@@ -1089,7 +1089,7 @@ public class QueryMapper {
protected static class MetadataBackedField extends Field {
private static final Pattern POSITIONAL_PARAMETER_PATTERN = Pattern.compile("\\.\\$(\\[.*?\\])?");
private static final Pattern DOT_POSITIONAL_PATTERN = Pattern.compile("\\.\\d+(?!$)");
private static final Pattern NUMERIC_SEGMENT = Pattern.compile("\\d+");
private static final String INVALID_ASSOCIATION_REFERENCE = "Invalid path reference %s; Associations can only be pointed to directly or via their id property";
private final MongoPersistentEntity<?> entity;
@@ -1231,14 +1231,13 @@ public class QueryMapper {
private PersistentPropertyPath<MongoPersistentProperty> getPath(String pathExpression,
@Nullable MongoPersistentProperty sourceProperty) {
String rawPath = removePlaceholders(POSITIONAL_OPERATOR,
removePlaceholders(DOT_POSITIONAL_PATTERN, pathExpression));
if (sourceProperty != null && sourceProperty.getOwner().equals(entity)) {
return mappingContext.getPersistentPropertyPath(
PropertyPath.from(Pattern.quote(sourceProperty.getName()), entity.getTypeInformation()));
}
String rawPath = resolvePath(pathExpression);
PropertyPath path = forName(rawPath);
if (path == null || isPathToJavaLangClassProperty(path)) {
return null;
@@ -1333,6 +1332,38 @@ public class QueryMapper {
return false;
}
private static String resolvePath(String source) {
String[] segments = source.split("\\.");
if (segments.length == 1) {
return source;
}
List<String> path = new ArrayList<>(segments.length);
/* always start from a property, so we can skip the first segment.
from there remove any position placeholder */
for (int i = 1; i < segments.length; i++) {
String segment = segments[i];
if (segment.startsWith("[") && segment.endsWith("]")) {
continue;
}
if (NUMERIC_SEGMENT.matcher(segment).matches()) {
continue;
}
path.add(segment);
}
// when the property is followed only by placeholders, e.g. 'values.0.3.90'
// or when there is no difference in the number of segments
if (path.isEmpty() || segments.length == path.size() + 1) {
return source;
}
path.add(0, segments[0]);
return StringUtils.collectionToDelimitedString(path, ".");
}
/**
* Return the {@link Converter} to be used to create the mapped key. Default implementation will use
* {@link PropertyToFieldNameConverter}.

View File

@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.convert.encryption;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -63,7 +64,7 @@ public class MongoEncryptionConverter implements EncryptingConverter<Object, Obj
public Object read(Object value, MongoConversionContext context) {
Object decrypted = EncryptingConverter.super.read(value, context);
return decrypted instanceof BsonValue ? BsonUtils.toJavaType((BsonValue) decrypted) : decrypted;
return decrypted instanceof BsonValue bsonValue ? BsonUtils.toJavaType(bsonValue) : decrypted;
}
@Override
@@ -87,36 +88,56 @@ public class MongoEncryptionConverter implements EncryptingConverter<Object, Obj
}
MongoPersistentProperty persistentProperty = getProperty(context);
if (getProperty(context).isCollectionLike() && decryptedValue instanceof Iterable<?> iterable) {
int size = iterable instanceof Collection<?> c ? c.size() : 10;
if (!persistentProperty.isEntity()) {
Collection<Object> collection = CollectionFactory.createCollection(persistentProperty.getType(), size);
iterable.forEach(it -> collection.add(BsonUtils.toJavaType((BsonValue) it)));
iterable.forEach(it -> {
if (it instanceof BsonValue bsonValue) {
collection.add(BsonUtils.toJavaType(bsonValue));
} else {
collection.add(context.read(it, persistentProperty.getActualType()));
}
});
return collection;
} else {
Collection<Object> collection = CollectionFactory.createCollection(persistentProperty.getType(), size);
iterable.forEach(it -> {
collection.add(context.read(BsonUtils.toJavaType((BsonValue) it), persistentProperty.getActualType()));
if (it instanceof BsonValue bsonValue) {
collection.add(context.read(BsonUtils.toJavaType(bsonValue), persistentProperty.getActualType()));
} else {
collection.add(context.read(it, persistentProperty.getActualType()));
}
});
return collection;
}
}
if (!persistentProperty.isEntity() && decryptedValue instanceof BsonValue bsonValue) {
if (persistentProperty.isMap() && persistentProperty.getType() != Document.class) {
return new LinkedHashMap<>((Document) BsonUtils.toJavaType(bsonValue));
if (!persistentProperty.isEntity() && persistentProperty.isMap()) {
if (persistentProperty.getType() != Document.class) {
if (decryptedValue instanceof BsonValue bsonValue) {
return new LinkedHashMap<>((Document) BsonUtils.toJavaType(bsonValue));
}
if (decryptedValue instanceof Document document) {
return new LinkedHashMap<>(document);
}
if (decryptedValue instanceof Map map) {
return map;
}
}
return BsonUtils.toJavaType(bsonValue);
}
if (persistentProperty.isEntity() && decryptedValue instanceof BsonDocument bsonDocument) {
return context.read(BsonUtils.toJavaType(bsonDocument), persistentProperty.getTypeInformation().getType());
}
if (persistentProperty.isEntity() && decryptedValue instanceof Document document) {
return context.read(document, persistentProperty.getTypeInformation().getType());
}
return decryptedValue;
}

View File

@@ -89,25 +89,22 @@ public enum MongoRegexCreator {
String regex = prepareAndEscapeStringBeforeApplyingLikeRegex(source, matcherType);
switch (matcherType) {
case STARTING_WITH:
return String.format("^%s", regex);
case ENDING_WITH:
return String.format("%s$", regex);
case CONTAINING:
return String.format(".*%s.*", regex);
case EXACT:
return String.format("^%s$", regex);
default:
return regex;
}
return switch (matcherType) {
case STARTING_WITH -> String.format("^%s", regex);
case ENDING_WITH -> String.format("%s$", regex);
case CONTAINING -> String.format(".*%s.*", regex);
case EXACT -> String.format("^%s$", regex);
default -> regex;
};
}
/**
* @param source
* @return
* @since 2.2.14
* @deprecated since 4.1.1
*/
@Deprecated(since = "4.1.1", forRemoval = true)
public Object toCaseInsensitiveMatch(Object source) {
return source instanceof String stringValue ? new BsonRegularExpression(Pattern.quote(stringValue), "i") : source;
}

View File

@@ -87,11 +87,14 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
this.bucket = bucket;
}
@Override
public ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType,
@Nullable Object metadata) {
return store(content, filename, contentType, toDocument(metadata));
}
@Override
@SuppressWarnings("unchecked")
public <T> T store(GridFsObject<T, InputStream> upload) {
GridFSUploadOptions uploadOptions = computeUploadOptionsFor(upload.getOptions().getContentType(),
@@ -110,6 +113,7 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
return upload.getFileId();
}
@Override
public GridFSFindIterable find(Query query) {
Assert.notNull(query, "Query must not be null");
@@ -130,10 +134,12 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
return iterable;
}
@Override
public GridFSFile findOne(Query query) {
return find(query).first();
}
@Override
public void delete(Query query) {
for (GridFSFile gridFSFile : find(query)) {
@@ -141,10 +147,12 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
}
}
@Override
public ClassLoader getClassLoader() {
return dbFactory.getClass().getClassLoader();
}
@Override
public GridFsResource getResource(String location) {
return Optional.ofNullable(findOne(query(whereFilename().is(location)))) //
@@ -152,6 +160,7 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
.orElseGet(() -> GridFsResource.absent(location));
}
@Override
public GridFsResource getResource(GridFSFile file) {
Assert.notNull(file, "GridFSFile must not be null");
@@ -159,6 +168,7 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
return new GridFsResource(file, getGridFs().openDownloadStream(file.getId()));
}
@Override
public GridFsResource[] getResources(String locationPattern) {
if (!StringUtils.hasText(locationPattern)) {
@@ -184,6 +194,8 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
private GridFSBucket getGridFs() {
Assert.notNull(dbFactory, "MongoDatabaseFactory must not be null");
MongoDatabase db = dbFactory.getMongoDatabase();
return bucket == null ? GridFSBuckets.create(db) : GridFSBuckets.create(db, bucket);
}

View File

@@ -82,7 +82,7 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
*
* @param dbFactory must not be {@literal null}.
* @param converter must not be {@literal null}.
* @param bucket
* @param bucket can be {@literal null}.
*/
public ReactiveGridFsTemplate(ReactiveMongoDatabaseFactory dbFactory, MongoConverter converter,
@Nullable String bucket) {
@@ -96,7 +96,7 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
* @param dataBufferFactory must not be {@literal null}.
* @param dbFactory must not be {@literal null}.
* @param converter must not be {@literal null}.
* @param bucket
* @param bucket can be {@literal null}.
*/
public ReactiveGridFsTemplate(DataBufferFactory dataBufferFactory, ReactiveMongoDatabaseFactory dbFactory,
MongoConverter converter, @Nullable String bucket) {
@@ -117,6 +117,8 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
return store(content, filename, contentType, toDocument(metadata));
}
@Override
@SuppressWarnings("unchecked")
public <T> Mono<T> store(GridFsObject<T, Publisher<DataBuffer>> upload) {
GridFSUploadOptions uploadOptions = computeUploadOptionsFor(upload.getOptions().getContentType(),
@@ -274,6 +276,7 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
this.sortObject = sortObject;
}
@Override
public GridFSFindPublisher doInBucket(GridFSBucket bucket) {
GridFSFindPublisher findPublisher = bucket.find(queryObject).sort(sortObject);
@@ -311,21 +314,8 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
}
}
private static class UploadCallback implements ReactiveBucketCallback<Void> {
private final BsonValue fileId;
private final String filename;
private final Publisher<ByteBuffer> source;
private final GridFSUploadOptions uploadOptions;
public UploadCallback(BsonValue fileId, String filename, Publisher<ByteBuffer> source,
GridFSUploadOptions uploadOptions) {
this.fileId = fileId;
this.filename = filename;
this.source = source;
this.uploadOptions = uploadOptions;
}
private record UploadCallback(BsonValue fileId, String filename, Publisher<ByteBuffer> source,
GridFSUploadOptions uploadOptions) implements ReactiveBucketCallback<Void> {
@Override
public GridFSUploadPublisher<Void> doInBucket(GridFSBucket bucket) {
@@ -333,19 +323,8 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
}
}
private static class AutoIdCreatingUploadCallback implements ReactiveBucketCallback<ObjectId> {
private final String filename;
private final Publisher<ByteBuffer> source;
private final GridFSUploadOptions uploadOptions;
public AutoIdCreatingUploadCallback(String filename, Publisher<ByteBuffer> source,
GridFSUploadOptions uploadOptions) {
this.filename = filename;
this.source = source;
this.uploadOptions = uploadOptions;
}
private record AutoIdCreatingUploadCallback(String filename, Publisher<ByteBuffer> source,
GridFSUploadOptions uploadOptions) implements ReactiveBucketCallback<ObjectId> {
@Override
public GridFSUploadPublisher<ObjectId> doInBucket(GridFSBucket bucket) {
@@ -353,13 +332,7 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
}
}
private static class DeleteCallback implements ReactiveBucketCallback<Void> {
private final BsonValue id;
public DeleteCallback(BsonValue id) {
this.id = id;
}
private record DeleteCallback(BsonValue id) implements ReactiveBucketCallback<Void> {
@Override
public Publisher<Void> doInBucket(GridFSBucket bucket) {

View File

@@ -25,7 +25,7 @@ import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.bson.BsonRegularExpression;
import org.springframework.data.domain.Range;
import org.springframework.data.domain.Range.Bound;
import org.springframework.data.domain.Sort;
@@ -52,6 +52,7 @@ import org.springframework.data.repository.query.parser.Part.IgnoreCaseType;
import org.springframework.data.repository.query.parser.Part.Type;
import org.springframework.data.repository.query.parser.PartTree;
import org.springframework.data.util.Streamable;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;
@@ -352,6 +353,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
* @param part
* @return the regex options or {@literal null}.
*/
@Nullable
private String toRegexOptions(Part part) {
String regexOptions = null;
@@ -390,7 +392,18 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
Streamable<?> streamable = asStreamable(iterator.next());
if (!isSimpleComparisionPossible(part)) {
streamable = streamable.map(MongoRegexCreator.INSTANCE::toCaseInsensitiveMatch);
MatchMode matchMode = toMatchMode(part.getType());
String regexOptions = toRegexOptions(part);
streamable = streamable.map(it -> {
if (it instanceof String value) {
return new BsonRegularExpression(MongoRegexCreator.INSTANCE.toRegularExpression(value, matchMode),
regexOptions);
}
return it;
});
}
return streamable.toList();
@@ -481,6 +494,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
return MatchMode.REGEX;
case NEGATING_SIMPLE_PROPERTY:
case SIMPLE_PROPERTY:
case IN:
return MatchMode.EXACT;
default:
return MatchMode.DEFAULT;

View File

@@ -15,6 +15,12 @@
*/
package org.springframework.data.mongodb.util;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneOffset;
import java.time.temporal.Temporal;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
@@ -280,36 +286,22 @@ public class BsonUtils {
*/
public static Object toJavaType(BsonValue value) {
switch (value.getBsonType()) {
case INT32:
return value.asInt32().getValue();
case INT64:
return value.asInt64().getValue();
case STRING:
return value.asString().getValue();
case DECIMAL128:
return value.asDecimal128().doubleValue();
case DOUBLE:
return value.asDouble().getValue();
case BOOLEAN:
return value.asBoolean().getValue();
case OBJECT_ID:
return value.asObjectId().getValue();
case DB_POINTER:
return new DBRef(value.asDBPointer().getNamespace(), value.asDBPointer().getId());
case BINARY:
return value.asBinary().getData();
case DATE_TIME:
return new Date(value.asDateTime().getValue());
case SYMBOL:
return value.asSymbol().getSymbol();
case ARRAY:
return value.asArray().toArray();
case DOCUMENT:
return Document.parse(value.asDocument().toJson());
default:
return value;
}
return switch (value.getBsonType()) {
case INT32 -> value.asInt32().getValue();
case INT64 -> value.asInt64().getValue();
case STRING -> value.asString().getValue();
case DECIMAL128 -> value.asDecimal128().doubleValue();
case DOUBLE -> value.asDouble().getValue();
case BOOLEAN -> value.asBoolean().getValue();
case OBJECT_ID -> value.asObjectId().getValue();
case DB_POINTER -> new DBRef(value.asDBPointer().getNamespace(), value.asDBPointer().getId());
case BINARY -> value.asBinary().getData();
case DATE_TIME -> new Date(value.asDateTime().getValue());
case SYMBOL -> value.asSymbol().getSymbol();
case ARRAY -> value.asArray().toArray();
case DOCUMENT -> Document.parse(value.asDocument().toJson());
default -> value;
};
}
/**
@@ -358,17 +350,36 @@ public class BsonUtils {
return new BsonDouble(floatValue);
}
if(source instanceof Binary binary) {
if (source instanceof Binary binary) {
return new BsonBinary(binary.getType(), binary.getData());
}
if (source instanceof Temporal) {
if (source instanceof Instant value) {
return new BsonDateTime(value.toEpochMilli());
}
if (source instanceof LocalDateTime value) {
return new BsonDateTime(value.toInstant(ZoneOffset.UTC).toEpochMilli());
}
if (source instanceof LocalDate value) {
return new BsonDateTime(value.atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli());
}
if (source instanceof LocalTime value) {
return new BsonDateTime(value.atDate(LocalDate.ofEpochDay(0L)).toInstant(ZoneOffset.UTC).toEpochMilli());
}
}
if (source instanceof Date date) {
return new BsonDateTime(date.getTime());
}
throw new IllegalArgumentException(String.format("Unable to convert %s (%s) to BsonValue.", source,
source != null ? source.getClass().getName() : "null"));
}
/**
* Merge the given {@link Document documents} into one in the given order. Keys contained within multiple documents are
* overwritten by their follow ups.
* overwritten by their follow-ups.
*
* @param documents must not be {@literal null}. Can be empty.
* @return the document containing all key value pairs.
@@ -669,7 +680,7 @@ public class BsonUtils {
if (value instanceof Collection<?> collection) {
return toString(collection);
} else if (value instanceof Map<?,?> map) {
} else if (value instanceof Map<?, ?> map) {
return toString(map);
} else if (ObjectUtils.isArray(value)) {
return toString(Arrays.asList(ObjectUtils.toObjectArray(value)));
@@ -691,8 +702,9 @@ public class BsonUtils {
private static String toString(Map<?, ?> source) {
// Avoid String.format for performance
return iterableToDelimitedString(source.entrySet(), "{ ", " }",
entry -> String.format("\"%s\" : %s", entry.getKey(), toJson(entry.getValue())));
entry -> "\"" + entry.getKey() + "\" : " + toJson(entry.getValue()));
}
private static String toString(Collection<?> source) {

View File

@@ -193,9 +193,30 @@ class MongoTemplateScrollTests {
window = template.scroll(q.with(window.positionAt(0)).limit(2), Person.class);
assertThat(window).hasSize(2);
assertThat(window).containsOnly(john20, john40_1);
assertThat(window.hasNext()).isTrue();
assertThat(window.isLast()).isFalse();
assertThat(window).containsOnly(jane_20, jane_40);
assertThat(window.hasNext()).isFalse();
assertThat(window.isLast()).isTrue();
}
@Test // GH-4413
void shouldAllowInitialBackwardSort() {
Person jane_20 = new Person("Jane", 20);
Person jane_40 = new Person("Jane", 40);
Person jane_42 = new Person("Jane", 42);
Person john20 = new Person("John", 20);
Person john40_1 = new Person("John", 40);
Person john40_2 = new Person("John", 40);
template.insertAll(Arrays.asList(john20, john40_1, john40_2, jane_20, jane_40, jane_42));
Query q = new Query(where("firstName").regex("J.*")).with(Sort.by("firstName", "age"));
q.with(ScrollPosition.keyset().backward()).limit(3);
Window<Person> window = template.scroll(q, Person.class);
assertThat(window).containsExactly(john20, john40_1, john40_2);
window = template.scroll(q.with(window.positionAt(0)).limit(3), Person.class);
assertThat(window).containsExactly(jane_20, jane_40, jane_42);
}
@ParameterizedTest // GH-4308

View File

@@ -69,6 +69,8 @@ import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.aggregation.StringOperators;
import org.springframework.data.mongodb.core.convert.LazyLoadingProxy;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.IndexField;
@@ -1789,6 +1791,30 @@ public class MongoTemplateTests {
assertThat(result.get(0).date).isNotNull();
}
@Test // GH-4390
void nativeDriverDateTimeCodecShouldBeApplied/*when configured*/() {
MongoTestTemplate ops = new MongoTestTemplate(cfg -> {
cfg.configureConversion(conversion -> {
conversion.customConversions(
MongoCustomConversions.create(MongoConverterConfigurationAdapter::useNativeDriverJavaTimeCodecs));
});
});
TypeWithDate source = new TypeWithDate();
source.id = "id-1";
source.date = Date.from(Instant.now());
ops.save(source);
var dbDate = ops.execute(TypeWithDate.class,
collection -> collection.find(new org.bson.Document("_id", source.id)).first().get("date"));
TypeWithDate target = ops.findOne(query(where("date").is(source.date)), TypeWithDate.class);
assertThat(target.date).isEqualTo(source.date).isEqualTo(dbDate);
}
@Test // DATAMONGO-540
public void findOneAfterUpsertForNonExistingObjectReturnsTheInsertedObject() {

View File

@@ -0,0 +1,56 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import static org.assertj.core.api.AssertionsForClassTypes.*;
import static org.mockito.Mockito.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.Test;
import org.springframework.data.domain.KeysetScrollPosition;
import org.springframework.data.domain.ScrollPosition;
import org.springframework.data.domain.Window;
import org.springframework.data.mongodb.core.EntityOperations.Entity;
import org.springframework.data.mongodb.core.query.Query;
/**
* Unit tests for {@link ScrollUtils}.
*
* @author Mark Paluch
*/
class ScrollUtilsUnitTests {
@Test // GH-4413
void positionShouldRetainScrollDirection() {
Query query = new Query();
query.with(ScrollPosition.keyset().backward());
EntityOperations entityOperationsMock = mock(EntityOperations.class);
Entity entityMock = mock(Entity.class);
when(entityOperationsMock.forEntity(any())).thenReturn(entityMock);
when(entityMock.extractKeys(any(), any())).thenReturn(Map.of("k", "v"));
Window<Integer> window = ScrollUtils.createWindow(query, new ArrayList<>(List.of(1, 2, 3)), Integer.class,
entityOperationsMock);
assertThat(window.positionAt(0)).isInstanceOf(KeysetScrollPosition.class);
assertThat(((KeysetScrollPosition) window.positionAt(0)).scrollsBackward()).isTrue();
}
}

View File

@@ -117,6 +117,23 @@ class FilterExpressionUnitTests {
assertThat($filter).isEqualTo(new Document(expected));
}
@Test // GH-4394
void filterShouldAcceptExpression() {
Document $filter = ArrayOperators.arrayOf(ObjectOperators.valueOf("data.metadata").toArray()).filter().as("item")
.by(ComparisonOperators.valueOf("item.price").greaterThan("field-1")).toDocument(Aggregation.DEFAULT_CONTEXT);
Document expected = Document.parse("""
{ $filter : {
input: { $objectToArray: "$data.metadata" },
as: "item",
cond: { $gt: [ "$$item.price", "$field-1" ] }
}}
""");
assertThat($filter).isEqualTo(expected);
}
private Document extractFilterOperatorFromDocument(Document source) {
List<Object> pipeline = DocumentTestUtils.getAsDBList(source, "pipeline");

View File

@@ -64,6 +64,16 @@ class MongoCustomConversionsUnitTests {
assertThat(conversions.getPropertyValueConversions().hasValueConverter(persistentProperty)).isTrue();
}
@Test // GH-4390
void doesNotReturnConverterForNativeTimeTimeIfUsingDriverCodec() {
MongoCustomConversions conversions = MongoCustomConversions.create(config -> {
config.useNativeDriverJavaTimeCodecs();
});
assertThat(conversions.getCustomWriteTarget(Date.class)).isEmpty();
}
static class DateToZonedDateTimeConverter implements Converter<Date, ZonedDateTime> {
@Override

View File

@@ -35,6 +35,8 @@ import org.bson.types.ObjectId;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.core.convert.converter.Converter;
@@ -1213,24 +1215,26 @@ class UpdateMapperUnitTests {
assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("levelOne.a.b.d", "e")));
}
@Test // GH-3775
void mapNestedIntegerFieldCorrectly() {
@ParameterizedTest // GH-3775, GH-4426
@ValueSource(strings = {"levelOne.0.1.3", "levelOne.0.1.32", "levelOne2.0.1.32", "levelOne2.0.1.320"})
void mapNestedIntegerFieldCorrectly(String path) {
Update update = new Update().set("levelOne.0.1.3", "4");
Update update = new Update().set(path, "4");
Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
context.getPersistentEntity(EntityWithNestedMap.class));
assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("levelOne.0.1.3", "4")));
assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document(path, "4")));
}
@Test // GH-3775
void mapNestedMixedStringIntegerFieldCorrectly() {
@ParameterizedTest // GH-3775, GH-4426
@ValueSource(strings = {"levelOne.0.1.c", "levelOne.0.1.c.32", "levelOne2.0.1.32.c", "levelOne2.0.1.c.320"})
void mapNestedMixedStringIntegerFieldCorrectly(String path) {
Update update = new Update().set("levelOne.0.1.c", "4");
Update update = new Update().set(path, "4");
Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
context.getPersistentEntity(EntityWithNestedMap.class));
assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("levelOne.0.1.c", "4")));
assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document(path, "4")));
}
@Test // GH-3775
@@ -1720,6 +1724,7 @@ class UpdateMapperUnitTests {
static class EntityWithNestedMap {
Map<String, Map<String, Map<String, Object>>> levelOne;
Map<String, Map<String, Map<String, Object>>> levelOne2;
}
static class Customer {

View File

@@ -0,0 +1,754 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.encryption;
import static org.assertj.core.api.Assertions.*;
import static org.springframework.data.mongodb.core.EncryptionAlgorithms.*;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import java.security.SecureRandom;
import java.time.LocalDate;
import java.time.Month;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import org.assertj.core.api.Assertions;
import org.bson.BsonBinary;
import org.bson.Document;
import org.bson.types.Binary;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.dao.PermissionDeniedDataAccessException;
import org.springframework.data.convert.PropertyValueConverterFactory;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
import org.springframework.data.mongodb.core.convert.encryption.MongoEncryptionConverter;
import org.springframework.data.mongodb.core.mapping.ExplicitEncrypted;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.util.Lazy;
import com.mongodb.ClientEncryptionSettings;
import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings;
import com.mongodb.MongoNamespace;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.Indexes;
import com.mongodb.client.model.vault.DataKeyOptions;
import com.mongodb.client.vault.ClientEncryption;
import com.mongodb.client.vault.ClientEncryptions;
/**
* @author Christoph Strobl
*/
public abstract class AbstractEncryptionTestBase {
@Autowired MongoTemplate template;
@Test // GH-4284
void encryptAndDecryptSimpleValue() {
Person source = new Person();
source.id = "id-1";
source.ssn = "mySecretSSN";
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("ssn")).isInstanceOf(Binary.class)) //
.loadedIsEqualToSource();
}
@Test // GH-4432
void encryptAndDecryptJavaTime() {
Person source = new Person();
source.id = "id-1";
source.today = LocalDate.of(1979, Month.SEPTEMBER, 18);
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("today")).isInstanceOf(Binary.class)) //
.loadedIsEqualToSource();
}
@Test // GH-4284
void encryptAndDecryptComplexValue() {
Person source = new Person();
source.id = "id-1";
source.address = new Address();
source.address.city = "NYC";
source.address.street = "4th Ave.";
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("address")).isInstanceOf(Binary.class)) //
.loadedIsEqualToSource();
}
@Test // GH-4284
void encryptAndDecryptValueWithinComplexOne() {
Person source = new Person();
source.id = "id-1";
source.encryptedZip = new AddressWithEncryptedZip();
source.encryptedZip.city = "Boston";
source.encryptedZip.street = "central square";
source.encryptedZip.zip = "1234567890";
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> {
assertThat(it.get("encryptedZip")).isInstanceOf(Document.class);
assertThat(it.get("encryptedZip", Document.class).get("city")).isInstanceOf(String.class);
assertThat(it.get("encryptedZip", Document.class).get("street")).isInstanceOf(String.class);
assertThat(it.get("encryptedZip", Document.class).get("zip")).isInstanceOf(Binary.class);
}) //
.loadedIsEqualToSource();
}
@Test // GH-4284
void encryptAndDecryptListOfSimpleValue() {
Person source = new Person();
source.id = "id-1";
source.listOfString = Arrays.asList("spring", "data", "mongodb");
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("listOfString")).isInstanceOf(Binary.class)) //
.loadedIsEqualToSource();
}
@Test // GH-4284
void encryptAndDecryptListOfComplexValue() {
Person source = new Person();
source.id = "id-1";
Address address = new Address();
address.city = "SFO";
address.street = "---";
source.listOfComplex = Collections.singletonList(address);
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("listOfComplex")).isInstanceOf(Binary.class)) //
.loadedIsEqualToSource();
}
@Test // GH-4284
void encryptAndDecryptMapOfSimpleValues() {
Person source = new Person();
source.id = "id-1";
source.mapOfString = Map.of("k1", "v1", "k2", "v2");
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("mapOfString")).isInstanceOf(Binary.class)) //
.loadedIsEqualToSource();
}
@Test // GH-4284
void encryptAndDecryptMapOfComplexValues() {
Person source = new Person();
source.id = "id-1";
Address address1 = new Address();
address1.city = "SFO";
address1.street = "---";
Address address2 = new Address();
address2.city = "NYC";
address2.street = "---";
source.mapOfComplex = Map.of("a1", address1, "a2", address2);
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("mapOfComplex")).isInstanceOf(Binary.class)) //
.loadedIsEqualToSource();
}
@Test // GH-4284
void canQueryDeterministicallyEncrypted() {
Person source = new Person();
source.id = "id-1";
source.ssn = "mySecretSSN";
template.save(source);
Person loaded = template.query(Person.class).matching(where("ssn").is(source.ssn)).firstValue();
assertThat(loaded).isEqualTo(source);
}
@Test // GH-4284
void cannotQueryRandomlyEncrypted() {
Person source = new Person();
source.id = "id-1";
source.wallet = "secret-wallet-id";
template.save(source);
Person loaded = template.query(Person.class).matching(where("wallet").is(source.wallet)).firstValue();
assertThat(loaded).isNull();
}
@Test // GH-4284
void updateSimpleTypeEncryptedFieldWithNewValue() {
Person source = new Person();
source.id = "id-1";
template.save(source);
template.update(Person.class).matching(where("id").is(source.id)).apply(Update.update("ssn", "secret-value"))
.first();
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("ssn")).isInstanceOf(Binary.class)) //
.loadedMatches(it -> assertThat(it.getSsn()).isEqualTo("secret-value"));
}
@Test // GH-4284
void updateComplexTypeEncryptedFieldWithNewValue() {
Person source = new Person();
source.id = "id-1";
template.save(source);
Address address = new Address();
address.city = "SFO";
address.street = "---";
template.update(Person.class).matching(where("id").is(source.id)).apply(Update.update("address", address)).first();
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("address")).isInstanceOf(Binary.class)) //
.loadedMatches(it -> assertThat(it.getAddress()).isEqualTo(address));
}
@Test // GH-4284
void updateEncryptedFieldInNestedElementWithNewValue() {
Person source = new Person();
source.id = "id-1";
source.encryptedZip = new AddressWithEncryptedZip();
source.encryptedZip.city = "Boston";
source.encryptedZip.street = "central square";
template.save(source);
template.update(Person.class).matching(where("id").is(source.id)).apply(Update.update("encryptedZip.zip", "179"))
.first();
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> {
assertThat(it.get("encryptedZip")).isInstanceOf(Document.class);
assertThat(it.get("encryptedZip", Document.class).get("city")).isInstanceOf(String.class);
assertThat(it.get("encryptedZip", Document.class).get("street")).isInstanceOf(String.class);
assertThat(it.get("encryptedZip", Document.class).get("zip")).isInstanceOf(Binary.class);
}) //
.loadedMatches(it -> assertThat(it.getEncryptedZip().getZip()).isEqualTo("179"));
}
@Test
void aggregationWithMatch() {
Person person = new Person();
person.id = "id-1";
person.name = "p1-name";
person.ssn = "mySecretSSN";
template.save(person);
AggregationResults<Person> aggregationResults = template.aggregateAndReturn(Person.class)
.by(newAggregation(Person.class, Aggregation.match(where("ssn").is(person.ssn)))).all();
assertThat(aggregationResults.getMappedResults()).containsExactly(person);
}
@Test
void altKeyDetection(@Autowired CachingMongoClientEncryption mongoClientEncryption) throws InterruptedException {
BsonBinary user1key = mongoClientEncryption.getClientEncryption().createDataKey("local",
new DataKeyOptions().keyAltNames(Collections.singletonList("user-1")));
BsonBinary user2key = mongoClientEncryption.getClientEncryption().createDataKey("local",
new DataKeyOptions().keyAltNames(Collections.singletonList("user-2")));
Person p1 = new Person();
p1.id = "id-1";
p1.name = "user-1";
p1.ssn = "ssn";
p1.viaAltKeyNameField = "value-1";
Person p2 = new Person();
p2.id = "id-2";
p2.name = "user-2";
p2.viaAltKeyNameField = "value-1";
Person p3 = new Person();
p3.id = "id-3";
p3.name = "user-1";
p3.viaAltKeyNameField = "value-1";
template.save(p1);
template.save(p2);
template.save(p3);
template.execute(Person.class, collection -> {
collection.find(new Document()).forEach(it -> System.out.println(it.toJson()));
return null;
});
// remove the key and invalidate encrypted data
mongoClientEncryption.getClientEncryption().deleteKey(user2key);
// clear the 60 second key cache within the mongo client
mongoClientEncryption.destroy();
assertThat(template.query(Person.class).matching(where("id").is(p1.id)).firstValue()).isEqualTo(p1);
assertThatExceptionOfType(PermissionDeniedDataAccessException.class)
.isThrownBy(() -> template.query(Person.class).matching(where("id").is(p2.id)).firstValue());
}
<T> SaveAndLoadAssert<T> verifyThat(T source) {
return new SaveAndLoadAssert<>(source);
}
class SaveAndLoadAssert<T> {
T source;
Function<T, ?> idProvider;
SaveAndLoadAssert(T source) {
this.source = source;
}
SaveAndLoadAssert<T> identifiedBy(Function<T, ?> idProvider) {
this.idProvider = idProvider;
return this;
}
SaveAndLoadAssert<T> wasSavedAs(Document expected) {
return wasSavedMatching(it -> Assertions.assertThat(it).isEqualTo(expected));
}
SaveAndLoadAssert<T> wasSavedMatching(Consumer<Document> saved) {
AbstractEncryptionTestBase.this.assertSaved(source, idProvider, saved);
return this;
}
SaveAndLoadAssert<T> loadedMatches(Consumer<T> expected) {
AbstractEncryptionTestBase.this.assertLoaded(source, idProvider, expected);
return this;
}
SaveAndLoadAssert<T> loadedIsEqualToSource() {
return loadedIsEqualTo(source);
}
SaveAndLoadAssert<T> loadedIsEqualTo(T expected) {
return loadedMatches(it -> Assertions.assertThat(it).isEqualTo(expected));
}
}
<T> void assertSaved(T source, Function<T, ?> idProvider, Consumer<Document> dbValue) {
Document savedDocument = template.execute(Person.class, collection -> {
MongoNamespace namespace = collection.getNamespace();
try (MongoClient rawClient = MongoClients.create()) {
return rawClient.getDatabase(namespace.getDatabaseName()).getCollection(namespace.getCollectionName())
.find(new Document("_id", idProvider.apply(source))).first();
}
});
dbValue.accept(savedDocument);
}
<T> void assertLoaded(T source, Function<T, ?> idProvider, Consumer<T> loadedValue) {
T loaded = template.query((Class<T>) source.getClass()).matching(where("id").is(idProvider.apply(source)))
.firstValue();
loadedValue.accept(loaded);
}
protected static class EncryptionConfig extends AbstractMongoClientConfiguration {
@Autowired ApplicationContext applicationContext;
@Override
protected String getDatabaseName() {
return "fle-test";
}
@Bean
public MongoClient mongoClient() {
return super.mongoClient();
}
@Override
protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) {
converterConfigurationAdapter
.registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext));
}
@Bean
MongoEncryptionConverter encryptingConverter(MongoClientEncryption mongoClientEncryption) {
Lazy<BsonBinary> dataKey = Lazy.of(() -> mongoClientEncryption.getClientEncryption().createDataKey("local",
new DataKeyOptions().keyAltNames(Collections.singletonList("mySuperSecretKey"))));
return new MongoEncryptionConverter(mongoClientEncryption,
EncryptionKeyResolver.annotated((ctx) -> EncryptionKey.keyId(dataKey.get())));
}
@Bean
CachingMongoClientEncryption clientEncryption(ClientEncryptionSettings encryptionSettings) {
return new CachingMongoClientEncryption(() -> ClientEncryptions.create(encryptionSettings));
}
@Bean
ClientEncryptionSettings encryptionSettings(MongoClient mongoClient) {
MongoNamespace keyVaultNamespace = new MongoNamespace("encryption.testKeyVault");
MongoCollection<Document> keyVaultCollection = mongoClient.getDatabase(keyVaultNamespace.getDatabaseName())
.getCollection(keyVaultNamespace.getCollectionName());
keyVaultCollection.drop();
// Ensure that two data keys cannot share the same keyAltName.
keyVaultCollection.createIndex(Indexes.ascending("keyAltNames"),
new IndexOptions().unique(true).partialFilterExpression(Filters.exists("keyAltNames")));
MongoCollection<Document> collection = mongoClient.getDatabase(getDatabaseName()).getCollection("test");
collection.drop(); // Clear old data
byte[] localMasterKey = new byte[96];
new SecureRandom().nextBytes(localMasterKey);
Map<String, Map<String, Object>> kmsProviders = Map.of("local", Map.of("key", localMasterKey));
// Create the ClientEncryption instance
return ClientEncryptionSettings.builder() //
.keyVaultMongoClientSettings(
MongoClientSettings.builder().applyConnectionString(new ConnectionString("mongodb://localhost")).build()) //
.keyVaultNamespace(keyVaultNamespace.getFullName()) //
.kmsProviders(kmsProviders) //
.build();
}
}
static class CachingMongoClientEncryption extends MongoClientEncryption implements DisposableBean {
static final AtomicReference<ClientEncryption> cache = new AtomicReference<>();
CachingMongoClientEncryption(Supplier<ClientEncryption> source) {
super(() -> {
if (cache.get() != null) {
return cache.get();
}
ClientEncryption clientEncryption = source.get();
cache.set(clientEncryption);
return clientEncryption;
});
}
@Override
public void destroy() {
ClientEncryption clientEncryption = cache.get();
if (clientEncryption != null) {
clientEncryption.close();
cache.set(null);
}
}
}
@org.springframework.data.mongodb.core.mapping.Document("test")
static class Person {
String id;
String name;
@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic) //
String ssn;
@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "mySuperSecretKey") //
String wallet;
@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // full document must be random
Address address;
AddressWithEncryptedZip encryptedZip;
@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // lists must be random
List<String> listOfString;
@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // lists must be random
List<Address> listOfComplex;
@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "/name") //
String viaAltKeyNameField;
@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) //
Map<String, String> mapOfString;
@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) //
Map<String, Address> mapOfComplex;
@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) //
LocalDate today;
public String getId() {
return this.id;
}
public String getName() {
return this.name;
}
public String getSsn() {
return this.ssn;
}
public String getWallet() {
return this.wallet;
}
public Address getAddress() {
return this.address;
}
public AddressWithEncryptedZip getEncryptedZip() {
return this.encryptedZip;
}
public List<String> getListOfString() {
return this.listOfString;
}
public List<Address> getListOfComplex() {
return this.listOfComplex;
}
public String getViaAltKeyNameField() {
return this.viaAltKeyNameField;
}
public Map<String, String> getMapOfString() {
return this.mapOfString;
}
public Map<String, Address> getMapOfComplex() {
return this.mapOfComplex;
}
public LocalDate getToday() {
return today;
}
public void setId(String id) {
this.id = id;
}
public void setName(String name) {
this.name = name;
}
public void setSsn(String ssn) {
this.ssn = ssn;
}
public void setWallet(String wallet) {
this.wallet = wallet;
}
public void setAddress(Address address) {
this.address = address;
}
public void setEncryptedZip(AddressWithEncryptedZip encryptedZip) {
this.encryptedZip = encryptedZip;
}
public void setListOfString(List<String> listOfString) {
this.listOfString = listOfString;
}
public void setListOfComplex(List<Address> listOfComplex) {
this.listOfComplex = listOfComplex;
}
public void setViaAltKeyNameField(String viaAltKeyNameField) {
this.viaAltKeyNameField = viaAltKeyNameField;
}
public void setMapOfString(Map<String, String> mapOfString) {
this.mapOfString = mapOfString;
}
public void setMapOfComplex(Map<String, Address> mapOfComplex) {
this.mapOfComplex = mapOfComplex;
}
public void setToday(LocalDate today) {
this.today = today;
}
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Person person = (Person) o;
return Objects.equals(id, person.id) && Objects.equals(name, person.name) && Objects.equals(ssn, person.ssn)
&& Objects.equals(wallet, person.wallet) && Objects.equals(address, person.address)
&& Objects.equals(encryptedZip, person.encryptedZip) && Objects.equals(listOfString, person.listOfString)
&& Objects.equals(listOfComplex, person.listOfComplex)
&& Objects.equals(viaAltKeyNameField, person.viaAltKeyNameField)
&& Objects.equals(mapOfString, person.mapOfString) && Objects.equals(mapOfComplex, person.mapOfComplex)
&& Objects.equals(today, person.today);
}
@Override
public int hashCode() {
return Objects.hash(id, name, ssn, wallet, address, encryptedZip, listOfString, listOfComplex, viaAltKeyNameField,
mapOfString, mapOfComplex, today);
}
public String toString() {
return "EncryptionTests.Person(id=" + this.getId() + ", name=" + this.getName() + ", ssn=" + this.getSsn()
+ ", wallet=" + this.getWallet() + ", address=" + this.getAddress() + ", encryptedZip="
+ this.getEncryptedZip() + ", listOfString=" + this.getListOfString() + ", listOfComplex="
+ this.getListOfComplex() + ", viaAltKeyNameField=" + this.getViaAltKeyNameField() + ", mapOfString="
+ this.getMapOfString() + ", mapOfComplex=" + this.getMapOfComplex() + ", today=" + this.getToday() + ")";
}
}
static class Address {
String city;
String street;
public Address() {}
public String getCity() {
return this.city;
}
public String getStreet() {
return this.street;
}
public void setCity(String city) {
this.city = city;
}
public void setStreet(String street) {
this.street = street;
}
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Address address = (Address) o;
return Objects.equals(city, address.city) && Objects.equals(street, address.street);
}
@Override
public int hashCode() {
return Objects.hash(city, street);
}
public String toString() {
return "EncryptionTests.Address(city=" + this.getCity() + ", street=" + this.getStreet() + ")";
}
}
static class AddressWithEncryptedZip extends Address {
@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) String zip;
@Override
public String toString() {
return "AddressWithEncryptedZip{" + "zip='" + zip + '\'' + ", city='" + getCity() + '\'' + ", street='"
+ getStreet() + '\'' + '}';
}
public String getZip() {
return this.zip;
}
public void setZip(String zip) {
this.zip = zip;
}
}
}
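Combining the Person mapping above with the assertions in the tests, the persisted "test" document has roughly the following shape. The class and the empty byte arrays are purely illustrative; real ciphertext is produced by the driver as BSON binary subtype 6:

import org.bson.Document;
import org.bson.types.Binary;

final class ExpectedShape {

	// Rough shape of a saved Person for the mapping above (placeholder payloads, not real ciphertext).
	static Document savedPerson() {
		return new Document("_id", "id-1") //
				.append("name", "p1-name") // not annotated: stored as plain String
				.append("ssn", new Binary((byte) 6, new byte[0])) // @ExplicitEncrypted: BSON binary, subtype 6
				.append("encryptedZip", new Document("city", "Boston") // nested document stays readable...
						.append("street", "central square") //
						.append("zip", new Binary((byte) 6, new byte[0]))); // ...only the annotated zip is encrypted
	}
}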


@@ -0,0 +1,103 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.encryption;
import java.util.Collections;
import org.bson.BsonBinary;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.convert.PropertyValueConverterFactory;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
import org.springframework.data.mongodb.core.convert.encryption.MongoEncryptionConverter;
import org.springframework.data.mongodb.core.encryption.BypassAutoEncryptionTest.Config;
import org.springframework.data.util.Lazy;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import com.mongodb.AutoEncryptionSettings;
import com.mongodb.ClientEncryptionSettings;
import com.mongodb.MongoClientSettings.Builder;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.model.vault.DataKeyOptions;
import com.mongodb.client.vault.ClientEncryptions;
/**
* Encryption tests for a client that has {@link AutoEncryptionSettings#isBypassAutoEncryption()} enabled.
*
* @author Christoph Strobl
*/
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = Config.class)
public class BypassAutoEncryptionTest extends AbstractEncryptionTestBase {
@Disabled
@Override
void altKeyDetection(@Autowired CachingMongoClientEncryption mongoClientEncryption) throws InterruptedException {
super.altKeyDetection(mongoClientEncryption);
}
@Configuration
static class Config extends EncryptionConfig {
@Autowired ApplicationContext applicationContext;
@Override
protected void configureClientSettings(Builder builder) {
MongoClient mongoClient = MongoClients.create();
ClientEncryptionSettings clientEncryptionSettings = encryptionSettings(mongoClient);
mongoClient.close();
builder.autoEncryptionSettings(AutoEncryptionSettings.builder() //
.kmsProviders(clientEncryptionSettings.getKmsProviders()) //
.keyVaultNamespace(clientEncryptionSettings.getKeyVaultNamespace()) //
.bypassAutoEncryption(true).build());
}
@Override
protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) {
converterConfigurationAdapter
.registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext));
}
@Bean
@Override
MongoEncryptionConverter encryptingConverter(MongoClientEncryption mongoClientEncryption) {
Lazy<BsonBinary> dataKey = Lazy.of(() -> mongoClientEncryption.getClientEncryption().createDataKey("local",
new DataKeyOptions().keyAltNames(Collections.singletonList("mySuperSecretKey"))));
return new MongoEncryptionConverter(mongoClientEncryption,
EncryptionKeyResolver.annotated((ctx) -> EncryptionKey.keyId(dataKey.get())));
}
@Bean
@Override
CachingMongoClientEncryption clientEncryption(ClientEncryptionSettings encryptionSettings) {
return new CachingMongoClientEncryption(() -> ClientEncryptions.create(encryptionSettings));
}
}
}
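With bypassAutoEncryption(true) the driver still decrypts automatically on read but no longer encrypts on write, which is why MongoEncryptionConverter has to encrypt values explicitly before they reach the collection. A minimal sketch of that explicit round trip against the driver's ClientEncryption API, assuming the ClientEncryptionSettings produced by EncryptionConfig and the "mySuperSecretKey" data key created above (not the converter's actual code):

import org.bson.BsonBinary;
import org.bson.BsonString;
import org.bson.BsonValue;

import com.mongodb.ClientEncryptionSettings;
import com.mongodb.client.model.vault.EncryptOptions;
import com.mongodb.client.vault.ClientEncryption;
import com.mongodb.client.vault.ClientEncryptions;

final class ExplicitEncryptionSketch {

	// Sketch of the explicit encrypt/decrypt round trip performed when auto-encryption is bypassed.
	static void roundTrip(ClientEncryptionSettings encryptionSettings) {

		try (ClientEncryption clientEncryption = ClientEncryptions.create(encryptionSettings)) {

			BsonBinary ciphertext = clientEncryption.encrypt(new BsonString("mySecretSSN"),
					new EncryptOptions("AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic").keyAltName("mySuperSecretKey"));

			BsonValue plaintext = clientEncryption.decrypt(ciphertext); // BsonString{value='mySecretSSN'}
		}
	}
}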


@@ -15,48 +15,30 @@
*/
package org.springframework.data.mongodb.core.encryption;
import static org.assertj.core.api.Assertions.*;
import static org.springframework.data.mongodb.core.EncryptionAlgorithms.*;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import lombok.Data;
import lombok.Getter;
import lombok.Setter;
import java.security.SecureRandom;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import org.assertj.core.api.Assertions;
import org.bson.BsonBinary;
import org.bson.Document;
import org.bson.types.Binary;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.dao.PermissionDeniedDataAccessException;
import org.springframework.data.convert.PropertyValueConverterFactory;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
import org.springframework.data.mongodb.core.convert.encryption.MongoEncryptionConverter;
import org.springframework.data.mongodb.core.encryption.EncryptionTests.Config;
import org.springframework.data.mongodb.core.mapping.ExplicitEncrypted;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.util.Lazy;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
@@ -71,7 +53,6 @@ import com.mongodb.client.model.Filters;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.Indexes;
import com.mongodb.client.model.vault.DataKeyOptions;
import com.mongodb.client.vault.ClientEncryption;
import com.mongodb.client.vault.ClientEncryptions;
/**
@@ -79,345 +60,7 @@ import com.mongodb.client.vault.ClientEncryptions;
*/
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = Config.class)
public class EncryptionTests {
@Autowired MongoTemplate template;
@Test // GH-4284
void encryptAndDecryptSimpleValue() {
Person source = new Person();
source.id = "id-1";
source.ssn = "mySecretSSN";
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("ssn")).isInstanceOf(Binary.class)) //
.loadedIsEqualToSource();
}
@Test // GH-4284
void encryptAndDecryptComplexValue() {
Person source = new Person();
source.id = "id-1";
source.address = new Address();
source.address.city = "NYC";
source.address.street = "4th Ave.";
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("address")).isInstanceOf(Binary.class)) //
.loadedIsEqualToSource();
}
@Test // GH-4284
void encryptAndDecryptValueWithinComplexOne() {
Person source = new Person();
source.id = "id-1";
source.encryptedZip = new AddressWithEncryptedZip();
source.encryptedZip.city = "Boston";
source.encryptedZip.street = "central square";
source.encryptedZip.zip = "1234567890";
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> {
assertThat(it.get("encryptedZip")).isInstanceOf(Document.class);
assertThat(it.get("encryptedZip", Document.class).get("city")).isInstanceOf(String.class);
assertThat(it.get("encryptedZip", Document.class).get("street")).isInstanceOf(String.class);
assertThat(it.get("encryptedZip", Document.class).get("zip")).isInstanceOf(Binary.class);
}) //
.loadedIsEqualToSource();
}
@Test // GH-4284
void encryptAndDecryptListOfSimpleValue() {
Person source = new Person();
source.id = "id-1";
source.listOfString = Arrays.asList("spring", "data", "mongodb");
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("listOfString")).isInstanceOf(Binary.class)) //
.loadedIsEqualToSource();
}
@Test // GH-4284
void encryptAndDecryptListOfComplexValue() {
Person source = new Person();
source.id = "id-1";
Address address = new Address();
address.city = "SFO";
address.street = "---";
source.listOfComplex = Collections.singletonList(address);
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("listOfComplex")).isInstanceOf(Binary.class)) //
.loadedIsEqualToSource();
}
@Test // GH-4284
void encryptAndDecryptMapOfSimpleValues() {
Person source = new Person();
source.id = "id-1";
source.mapOfString = Map.of("k1", "v1", "k2", "v2");
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("mapOfString")).isInstanceOf(Binary.class)) //
.loadedIsEqualToSource();
}
@Test // GH-4284
void encryptAndDecryptMapOfComplexValues() {
Person source = new Person();
source.id = "id-1";
Address address1 = new Address();
address1.city = "SFO";
address1.street = "---";
Address address2 = new Address();
address2.city = "NYC";
address2.street = "---";
source.mapOfComplex = Map.of("a1", address1, "a2", address2);
template.save(source);
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("mapOfComplex")).isInstanceOf(Binary.class)) //
.loadedIsEqualToSource();
}
@Test // GH-4284
void canQueryDeterministicallyEncrypted() {
Person source = new Person();
source.id = "id-1";
source.ssn = "mySecretSSN";
template.save(source);
Person loaded = template.query(Person.class).matching(where("ssn").is(source.ssn)).firstValue();
assertThat(loaded).isEqualTo(source);
}
@Test // GH-4284
void cannotQueryRandomlyEncrypted() {
Person source = new Person();
source.id = "id-1";
source.wallet = "secret-wallet-id";
template.save(source);
Person loaded = template.query(Person.class).matching(where("wallet").is(source.wallet)).firstValue();
assertThat(loaded).isNull();
}
@Test // GH-4284
void updateSimpleTypeEncryptedFieldWithNewValue() {
Person source = new Person();
source.id = "id-1";
template.save(source);
template.update(Person.class).matching(where("id").is(source.id)).apply(Update.update("ssn", "secret-value"))
.first();
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("ssn")).isInstanceOf(Binary.class)) //
.loadedMatches(it -> assertThat(it.getSsn()).isEqualTo("secret-value"));
}
@Test // GH-4284
void updateComplexTypeEncryptedFieldWithNewValue() {
Person source = new Person();
source.id = "id-1";
template.save(source);
Address address = new Address();
address.city = "SFO";
address.street = "---";
template.update(Person.class).matching(where("id").is(source.id)).apply(Update.update("address", address)).first();
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> assertThat(it.get("address")).isInstanceOf(Binary.class)) //
.loadedMatches(it -> assertThat(it.getAddress()).isEqualTo(address));
}
@Test // GH-4284
void updateEncryptedFieldInNestedElementWithNewValue() {
Person source = new Person();
source.id = "id-1";
source.encryptedZip = new AddressWithEncryptedZip();
source.encryptedZip.city = "Boston";
source.encryptedZip.street = "central square";
template.save(source);
template.update(Person.class).matching(where("id").is(source.id)).apply(Update.update("encryptedZip.zip", "179"))
.first();
verifyThat(source) //
.identifiedBy(Person::getId) //
.wasSavedMatching(it -> {
assertThat(it.get("encryptedZip")).isInstanceOf(Document.class);
assertThat(it.get("encryptedZip", Document.class).get("city")).isInstanceOf(String.class);
assertThat(it.get("encryptedZip", Document.class).get("street")).isInstanceOf(String.class);
assertThat(it.get("encryptedZip", Document.class).get("zip")).isInstanceOf(Binary.class);
}) //
.loadedMatches(it -> assertThat(it.getEncryptedZip().getZip()).isEqualTo("179"));
}
@Test
void aggregationWithMatch() {
Person person = new Person();
person.id = "id-1";
person.name = "p1-name";
person.ssn = "mySecretSSN";
template.save(person);
AggregationResults<Person> aggregationResults = template.aggregateAndReturn(Person.class)
.by(newAggregation(Person.class, Aggregation.match(where("ssn").is(person.ssn)))).all();
assertThat(aggregationResults.getMappedResults()).containsExactly(person);
}
@Test
void altKeyDetection(@Autowired CachingMongoClientEncryption mongoClientEncryption) throws InterruptedException {
BsonBinary user1key = mongoClientEncryption.getClientEncryption().createDataKey("local",
new DataKeyOptions().keyAltNames(Collections.singletonList("user-1")));
BsonBinary user2key = mongoClientEncryption.getClientEncryption().createDataKey("local",
new DataKeyOptions().keyAltNames(Collections.singletonList("user-2")));
Person p1 = new Person();
p1.id = "id-1";
p1.name = "user-1";
p1.ssn = "ssn";
p1.viaAltKeyNameField = "value-1";
Person p2 = new Person();
p2.id = "id-2";
p2.name = "user-2";
p2.viaAltKeyNameField = "value-1";
Person p3 = new Person();
p3.id = "id-3";
p3.name = "user-1";
p3.viaAltKeyNameField = "value-1";
template.save(p1);
template.save(p2);
template.save(p3);
template.execute(Person.class, collection -> {
collection.find(new Document()).forEach(it -> System.out.println(it.toJson()));
return null;
});
// remove the key and invalidate encrypted data
mongoClientEncryption.getClientEncryption().deleteKey(user2key);
// clear the 60 second key cache within the mongo client
mongoClientEncryption.destroy();
assertThat(template.query(Person.class).matching(where("id").is(p1.id)).firstValue()).isEqualTo(p1);
assertThatExceptionOfType(PermissionDeniedDataAccessException.class)
.isThrownBy(() -> template.query(Person.class).matching(where("id").is(p2.id)).firstValue());
}
<T> SaveAndLoadAssert<T> verifyThat(T source) {
return new SaveAndLoadAssert<>(source);
}
class SaveAndLoadAssert<T> {
T source;
Function<T, ?> idProvider;
SaveAndLoadAssert(T source) {
this.source = source;
}
SaveAndLoadAssert<T> identifiedBy(Function<T, ?> idProvider) {
this.idProvider = idProvider;
return this;
}
SaveAndLoadAssert<T> wasSavedAs(Document expected) {
return wasSavedMatching(it -> Assertions.assertThat(it).isEqualTo(expected));
}
SaveAndLoadAssert<T> wasSavedMatching(Consumer<Document> saved) {
EncryptionTests.this.assertSaved(source, idProvider, saved);
return this;
}
SaveAndLoadAssert<T> loadedMatches(Consumer<T> expected) {
EncryptionTests.this.assertLoaded(source, idProvider, expected);
return this;
}
SaveAndLoadAssert<T> loadedIsEqualToSource() {
return loadedIsEqualTo(source);
}
SaveAndLoadAssert<T> loadedIsEqualTo(T expected) {
return loadedMatches(it -> Assertions.assertThat(it).isEqualTo(expected));
}
}
<T> void assertSaved(T source, Function<T, ?> idProvider, Consumer<Document> dbValue) {
Document savedDocument = template.execute(Person.class, collection -> {
return collection.find(new Document("_id", idProvider.apply(source))).first();
});
dbValue.accept(savedDocument);
}
<T> void assertLoaded(T source, Function<T, ?> idProvider, Consumer<T> loadedValue) {
T loaded = template.query((Class<T>) source.getClass()).matching(where("id").is(idProvider.apply(source)))
.firstValue();
loadedValue.accept(loaded);
}
public class EncryptionTests extends AbstractEncryptionTestBase {
@Configuration
static class Config extends AbstractMongoClientConfiguration {
@@ -430,6 +73,7 @@ public class EncryptionTests {
}
@Bean
@Override
public MongoClient mongoClient() {
return super.mongoClient();
}
@@ -470,57 +114,21 @@ public class EncryptionTests {
MongoCollection<Document> collection = mongoClient.getDatabase(getDatabaseName()).getCollection("test");
collection.drop(); // Clear old data
final byte[] localMasterKey = new byte[96];
byte[] localMasterKey = new byte[96];
new SecureRandom().nextBytes(localMasterKey);
Map<String, Map<String, Object>> kmsProviders = new HashMap<>() {
{
put("local", new HashMap<>() {
{
put("key", localMasterKey);
}
});
}
};
Map<String, Map<String, Object>> kmsProviders = Map.of("local", Map.of("key", localMasterKey));
// Create the ClientEncryption instance
ClientEncryptionSettings clientEncryptionSettings = ClientEncryptionSettings.builder()
return ClientEncryptionSettings.builder()
.keyVaultMongoClientSettings(
MongoClientSettings.builder().applyConnectionString(new ConnectionString("mongodb://localhost")).build())
.keyVaultNamespace(keyVaultNamespace.getFullName()).kmsProviders(kmsProviders).build();
return clientEncryptionSettings;
MongoClientSettings.builder().applyConnectionString(new ConnectionString("mongodb://localhost")).build()) //
.keyVaultNamespace(keyVaultNamespace.getFullName()) //
.kmsProviders(kmsProviders) //
.build();
}
}
static class CachingMongoClientEncryption extends MongoClientEncryption implements DisposableBean {
static final AtomicReference<ClientEncryption> cache = new AtomicReference<>();
CachingMongoClientEncryption(Supplier<ClientEncryption> source) {
super(() -> {
if (cache.get() != null) {
return cache.get();
}
ClientEncryption clientEncryption = source.get();
cache.set(clientEncryption);
return clientEncryption;
});
}
@Override
public void destroy() {
ClientEncryption clientEncryption = cache.get();
if (clientEncryption != null) {
clientEncryption.close();
cache.set(null);
}
}
}
@Data
@org.springframework.data.mongodb.core.mapping.Document("test")
static class Person {


@@ -1510,9 +1510,16 @@ public abstract class AbstractPersonRepositoryIntegrationTests implements Dirtie
assertThat(result.get(0).getId().equals(bart.getId()));
}
@Test // GH-3395
@Test // GH-3395, GH-4404
void caseInSensitiveInClause() {
assertThat(repository.findByLastnameIgnoreCaseIn("bEAuFoRd", "maTTheWs")).hasSize(3);
repository.save(new Person("the-first", "The First"));
repository.save(new Person("the-first-one", "The First One"));
repository.save(new Person("the-second", "The Second"));
assertThat(repository.findByLastnameIgnoreCaseIn("tHE fIRsT")).hasSize(1);
}
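The repository method exercised here is not part of this change set; its declaration is inferred to look roughly like the following (Person being the repository test's firstname/lastname domain type, not the encryption entity, and the id type an assumption):

// Assumed declaration, inferred from the test above:
interface PersonRepository extends org.springframework.data.repository.CrudRepository<Person, String> {

	// Derived query; IgnoreCase + In now produces anchored, case-insensitive exact matches.
	java.util.List<Person> findByLastnameIgnoreCaseIn(String... lastnames);
}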
@Test // GH-3395


@@ -15,7 +15,6 @@
*/
package org.springframework.data.mongodb.repository.query;
import static org.mockito.Mockito.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;
import static org.springframework.data.mongodb.repository.query.StubParameterAccessor.*;
@@ -25,6 +24,7 @@ import java.lang.reflect.Method;
import java.util.List;
import java.util.regex.Pattern;
import org.bson.BsonRegularExpression;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.junit.jupiter.api.BeforeEach;
@@ -273,6 +273,17 @@ class MongoQueryCreatorUnitTests {
assertThat(query).isEqualTo(query(where("firstName").regex("^dave$", "i")));
}
@Test // GH-4404
void createsQueryWithFindByInClauseHavingIgnoreCaseCorrectly() {
PartTree tree = new PartTree("findAllByFirstNameInIgnoreCase", Person.class);
MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, List.of("da've", "carter")), context);
Query query = creator.createQuery();
assertThat(query).isEqualTo(query(where("firstName")
.in(List.of(new BsonRegularExpression("^\\Qda've\\E$", "i"), new BsonRegularExpression("^carter$", "i")))));
}
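The expected values spell out the fix: each IN element becomes an anchored regular expression, with the source wrapped in \Q...\E where quoting is needed, so the "i" option no longer widens the match beyond an exact, case-insensitive comparison. A hypothetical helper, not Spring Data API, expressing the same idea:

import java.util.regex.Pattern;

import org.bson.BsonRegularExpression;

final class IgnoreCaseInSketch {

	// Hypothetical helper: anchored, case-insensitive exact match for a single $in element.
	// Spring Data only quotes the source when necessary; always quoting, as done here, is
	// equivalent for matching purposes ("^\Qcarter\E$" and "^carter$" match the same strings).
	static BsonRegularExpression exactIgnoreCase(String source) {
		return new BsonRegularExpression("^" + Pattern.quote(source) + "$", "i");
	}
}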
@Test // DATAMONGO-770
void createsQueryWithFindByNotIgnoreCaseCorrectly() {


@@ -57,7 +57,7 @@ To create a Spring project in STS:
<repository>
<id>spring-milestone</id>
<name>Spring Maven MILESTONE Repository</name>
<url>https://repo.spring.io/libs-milestone</url>
<url>https://repo.spring.io/milestone</url>
</repository>
</repositories>
----


@@ -1,4 +1,4 @@
Spring Data MongoDB 4.1 GA (2023.0.0)
Spring Data MongoDB 4.1.2 (2023.0.2)
Copyright (c) [2010-2019] Pivotal Software, Inc.
This product is licensed to you under the Apache License, Version 2.0 (the "License").
@@ -44,6 +44,8 @@ conditions of the subcomponent's license, as noted in the LICENSE file.