Compare commits
4 Commits
4.1.1...issue/4346

| Author | SHA1 | Date |
|---|---|---|
| | 367cd61e35 | |
| | 8a9f5361a1 | |
| | 5163e544ae | |
| | 431512a66c | |
.mvn/wrapper/maven-wrapper.properties (vendored, 4 changed lines)

@@ -1,2 +1,2 @@
#Tue Jun 13 08:53:53 CEST 2023
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.2/apache-maven-3.9.2-bin.zip
#Thu Apr 06 16:16:28 CEST 2023
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.1/apache-maven-3.9.1-bin.zip

Jenkinsfile (vendored, 2 changed lines)

@@ -9,7 +9,7 @@ pipeline {
triggers {
pollSCM 'H/10 * * * *'
upstream(upstreamProjects: "spring-data-commons/3.1.x", threshold: hudson.model.Result.SUCCESS)
upstream(upstreamProjects: "spring-data-commons/main", threshold: hudson.model.Result.SUCCESS)
}

options {
pom.xml (36 changed lines)

@@ -5,7 +5,7 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.1</version>
<version>4.2.x-4346-SNAPSHOT</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>3.1.1</version>
<version>3.2.0-SNAPSHOT</version>
</parent>

<modules>
@@ -26,7 +26,7 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>3.1.1</springdata.commons>
<springdata.commons>3.2.0-SNAPSHOT</springdata.commons>
<mongo>4.9.1</mongo>
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
<jmh.version>1.19</jmh.version>
@@ -144,8 +144,34 @@
</dependencies>

<repositories>

<repository>
<id>spring-libs-snapshot</id>
<url>https://repo.spring.io/libs-snapshot</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
<repository>
<id>sonatype-libs-snapshot</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
<releases>
<enabled>false</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>

<pluginRepositories>
<pluginRepository>
<id>spring-plugins-release</id>
<url>https://repo.spring.io/plugins-release</url>
</pluginRepository>
<pluginRepository>
<id>spring-libs-milestone</id>
<url>https://repo.spring.io/libs-milestone</url>
</pluginRepository>
</pluginRepositories>

</project>
@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.1</version>
<version>4.2.x-4346-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.1</version>
<version>4.2.x-4346-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -80,4 +80,15 @@
</build>

<pluginRepositories>
<pluginRepository>
<id>spring-plugins-release</id>
<url>https://repo.spring.io/plugins-release</url>
</pluginRepository>
<pluginRepository>
<id>spring-plugins-snapshot</id>
<url>https://repo.spring.io/libs-snapshot</url>
</pluginRepository>
</pluginRepositories>

</project>

@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.1</version>
<version>4.2.x-4346-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -61,8 +61,7 @@ class ScrollUtils {

Document sortObject = query.getSortObject();
KeysetScrollPosition keyset = query.getKeyset();
Direction direction = keyset.getDirection();
KeysetScrollDirector director = KeysetScrollDirector.of(direction);
KeysetScrollDirector director = KeysetScrollDirector.of(keyset.getDirection());

List<T> resultsToUse = director.postPostProcessResults(result, query.getLimit());

@@ -72,7 +71,7 @@ class ScrollUtils {
Entity<T> entity = operations.forEntity(last);

Map<String, Object> keys = entity.extractKeys(sortObject, sourceType);
return ScrollPosition.of(keys, direction);
return ScrollPosition.forward(keys);
};

return Window.from(resultsToUse, positionFunction, hasMoreElements(result, query.getLimit()));
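The two ScrollUtils hunks above are where the keyset position handed back in a Window is built: one side of the compare derives the position from the originally requested scroll direction, the other always emits a forward position. A rough sketch of the calling pattern this affects, modelled on the MongoTemplateScrollTests change later in this compare (Person is the test fixture entity used there; a configured MongoTemplate is assumed):

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.domain.ScrollPosition;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Window;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.Person;
import org.springframework.data.mongodb.core.query.Query;

class KeysetScrollSketch {

	// First window of a backward keyset scroll; whether the position taken from the
	// returned window keeps scrolling backward depends on which side of the hunk above is in effect.
	static Window<Person> scroll(MongoTemplate template) {

		Query q = new Query(where("firstName").regex("J.*")).with(Sort.by("firstName", "age"));
		q.with(ScrollPosition.keyset().backward()).limit(3);

		Window<Person> window = template.scroll(q, Person.class);
		return template.scroll(q.with(window.positionAt(0)).limit(3), Person.class);
	}
}
```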
@@ -79,7 +79,7 @@ public class ArrayOperators {

private final @Nullable String fieldReference;
private final @Nullable AggregationExpression expression;
private final @Nullable Collection<?> values;
private final @Nullable Collection values;

/**
* Creates new {@link ArrayOperatorFactory} for given {@literal fieldReference}.
@@ -214,10 +214,6 @@ public class ArrayOperators {
return Filter.filter(fieldReference);
}

if (usesExpression()) {
return Filter.filter(expression);
}

Assert.state(values != null, "Values must not be null");
return Filter.filter(new ArrayList<>(values));
}
@@ -321,8 +317,7 @@ public class ArrayOperators {
}

/**
* Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given {@link Sort
* order}.
* Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given {@link Sort order}.
*
* @return new instance of {@link SortArray}.
* @since 4.0
@@ -402,8 +397,8 @@ public class ArrayOperators {
}

/**
* Creates new {@link AggregationExpression} that return the last element in the given array. <strong>NOTE:</strong>
* Requires MongoDB 4.4 or later.
* Creates new {@link AggregationExpression} that return the last element in the given array.
* <strong>NOTE:</strong> Requires MongoDB 4.4 or later.
*
* @return new instance of {@link Last}.
* @since 3.4
@@ -654,19 +649,6 @@ public class ArrayOperators {
return new FilterExpressionBuilder().filter(field);
}

/**
* Set the {@link AggregationExpression} resolving to an arry to apply the {@code $filter} to.
*
* @param expression must not be {@literal null}.
* @return never {@literal null}.
* @since 4.2
*/
public static AsBuilder filter(AggregationExpression expression) {

Assert.notNull(expression, "Field must not be null");
return new FilterExpressionBuilder().filter(expression);
}

/**
* Set the {@literal values} to apply the {@code $filter} to.
*
@@ -699,16 +681,7 @@ public class ArrayOperators {
}

private Object getMappedInput(AggregationOperationContext context) {

if (input instanceof Field field) {
return context.getReference(field).toString();
}

if (input instanceof AggregationExpression expression) {
return expression.toDocument(context);
}

return input;
return input instanceof Field field ? context.getReference(field).toString() : input;
}

private Object getMappedCondition(AggregationOperationContext context) {
@@ -742,15 +715,6 @@ public class ArrayOperators {
* @return
*/
AsBuilder filter(Field field);

/**
* Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to.
*
* @param expression must not be {@literal null}.
* @return
* @since 4.1.1
*/
AsBuilder filter(AggregationExpression expression);
}

/**
@@ -833,14 +797,6 @@ public class ArrayOperators {
return this;
}

@Override
public AsBuilder filter(AggregationExpression expression) {

Assert.notNull(expression, "Expression must not be null");
filter.input = expression;
return this;
}

@Override
public ConditionBuilder as(String variableName) {

@@ -1377,7 +1333,7 @@ public class ArrayOperators {
Assert.notNull(expressions, "PropertyExpressions must not be null");

return new Reduce(Fields.field(fieldReference), initialValue,
Arrays.<AggregationExpression> asList(expressions));
Arrays.<AggregationExpression>asList(expressions));
}
};
}
@@ -1734,7 +1690,7 @@ public class ArrayOperators {
* @author Christoph Strobl
* @author Shashank Sharma
* @see <a href=
* "https://docs.mongodb.com/manual/reference/operator/aggregation/in/">https://docs.mongodb.com/manual/reference/operator/aggregation/in/</a>
* "https://docs.mongodb.com/manual/reference/operator/aggregation/in/">https://docs.mongodb.com/manual/reference/operator/aggregation/in/</a>
* @since 2.2
*/
public static class In extends AbstractAggregationExpression {
@@ -1823,7 +1779,7 @@ public class ArrayOperators {
*
* @author Christoph Strobl
* @see <a href=
* "https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/">https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/</a>
* "https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/">https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/</a>
* @since 2.1
*/
public static class ArrayToObject extends AbstractAggregationExpression {
@@ -2020,7 +1976,7 @@ public class ArrayOperators {

/**
* Set the order to put elements in.
*
*
* @param sort must not be {@literal null}.
* @return new instance of {@link SortArray}.
*/
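The Filter hunks above toggle support for feeding the $filter input from an AggregationExpression instead of a plain field reference (the expression-based filter(...) entry points and the expression branch of getMappedInput exist on only one side of this compare). A rough sketch of both builder flavours, based on the FilterExpressionUnitTests code further down; the field names are illustrative:

```java
import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.ArrayOperators;
import org.springframework.data.mongodb.core.aggregation.ComparisonOperators;
import org.springframework.data.mongodb.core.aggregation.ObjectOperators;

class FilterSketch {

	// $filter over a plain array field; works on both sides of the compare.
	static Document filterByField() {
		return ArrayOperators.arrayOf("items").filter().as("item")
				.by(ComparisonOperators.valueOf("item.price").greaterThan("field-1"))
				.toDocument(Aggregation.DEFAULT_CONTEXT);
	}

	// $filter whose input is produced by another expression ($objectToArray);
	// this relies on the expression-based input support shown in the hunks above.
	static Document filterByExpression() {
		return ArrayOperators.arrayOf(ObjectOperators.valueOf("data.metadata").toArray()).filter().as("item")
				.by(ComparisonOperators.valueOf("item.price").greaterThan("field-1"))
				.toDocument(Aggregation.DEFAULT_CONTEXT);
	}
}
```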
@@ -1571,6 +1571,39 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return newDocument;
}

@Nullable
@Override
public Object convertToMongoType(@Nullable Object obj, MongoPersistentProperty property) {

PersistentPropertyAccessor accessor = new MapPersistentPropertyAccessor();
accessor.setProperty(property, obj);

Document newDocument = new Document();
DocumentAccessor dbObjectAccessor = new DocumentAccessor(newDocument);

if (property.isIdProperty() || !property.isWritable()) {
return obj;
}
if (property.isAssociation()) {

writeAssociation(property.getRequiredAssociation(), accessor, dbObjectAccessor);
return dbObjectAccessor.get(property);
}

Object value = obj;

if (value == null) {
if (property.writeNullValues()) {
dbObjectAccessor.put(property, null);
}
} else if (!conversions.isSimpleType(value.getClass())) {
writePropertyInternal(value, dbObjectAccessor, property, accessor);
} else {
writeSimpleInternal(value, newDocument, property, accessor);
}
return dbObjectAccessor.get(property);
}

// TODO: hide in 4.0
public List<Object> maybeConvertList(Iterable<?> source, @Nullable TypeInformation<?> typeInformation) {
@@ -41,14 +41,16 @@ import org.springframework.data.convert.PropertyValueConversions;
import org.springframework.data.convert.PropertyValueConverter;
import org.springframework.data.convert.PropertyValueConverterFactory;
import org.springframework.data.convert.PropertyValueConverterRegistrar;
import org.springframework.data.convert.ReadingConverter;
import org.springframework.data.convert.SimplePropertyValueConversions;
import org.springframework.data.convert.ValueConversionContext;
import org.springframework.data.convert.WritingConverter;
import org.springframework.data.mapping.PersistentProperty;
import org.springframework.data.mapping.model.SimpleTypeHolder;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;

/**
* Value object to capture custom conversion. {@link MongoCustomConversions} also act as factory for
@@ -332,9 +334,40 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
svc.init();
}

// Move to data-commons?
PropertyValueConversions pvc = new PropertyValueConversions() {

@Override
public boolean hasValueConverter(PersistentProperty<?> property) {
return propertyValueConversions.hasValueConverter(property);
}

@Override
public <DV, SV, P extends PersistentProperty<P>, VCC extends ValueConversionContext<P>> PropertyValueConverter<DV, SV, VCC> getValueConverter(
P property) {

return new PropertyValueConverter<DV, SV, VCC>() {

@Nullable
@Override
public DV read(SV value, VCC context) {
return (DV) propertyValueConversions.getValueConverter(property).read(value, context);
}

@Nullable
@Override
public SV write(DV value, VCC context) {
if (ClassUtils.isAssignable(property.getType(), value.getClass())) {
return (SV) propertyValueConversions.getValueConverter(property).write(value, context);
}
return (SV) value;
}
};
}
};

if (!useNativeDriverJavaTimeCodecs) {
return new ConverterConfiguration(STORE_CONVERSIONS, this.customConverters, convertiblePair -> true,
this.propertyValueConversions);
return new ConverterConfiguration(STORE_CONVERSIONS, this.customConverters, convertiblePair -> true, pvc);
}

/*
@@ -359,10 +392,9 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
}

return true;
}, this.propertyValueConversions);
}, pvc);
}

@ReadingConverter
private enum DateToUtcLocalDateTimeConverter implements Converter<Date, LocalDateTime> {
INSTANCE;

@@ -372,7 +404,6 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
}
}

@ReadingConverter
private enum DateToUtcLocalTimeConverter implements Converter<Date, LocalTime> {
INSTANCE;

@@ -382,7 +413,6 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
}
}

@ReadingConverter
private enum DateToUtcLocalDateConverter implements Converter<Date, LocalDate> {
INSTANCE;
@@ -59,6 +59,11 @@ public interface MongoWriter<T> extends EntityWriter<T, Bson> {
@Nullable
Object convertToMongoType(@Nullable Object obj, @Nullable TypeInformation<?> typeInformation);

@Nullable
default Object convertToMongoType(@Nullable Object obj, MongoPersistentProperty property) {
return convertToMongoType(obj, property.getTypeInformation());
}

default Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity<?> entity) {
return convertToMongoType(obj, entity.getTypeInformation());
}
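The hunk above adds a property-aware convertToMongoType overload to the MongoWriter SPI; by default it falls back to the existing TypeInformation-based conversion, and MappingMongoConverter (earlier hunk) overrides it so property-level value converters participate. A minimal sketch of how calling code might choose between the two overloads, in the spirit of the ConvertingParameterAccessor change below (the helper method itself is hypothetical):

```java
import org.springframework.data.mongodb.core.convert.MongoWriter;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.util.TypeInformation;
import org.springframework.lang.Nullable;

class ConversionSketch {

	// Prefer the property-aware overload when the target property is known, so that
	// converters declared on the property (e.g. via @ValueConverter) are applied.
	@Nullable
	static Object convert(MongoWriter<?> writer, @Nullable Object raw, @Nullable MongoPersistentProperty property) {
		return property != null
				? writer.convertToMongoType(raw, property)
				: writer.convertToMongoType(raw, (TypeInformation<?>) null);
	}
}
```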
@@ -89,22 +89,25 @@ public enum MongoRegexCreator {

String regex = prepareAndEscapeStringBeforeApplyingLikeRegex(source, matcherType);

return switch (matcherType) {
case STARTING_WITH -> String.format("^%s", regex);
case ENDING_WITH -> String.format("%s$", regex);
case CONTAINING -> String.format(".*%s.*", regex);
case EXACT -> String.format("^%s$", regex);
default -> regex;
};
switch (matcherType) {
case STARTING_WITH:
return String.format("^%s", regex);
case ENDING_WITH:
return String.format("%s$", regex);
case CONTAINING:
return String.format(".*%s.*", regex);
case EXACT:
return String.format("^%s$", regex);
default:
return regex;
}
}

/**
* @param source
* @return
* @since 2.2.14
* @deprecated since 4.1.1
*/
@Deprecated(since = "4.1.1", forRemoval = true)
public Object toCaseInsensitiveMatch(Object source) {
return source instanceof String stringValue ? new BsonRegularExpression(Pattern.quote(stringValue), "i") : source;
}
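Both sides of the hunk above produce the same anchoring per MatchMode; the difference is switch-expression versus classic switch syntax, plus the deprecation marker on toCaseInsensitiveMatch. For orientation, a small sketch of what toRegularExpression yields for a probe value that needs no escaping (the exact escaping is handled by prepareAndEscapeStringBeforeApplyingLikeRegex):

```java
import org.springframework.data.mongodb.core.query.MongoRegexCreator;
import org.springframework.data.mongodb.core.query.MongoRegexCreator.MatchMode;

class RegexSketch {

	static void demo() {
		// "spring" contains no regex metacharacters, so the formats from the switch above apply directly.
		String startingWith = MongoRegexCreator.INSTANCE.toRegularExpression("spring", MatchMode.STARTING_WITH); // "^spring"
		String endingWith = MongoRegexCreator.INSTANCE.toRegularExpression("spring", MatchMode.ENDING_WITH);     // "spring$"
		String containing = MongoRegexCreator.INSTANCE.toRegularExpression("spring", MatchMode.CONTAINING);      // ".*spring.*"
		String exact = MongoRegexCreator.INSTANCE.toRegularExpression("spring", MatchMode.EXACT);                // "^spring$"
	}
}
```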
@@ -87,14 +87,11 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
this.bucket = bucket;
}

@Override
public ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType,
@Nullable Object metadata) {
return store(content, filename, contentType, toDocument(metadata));
}

@Override
@SuppressWarnings("unchecked")
public <T> T store(GridFsObject<T, InputStream> upload) {

GridFSUploadOptions uploadOptions = computeUploadOptionsFor(upload.getOptions().getContentType(),
@@ -113,7 +110,6 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
return upload.getFileId();
}

@Override
public GridFSFindIterable find(Query query) {

Assert.notNull(query, "Query must not be null");
@@ -134,12 +130,10 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
return iterable;
}

@Override
public GridFSFile findOne(Query query) {
return find(query).first();
}

@Override
public void delete(Query query) {

for (GridFSFile gridFSFile : find(query)) {
@@ -147,12 +141,10 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
}
}

@Override
public ClassLoader getClassLoader() {
return dbFactory.getClass().getClassLoader();
}

@Override
public GridFsResource getResource(String location) {

return Optional.ofNullable(findOne(query(whereFilename().is(location)))) //
@@ -160,7 +152,6 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
.orElseGet(() -> GridFsResource.absent(location));
}

@Override
public GridFsResource getResource(GridFSFile file) {

Assert.notNull(file, "GridFSFile must not be null");
@@ -168,7 +159,6 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
return new GridFsResource(file, getGridFs().openDownloadStream(file.getId()));
}

@Override
public GridFsResource[] getResources(String locationPattern) {

if (!StringUtils.hasText(locationPattern)) {
@@ -194,8 +184,6 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe

private GridFSBucket getGridFs() {

Assert.notNull(dbFactory, "MongoDatabaseFactory must not be null");

MongoDatabase db = dbFactory.getMongoDatabase();
return bucket == null ? GridFSBuckets.create(db) : GridFSBuckets.create(db, bucket);
}
@@ -82,7 +82,7 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
*
* @param dbFactory must not be {@literal null}.
* @param converter must not be {@literal null}.
* @param bucket can be {@literal null}.
* @param bucket
*/
public ReactiveGridFsTemplate(ReactiveMongoDatabaseFactory dbFactory, MongoConverter converter,
@Nullable String bucket) {
@@ -96,7 +96,7 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
* @param dataBufferFactory must not be {@literal null}.
* @param dbFactory must not be {@literal null}.
* @param converter must not be {@literal null}.
* @param bucket can be {@literal null}.
* @param bucket
*/
public ReactiveGridFsTemplate(DataBufferFactory dataBufferFactory, ReactiveMongoDatabaseFactory dbFactory,
MongoConverter converter, @Nullable String bucket) {
@@ -117,8 +117,6 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
return store(content, filename, contentType, toDocument(metadata));
}

@Override
@SuppressWarnings("unchecked")
public <T> Mono<T> store(GridFsObject<T, Publisher<DataBuffer>> upload) {

GridFSUploadOptions uploadOptions = computeUploadOptionsFor(upload.getOptions().getContentType(),
@@ -276,7 +274,6 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
this.sortObject = sortObject;
}

@Override
public GridFSFindPublisher doInBucket(GridFSBucket bucket) {

GridFSFindPublisher findPublisher = bucket.find(queryObject).sort(sortObject);
@@ -314,8 +311,21 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
}
}

private record UploadCallback(BsonValue fileId, String filename, Publisher<ByteBuffer> source,
GridFSUploadOptions uploadOptions) implements ReactiveBucketCallback<Void> {
private static class UploadCallback implements ReactiveBucketCallback<Void> {

private final BsonValue fileId;
private final String filename;
private final Publisher<ByteBuffer> source;
private final GridFSUploadOptions uploadOptions;

public UploadCallback(BsonValue fileId, String filename, Publisher<ByteBuffer> source,
GridFSUploadOptions uploadOptions) {

this.fileId = fileId;
this.filename = filename;
this.source = source;
this.uploadOptions = uploadOptions;
}

@Override
public GridFSUploadPublisher<Void> doInBucket(GridFSBucket bucket) {
@@ -323,8 +333,19 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
}
}

private record AutoIdCreatingUploadCallback(String filename, Publisher<ByteBuffer> source,
GridFSUploadOptions uploadOptions) implements ReactiveBucketCallback<ObjectId> {
private static class AutoIdCreatingUploadCallback implements ReactiveBucketCallback<ObjectId> {

private final String filename;
private final Publisher<ByteBuffer> source;
private final GridFSUploadOptions uploadOptions;

public AutoIdCreatingUploadCallback(String filename, Publisher<ByteBuffer> source,
GridFSUploadOptions uploadOptions) {

this.filename = filename;
this.source = source;
this.uploadOptions = uploadOptions;
}

@Override
public GridFSUploadPublisher<ObjectId> doInBucket(GridFSBucket bucket) {
@@ -332,7 +353,13 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
}
}

private record DeleteCallback(BsonValue id) implements ReactiveBucketCallback<Void> {
private static class DeleteCallback implements ReactiveBucketCallback<Void> {

private final BsonValue id;

public DeleteCallback(BsonValue id) {
this.id = id;
}

@Override
public Publisher<Void> doInBucket(GridFSBucket bucket) {
@@ -36,9 +36,11 @@ import org.springframework.data.repository.query.ParameterAccessor;
import org.springframework.data.util.TypeInformation;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.CollectionUtils;

import com.mongodb.DBRef;
import org.springframework.util.ObjectUtils;

/**
* Custom {@link ParameterAccessor} that uses a {@link MongoWriter} to serialize parameters into Mongo format.
@@ -91,7 +93,7 @@ public class ConvertingParameterAccessor implements MongoParameterAccessor {
}

public Object getBindableValue(int index) {
return getConvertedValue(delegate.getBindableValue(index), null);
return getConvertedValue(delegate.getBindableValue(index), (TypeInformation<?>) null);
}

@Override
@@ -129,6 +131,11 @@ public class ConvertingParameterAccessor implements MongoParameterAccessor {
return writer.convertToMongoType(value, typeInformation == null ? null : typeInformation.getActualType());
}

public Object getConvertedValue(Object value, MongoPersistentProperty property) {
return writer.convertToMongoType(value, property);
}

public boolean hasBindableNullValue() {
return delegate.hasBindableNullValue();
}
@@ -25,7 +25,7 @@ import java.util.regex.Pattern;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.bson.BsonRegularExpression;

import org.springframework.data.domain.Range;
import org.springframework.data.domain.Range.Bound;
import org.springframework.data.domain.Sort;
@@ -52,7 +52,6 @@ import org.springframework.data.repository.query.parser.Part.IgnoreCaseType;
import org.springframework.data.repository.query.parser.Part.Type;
import org.springframework.data.repository.query.parser.PartTree;
import org.springframework.data.util.Streamable;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;
@@ -69,7 +68,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {

private static final Log LOG = LogFactory.getLog(MongoQueryCreator.class);

private final MongoParameterAccessor accessor;
private final ConvertingParameterAccessor accessor;
private final MappingContext<?, MongoPersistentProperty> context;
private final boolean isGeoNearQuery;

@@ -346,6 +345,17 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
"Argument for creating $regex pattern for property '%s' must not be null", part.getProperty().getSegment()));
}

try {
PersistentPropertyPath<MongoPersistentProperty> persistentPropertyPath = context.getPersistentPropertyPath(part.getProperty());
MongoPersistentProperty leafProperty = persistentPropertyPath.getLeafProperty();/// maybe a call back here
if (leafProperty != null) {
Object convertedValue = accessor.getConvertedValue(value.toString(), leafProperty);
return criteria.regex(toLikeRegex(convertedValue.toString(), part), toRegexOptions(part));
}
} catch (Exception ex) {
System.err.print(ex);
}

return criteria.regex(toLikeRegex(value.toString(), part), toRegexOptions(part));
}

@@ -353,7 +363,6 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
* @param part
* @return the regex options or {@literal null}.
*/
@Nullable
private String toRegexOptions(Part part) {

String regexOptions = null;
@@ -392,18 +401,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {

Streamable<?> streamable = asStreamable(iterator.next());
if (!isSimpleComparisionPossible(part)) {

MatchMode matchMode = toMatchMode(part.getType());
String regexOptions = toRegexOptions(part);

streamable = streamable.map(it -> {
if (it instanceof String value) {

return new BsonRegularExpression(MongoRegexCreator.INSTANCE.toRegularExpression(value, matchMode),
regexOptions);
}
return it;
});
streamable = streamable.map(MongoRegexCreator.INSTANCE::toCaseInsensitiveMatch);
}

return streamable.toList();
@@ -494,7 +492,6 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
return MatchMode.REGEX;
case NEGATING_SIMPLE_PROPERTY:
case SIMPLE_PROPERTY:
case IN:
return MatchMode.EXACT;
default:
return MatchMode.DEFAULT;
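The `@@ -346,6 +345,17 @@` hunk above is the behavioural core of this branch (GH-4346): before a LIKE-style $regex criteria is built, the creator resolves the leaf MongoPersistentProperty and pushes the probe value through ConvertingParameterAccessor.getConvertedValue(value, property), so a property value converter is applied to the probe first. The unit test added near the end of this compare pins the effect down; condensed below (converter, context and getAccessor are fixtures of that test class, and WithValueConverter declares a ReversingValueConverter on its text property):

```java
// From the MongoQueryCreatorUnitTests hunk below: the argument "spring" is reversed by the
// property's value converter before the STARTING_WITH regex is created.
PartTree tree = new PartTree("findByTextStartingWith", WithValueConverter.class);
MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "spring"), context);

assertThat(creator.createQuery()).isEqualTo(query(where("text").regex("^gnirps")));
```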
@@ -13,14 +13,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.convert;
package org.springframework.data.mongodb;

import org.springframework.data.mongodb.core.convert.MongoConversionContext;
import org.springframework.data.mongodb.core.convert.MongoValueConverter;
import org.springframework.lang.Nullable;

/**
* @author Christoph Strobl
*/
class ReversingValueConverter implements MongoValueConverter<String, String> {
public class ReversingValueConverter implements MongoValueConverter<String, String> {

@Nullable
@Override
@@ -193,30 +193,9 @@ class MongoTemplateScrollTests {
window = template.scroll(q.with(window.positionAt(0)).limit(2), Person.class);

assertThat(window).hasSize(2);
assertThat(window).containsOnly(jane_20, jane_40);
assertThat(window.hasNext()).isFalse();
assertThat(window.isLast()).isTrue();
}

@Test // GH-4413
void shouldAllowInitialBackwardSort() {

Person jane_20 = new Person("Jane", 20);
Person jane_40 = new Person("Jane", 40);
Person jane_42 = new Person("Jane", 42);
Person john20 = new Person("John", 20);
Person john40_1 = new Person("John", 40);
Person john40_2 = new Person("John", 40);

template.insertAll(Arrays.asList(john20, john40_1, john40_2, jane_20, jane_40, jane_42));
Query q = new Query(where("firstName").regex("J.*")).with(Sort.by("firstName", "age"));
q.with(ScrollPosition.keyset().backward()).limit(3);

Window<Person> window = template.scroll(q, Person.class);
assertThat(window).containsExactly(john20, john40_1, john40_2);

window = template.scroll(q.with(window.positionAt(0)).limit(3), Person.class);
assertThat(window).containsExactly(jane_20, jane_40, jane_42);
assertThat(window).containsOnly(john20, john40_1);
assertThat(window.hasNext()).isTrue();
assertThat(window.isLast()).isFalse();
}

@ParameterizedTest // GH-4308
@@ -69,8 +69,6 @@ import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.aggregation.StringOperators;
import org.springframework.data.mongodb.core.convert.LazyLoadingProxy;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.IndexField;
@@ -1791,30 +1789,6 @@ public class MongoTemplateTests {
assertThat(result.get(0).date).isNotNull();
}

@Test // GH-4390
void nativeDriverDateTimeCodecShouldBeApplied/*when configured*/() {

MongoTestTemplate ops = new MongoTestTemplate(cfg -> {
cfg.configureConversion(conversion -> {
conversion.customConversions(
MongoCustomConversions.create(MongoConverterConfigurationAdapter::useNativeDriverJavaTimeCodecs));
});
});

TypeWithDate source = new TypeWithDate();
source.id = "id-1";
source.date = Date.from(Instant.now());

ops.save(source);

var dbDate = ops.execute(TypeWithDate.class,
collection -> collection.find(new org.bson.Document("_id", source.id)).first().get("date"));

TypeWithDate target = ops.findOne(query(where("date").is(source.date)), TypeWithDate.class);

assertThat(target.date).isEqualTo(source.date).isEqualTo(dbDate);
}

@Test // DATAMONGO-540
public void findOneAfterUpsertForNonExistingObjectReturnsTheInsertedObject() {
@@ -1,56 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;

import static org.assertj.core.api.AssertionsForClassTypes.*;
import static org.mockito.Mockito.*;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.junit.jupiter.api.Test;
import org.springframework.data.domain.KeysetScrollPosition;
import org.springframework.data.domain.ScrollPosition;
import org.springframework.data.domain.Window;
import org.springframework.data.mongodb.core.EntityOperations.Entity;
import org.springframework.data.mongodb.core.query.Query;

/**
* Unit tests for {@link ScrollUtils}.
*
* @author Mark Paluch
*/
class ScrollUtilsUnitTests {

@Test // GH-4413
void positionShouldRetainScrollDirection() {

Query query = new Query();
query.with(ScrollPosition.keyset().backward());
EntityOperations entityOperationsMock = mock(EntityOperations.class);
Entity entityMock = mock(Entity.class);

when(entityOperationsMock.forEntity(any())).thenReturn(entityMock);
when(entityMock.extractKeys(any(), any())).thenReturn(Map.of("k", "v"));

Window<Integer> window = ScrollUtils.createWindow(query, new ArrayList<>(List.of(1, 2, 3)), Integer.class,
entityOperationsMock);

assertThat(window.positionAt(0)).isInstanceOf(KeysetScrollPosition.class);
assertThat(((KeysetScrollPosition) window.positionAt(0)).scrollsBackward()).isTrue();
}
}
@@ -117,23 +117,6 @@ class FilterExpressionUnitTests {
assertThat($filter).isEqualTo(new Document(expected));
}

@Test // GH-4394
void filterShouldAcceptExpression() {

Document $filter = ArrayOperators.arrayOf(ObjectOperators.valueOf("data.metadata").toArray()).filter().as("item")
.by(ComparisonOperators.valueOf("item.price").greaterThan("field-1")).toDocument(Aggregation.DEFAULT_CONTEXT);

Document expected = Document.parse("""
{ $filter : {
input: { $objectToArray: "$data.metadata" },
as: "item",
cond: { $gt: [ "$$item.price", "$field-1" ] }
}}
""");

assertThat($filter).isEqualTo(expected);
}

private Document extractFilterOperatorFromDocument(Document source) {

List<Object> pipeline = DocumentTestUtils.getAsDBList(source, "pipeline");
@@ -64,16 +64,6 @@ class MongoCustomConversionsUnitTests {
assertThat(conversions.getPropertyValueConversions().hasValueConverter(persistentProperty)).isTrue();
}

@Test // GH-4390
void doesNotReturnConverterForNativeTimeTimeIfUsingDriverCodec() {

MongoCustomConversions conversions = MongoCustomConversions.create(config -> {
config.useNativeDriverJavaTimeCodecs();
});

assertThat(conversions.getCustomWriteTarget(Date.class)).isEmpty();
}

static class DateToZonedDateTimeConverter implements Converter<Date, ZonedDateTime> {

@Override
@@ -29,6 +29,7 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.bson.BsonRegularExpression;
import org.bson.conversions.Bson;
import org.bson.types.Code;
import org.bson.types.ObjectId;
@@ -42,6 +43,7 @@ import org.springframework.data.convert.WritingConverter;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.ReversingValueConverter;
import org.springframework.data.mongodb.core.DocumentTestUtils;
import org.springframework.data.mongodb.core.Person;
import org.springframework.data.mongodb.core.aggregation.ComparisonOperators;
@@ -1455,6 +1457,15 @@ public class QueryMapperUnitTests {
assertThat(mappedObject).isEqualTo(new org.bson.Document("text", "eulav"));
}

@Test // GH-4346
void ignoresValueConverterForNonMatchingType() {

org.bson.Document source = new org.bson.Document("text", new BsonRegularExpression("value"));
org.bson.Document mappedObject = mapper.getMappedObject(source, context.getPersistentEntity(WithPropertyValueConverter.class));

assertThat(mappedObject).isEqualTo(source);
}

@Test // GH-2750
void mapsAggregationExpression() {
@@ -47,9 +47,8 @@ import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.domain.Sort.Order;
import org.springframework.data.mapping.MappingException;
import org.springframework.data.mongodb.ReversingValueConverter;
import org.springframework.data.mongodb.core.DocumentTestUtils;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.DocumentReference;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
@@ -1510,16 +1510,9 @@ public abstract class AbstractPersonRepositoryIntegrationTests implements Dirtie
assertThat(result.get(0).getId().equals(bart.getId()));
}

@Test // GH-3395, GH-4404
@Test // GH-3395
void caseInSensitiveInClause() {

assertThat(repository.findByLastnameIgnoreCaseIn("bEAuFoRd", "maTTheWs")).hasSize(3);

repository.save(new Person("the-first", "The First"));
repository.save(new Person("the-first-one", "The First One"));
repository.save(new Person("the-second", "The Second"));

assertThat(repository.findByLastnameIgnoreCaseIn("tHE fIRsT")).hasSize(1);
}

@Test // GH-3395
@@ -1644,4 +1637,19 @@ public abstract class AbstractPersonRepositoryIntegrationTests implements Dirtie
assertThat(repository.findById(dave.getId()).map(Person::getShippingAddresses))
.contains(Collections.singleton(address));
}

@Test // GH-4346
@DirtiesState
void findCreatingRegexWithValueConverterWorks() {

Person bart = new Person("bart", "simpson");
bart.setNickName("bartman");

operations.save(bart);

List<Person> result = repository.findByNickNameContains("artma");

assertThat(result).hasSize(1);
assertThat(result.get(0).getId().equals(bart.getId()));
}
}
@@ -21,7 +21,9 @@ import java.util.List;
import java.util.Set;
import java.util.UUID;

import org.springframework.data.convert.ValueConverter;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.ReversingValueConverter;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexed;
import org.springframework.data.mongodb.core.index.Indexed;
@@ -78,6 +80,9 @@ public class Person extends Contact {

@DocumentReference User spiritAnimal;

@ValueConverter(ReversingValueConverter.class)
String nickName;

int visits;

public Person() {
@@ -325,6 +330,14 @@ public class Person extends Contact {
this.spiritAnimal = spiritAnimal;
}

public String getNickName() {
return nickName;
}

public void setNickName(String nickName) {
this.nickName = nickName;
}

@Override
public int hashCode() {
@@ -465,4 +465,5 @@ public interface PersonRepository extends MongoRepository<Person, String>, Query

List<Person> findBySpiritAnimal(User user);

List<Person> findByNickNameContains(String nickName);
}
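Taken together, the Person and PersonRepository hunks above wire a property value converter into a derived CONTAINING query. A condensed sketch of the two pieces, mirroring the fixtures in this compare (ReversingValueConverter simply reverses the stored string, so the GH-4346 integration test's probe "artma" still matches a nickname saved as "bartman"):

```java
import java.util.List;

import org.springframework.data.convert.ValueConverter;
import org.springframework.data.mongodb.ReversingValueConverter;
import org.springframework.data.mongodb.repository.MongoRepository;

class NickNameSketch {

	static class Person {

		String id;

		@ValueConverter(ReversingValueConverter.class) // persisted as the reversed string, e.g. "bartman" -> "namtrab"
		String nickName;
	}

	interface PersonRepository extends MongoRepository<Person, String> {

		// With the converter also applied to the probe ("artma" -> "amtra"), the derived
		// regex is matched against the stored, reversed nickname ("namtrab") and still hits.
		List<Person> findByNickNameContains(String nickName);
	}
}
```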
@@ -15,6 +15,7 @@
*/
package org.springframework.data.mongodb.repository.query;

import static org.mockito.Mockito.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;
import static org.springframework.data.mongodb.repository.query.StubParameterAccessor.*;
@@ -24,11 +25,11 @@ import java.lang.reflect.Method;
import java.util.List;
import java.util.regex.Pattern;

import org.bson.BsonRegularExpression;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.data.convert.ValueConverter;
import org.springframework.data.domain.Range;
import org.springframework.data.domain.Range.Bound;
import org.springframework.data.geo.Distance;
@@ -40,7 +41,9 @@ import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.Person;
import org.springframework.data.mongodb.core.Venue;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoConversionContext;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.convert.MongoValueConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.geo.GeoJsonLineString;
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
@@ -57,6 +60,7 @@ import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
import org.springframework.data.repository.Repository;
import org.springframework.data.repository.core.support.DefaultRepositoryMetadata;
import org.springframework.data.repository.query.parser.PartTree;
import org.springframework.lang.Nullable;

/**
* Unit test for {@link MongoQueryCreator}.
@@ -273,17 +277,6 @@ class MongoQueryCreatorUnitTests {
assertThat(query).isEqualTo(query(where("firstName").regex("^dave$", "i")));
}

@Test // GH-4404
void createsQueryWithFindByInClauseHavingIgnoreCaseCorrectly() {

PartTree tree = new PartTree("findAllByFirstNameInIgnoreCase", Person.class);
MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, List.of("da've", "carter")), context);

Query query = creator.createQuery();
assertThat(query).isEqualTo(query(where("firstName")
.in(List.of(new BsonRegularExpression("^\\Qda've\\E$", "i"), new BsonRegularExpression("^carter$", "i")))));
}

@Test // DATAMONGO-770
void createsQueryWithFindByNotIgnoreCaseCorrectly() {

@@ -669,6 +662,16 @@ class MongoQueryCreatorUnitTests {
assertThat(creator.createQuery()).isEqualTo(query(where("location").nearSphere(point).maxDistance(1000.0D)));
}

@Test // GH-4346
void likeQueriesShouldApplyPropertyValueConverterWhenCreatingRegex() {

PartTree tree = new PartTree("findByTextStartingWith", WithValueConverter.class);
MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "spring"), context);
Query query = creator.createQuery();

assertThat(query).isEqualTo(query(where("text").regex("^gnirps")));
}

interface PersonRepository extends Repository<Person, Long> {

List<Person> findByLocationNearAndFirstname(Point location, Distance maxDistance, String firstname);
@@ -704,4 +707,34 @@ class MongoQueryCreatorUnitTests {

@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE) Point geo;
}

static class WithValueConverter {

@ValueConverter(ReversingValueConverter.class)
String text;
}

static class ReversingValueConverter implements MongoValueConverter<String, String> {

@Nullable
@Override
public String read(@Nullable String value, MongoConversionContext context) {
return reverse(value);
}

@Nullable
@Override
public String write(@Nullable String value, MongoConversionContext context) {
return reverse(value);
}

private String reverse(String source) {

if (source == null) {
return null;
}

return new StringBuilder(source).reverse().toString();
}
}
}
@@ -9,6 +9,10 @@

<appender name="no-op" class="ch.qos.logback.core.helpers.NOPAppender" />

<!--
<logger name="org.mongodb.driver.protocol" level="DEBUG" />
-->

<!--
<logger name="org.springframework" level="debug" />
-->
@@ -57,7 +57,7 @@ To create a Spring project in STS:
<repository>
<id>spring-milestone</id>
<name>Spring Maven MILESTONE Repository</name>
<url>https://repo.spring.io/milestone</url>
<url>https://repo.spring.io/libs-milestone</url>
</repository>
</repositories>
----
@@ -1,4 +1,4 @@
Spring Data MongoDB 4.1.1 (2023.0.1)
Spring Data MongoDB 4.1 GA (2023.0.0)
Copyright (c) [2010-2019] Pivotal Software, Inc.

This product is licensed to you under the Apache License, Version 2.0 (the "License").
@@ -45,6 +45,5 @@ conditions of the subcomponent's license, as noted in the LICENSE file.