Compare commits
14 Commits
issue/3380...4.1.1

| Author | SHA1 | Date |
|---|---|---|
|  | 4f560f2ec3 |  |
|  | 752d8c821e |  |
|  | b478e7068b |  |
|  | 02fe73d052 |  |
|  | 29021d132f |  |
|  | 4d51d27cda |  |
|  | e2dc76eea3 |  |
|  | aecfd45968 |  |
|  | 0c36929833 |  |
|  | bedd94fe17 |  |
|  | b85b53443b |  |
|  | 99070162bb |  |
|  | 9218b22d12 |  |
|  | e5aab51add |  |
4 .mvn/wrapper/maven-wrapper.properties vendored
@@ -1,2 +1,2 @@
-#Thu Apr 06 16:16:28 CEST 2023
-distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.1/apache-maven-3.9.1-bin.zip
+#Tue Jun 13 08:53:53 CEST 2023
+distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.2/apache-maven-3.9.2-bin.zip
2 CI.adoc
@@ -16,7 +16,7 @@ All of these use cases are great reasons to essentially run what the CI server d
IMPORTANT: To do this you must have Docker installed on your machine.

-1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3:latest /bin/bash`
+1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk8-with-mongodb-4.0:latest /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+
2 Jenkinsfile vendored
@@ -9,7 +9,7 @@ pipeline {

	triggers {
		pollSCM 'H/10 * * * *'
-		upstream(upstreamProjects: "spring-data-commons/main", threshold: hudson.model.Result.SUCCESS)
+		upstream(upstreamProjects: "spring-data-commons/3.1.x", threshold: hudson.model.Result.SUCCESS)
	}

	options {
@@ -10,7 +10,7 @@ All of these use cases are great reasons to essentially run what Concourse does
IMPORTANT: To do this you must have Docker installed on your machine.

-1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash`
+1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-8-jdk-with-mongodb /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+
@@ -23,7 +23,7 @@ Since the container is binding to your source, you can make edits from your IDE
If you need to test the `build.sh` script, do this:

1. `mkdir /tmp/spring-data-mongodb-artifactory`
-2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash`
+2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-8-jdk-with-mongodb /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github` and the temporary
artifactory output directory at `spring-data-mongodb-artifactory`.
@@ -36,4 +36,4 @@ IMPORTANT: `build.sh` doesn't actually push to Artifactory so don't worry about
It just deploys to a local folder. That way, the `artifactory-resource` later in the pipeline can pick up these artifacts
and deliver them to artifactory.

-NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
+NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
36 pom.xml
@@ -5,7 +5,7 @@

	<groupId>org.springframework.data</groupId>
	<artifactId>spring-data-mongodb-parent</artifactId>
-	<version>4.2.x-3380-SNAPSHOT</version>
+	<version>4.1.1</version>
	<packaging>pom</packaging>

	<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
	<parent>
		<groupId>org.springframework.data.build</groupId>
		<artifactId>spring-data-parent</artifactId>
-		<version>3.2.0-SNAPSHOT</version>
+		<version>3.1.1</version>
	</parent>

	<modules>
@@ -26,7 +26,7 @@
	<properties>
		<project.type>multi</project.type>
		<dist.id>spring-data-mongodb</dist.id>
-		<springdata.commons>3.2.0-SNAPSHOT</springdata.commons>
+		<springdata.commons>3.1.1</springdata.commons>
		<mongo>4.9.1</mongo>
		<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
		<jmh.version>1.19</jmh.version>
@@ -144,34 +144,8 @@
	</dependencies>

	<repositories>
		<repository>
			<id>spring-libs-snapshot</id>
			<url>https://repo.spring.io/libs-snapshot</url>
			<snapshots>
				<enabled>true</enabled>
			</snapshots>
		</repository>
		<repository>
			<id>sonatype-libs-snapshot</id>
			<url>https://oss.sonatype.org/content/repositories/snapshots</url>
			<releases>
				<enabled>false</enabled>
			</releases>
			<snapshots>
				<enabled>true</enabled>
			</snapshots>
		</repository>
	</repositories>

	<pluginRepositories>
		<pluginRepository>
			<id>spring-plugins-release</id>
			<url>https://repo.spring.io/plugins-release</url>
		</pluginRepository>
		<pluginRepository>
			<id>spring-libs-milestone</id>
			<url>https://repo.spring.io/libs-milestone</url>
		</pluginRepository>
	</pluginRepositories>

</project>
@@ -7,7 +7,7 @@
	<parent>
		<groupId>org.springframework.data</groupId>
		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>4.2.x-3380-SNAPSHOT</version>
+		<version>4.1.1</version>
		<relativePath>../pom.xml</relativePath>
	</parent>

@@ -15,7 +15,7 @@
	<parent>
		<groupId>org.springframework.data</groupId>
		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>4.2.x-3380-SNAPSHOT</version>
+		<version>4.1.1</version>
		<relativePath>../pom.xml</relativePath>
	</parent>

@@ -80,15 +80,4 @@

	</build>

-	<pluginRepositories>
-		<pluginRepository>
-			<id>spring-plugins-release</id>
-			<url>https://repo.spring.io/plugins-release</url>
-		</pluginRepository>
-		<pluginRepository>
-			<id>spring-plugins-snapshot</id>
-			<url>https://repo.spring.io/libs-snapshot</url>
-		</pluginRepository>
-	</pluginRepositories>
-
</project>

@@ -13,7 +13,7 @@
	<parent>
		<groupId>org.springframework.data</groupId>
		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>4.2.x-3380-SNAPSHOT</version>
+		<version>4.1.1</version>
		<relativePath>../pom.xml</relativePath>
	</parent>
@@ -19,7 +19,6 @@ import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
-import java.util.function.Consumer;

import org.bson.UuidRepresentation;
import org.springframework.beans.factory.config.BeanDefinition;
@@ -35,10 +34,7 @@ import org.springframework.data.mongodb.MongoManagedTypes;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
import org.springframework.data.mongodb.core.mapping.Document;
-import org.springframework.data.mongodb.core.mapping.MappingConfig;
-import org.springframework.data.mongodb.core.mapping.MappingConfig.MappingRuleCustomizer;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
-import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;
@@ -91,16 +87,10 @@ public abstract class MongoConfigurationSupport {
		mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder());
		mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
		mappingContext.setAutoIndexCreation(autoIndexCreation());
-		mappingContext.setMappingConfig(mappingConfig());

		return mappingContext;
	}

-	@Nullable
-	public MappingConfig mappingConfig() {
-		return null;
-	}
-
	/**
	 * @return new instance of {@link MongoManagedTypes}.
	 * @throws ClassNotFoundException
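The hunk above removes the programmatic mapping hook (`mappingConfig()`) that the issue/3380 branch adds to `MongoConfigurationSupport`; it is not part of 4.1.1. A minimal, hypothetical sketch of how that hook would be overridden on the branch — the `Person` type, its properties, and the database name are illustrative assumptions:

```java
// Sketch only: uses the MappingConfig API from the issue/3380 branch (absent in 4.1.1).
@Configuration
class ProgrammaticMappingConfiguration extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "example-db"; // assumed database name
	}

	@Override
	public MappingConfig mappingConfig() {
		return MappingConfig.mappingRules(rules -> rules.add(Person.class, cfg -> {
			cfg.namespace("people"); // store Person documents in the 'people' collection
			cfg.define(Person::getSsn, PropertyConfig::useAsId); // treat 'ssn' as the id property
			cfg.define(Person::getFirstname, property -> property.mappedName("first_name")); // rename the stored field
		}));
	}
}
```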
@@ -61,7 +61,8 @@ class ScrollUtils {

		Document sortObject = query.getSortObject();
		KeysetScrollPosition keyset = query.getKeyset();
-		KeysetScrollDirector director = KeysetScrollDirector.of(keyset.getDirection());
+		Direction direction = keyset.getDirection();
+		KeysetScrollDirector director = KeysetScrollDirector.of(direction);

		List<T> resultsToUse = director.postPostProcessResults(result, query.getLimit());

@@ -71,7 +72,7 @@ class ScrollUtils {
			Entity<T> entity = operations.forEntity(last);

			Map<String, Object> keys = entity.extractKeys(sortObject, sourceType);
-			return ScrollPosition.forward(keys);
+			return ScrollPosition.of(keys, direction);
		};

		return Window.from(resultsToUse, positionFunction, hasMoreElements(result, query.getLimit()));
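The two hunks above adjust keyset scrolling so that the `ScrollPosition` produced for a window keeps the direction of the original request (`ScrollPosition.of(keys, direction)`) instead of always being a forward position. A minimal usage sketch — `template` is an existing `MongoTemplate`, `Person` and its properties are assumptions, and `query`/`where` are the usual static imports:

```java
// Scroll through Person documents in stable keyset order, ten at a time.
Query byLastname = query(where("lastname").is("Doe")).with(Sort.by("firstname", "id")).limit(10);

Window<Person> window = template.query(Person.class).matching(byLastname).scroll(ScrollPosition.keyset());

while (window.hasNext()) {
	// With the change above, the resume position also carries the direction of the original scroll.
	ScrollPosition next = window.positionAt(window.size() - 1);
	window = template.query(Person.class).matching(byLastname).scroll(next);
}
```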
@@ -79,7 +79,7 @@ public class ArrayOperators {

		private final @Nullable String fieldReference;
		private final @Nullable AggregationExpression expression;
-		private final @Nullable Collection values;
+		private final @Nullable Collection<?> values;

		/**
		 * Creates new {@link ArrayOperatorFactory} for given {@literal fieldReference}.
@@ -214,6 +214,10 @@ public class ArrayOperators {
				return Filter.filter(fieldReference);
			}

+			if (usesExpression()) {
+				return Filter.filter(expression);
+			}
+
			Assert.state(values != null, "Values must not be null");
			return Filter.filter(new ArrayList<>(values));
		}
@@ -317,7 +321,8 @@ public class ArrayOperators {
		}

		/**
-		 * Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given {@link Sort order}.
+		 * Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given {@link Sort
+		 * order}.
		 *
		 * @return new instance of {@link SortArray}.
		 * @since 4.0
@@ -397,8 +402,8 @@ public class ArrayOperators {
		}

		/**
-		 * Creates new {@link AggregationExpression} that return the last element in the given array.
-		 * <strong>NOTE:</strong> Requires MongoDB 4.4 or later.
+		 * Creates new {@link AggregationExpression} that return the last element in the given array. <strong>NOTE:</strong>
+		 * Requires MongoDB 4.4 or later.
		 *
		 * @return new instance of {@link Last}.
		 * @since 3.4
@@ -649,6 +654,19 @@ public class ArrayOperators {
			return new FilterExpressionBuilder().filter(field);
		}

+		/**
+		 * Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return never {@literal null}.
+		 * @since 4.2
+		 */
+		public static AsBuilder filter(AggregationExpression expression) {
+
+			Assert.notNull(expression, "Field must not be null");
+			return new FilterExpressionBuilder().filter(expression);
+		}
+
		/**
		 * Set the {@literal values} to apply the {@code $filter} to.
		 *
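The last hunk above adds a `Filter.filter(AggregationExpression)` factory, so `$filter` can operate on an array produced by another expression rather than only on a field reference or literal values. A hedged usage sketch — the field names (`orders`, `archivedOrders`, `price`) are assumptions:

```java
// Build the $filter input from an expression (union of two array fields) instead of a single field.
AggregationExpression allOrders = SetOperators.SetUnion.arrayAsSet("orders").union("archivedOrders");

AggregationExpression expensiveOrders = ArrayOperators.Filter.filter(allOrders)
		.as("order") // variable name available inside the condition
		.by(ComparisonOperators.Gte.valueOf("order.price").greaterThanEqualToValue(100));

Aggregation aggregation = Aggregation.newAggregation(
		Aggregation.project().and(expensiveOrders).as("expensiveOrders"));
```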
@@ -681,7 +699,16 @@ public class ArrayOperators {
		}

		private Object getMappedInput(AggregationOperationContext context) {
-			return input instanceof Field field ? context.getReference(field).toString() : input;
+
+			if (input instanceof Field field) {
+				return context.getReference(field).toString();
+			}
+
+			if (input instanceof AggregationExpression expression) {
+				return expression.toDocument(context);
+			}
+
+			return input;
		}

		private Object getMappedCondition(AggregationOperationContext context) {
@@ -715,6 +742,15 @@ public class ArrayOperators {
			 * @return
			 */
			AsBuilder filter(Field field);

+			/**
+			 * Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to.
+			 *
+			 * @param expression must not be {@literal null}.
+			 * @return
+			 * @since 4.1.1
+			 */
+			AsBuilder filter(AggregationExpression expression);
		}

		/**
@@ -797,6 +833,14 @@ public class ArrayOperators {
				return this;
			}

+			@Override
+			public AsBuilder filter(AggregationExpression expression) {
+
+				Assert.notNull(expression, "Expression must not be null");
+				filter.input = expression;
+				return this;
+			}
+
			@Override
			public ConditionBuilder as(String variableName) {
@@ -1333,7 +1377,7 @@ public class ArrayOperators {
				Assert.notNull(expressions, "PropertyExpressions must not be null");

				return new Reduce(Fields.field(fieldReference), initialValue,
-						Arrays.<AggregationExpression>asList(expressions));
+						Arrays.<AggregationExpression> asList(expressions));
			}
		};
	}
@@ -1690,7 +1734,7 @@ public class ArrayOperators {
	 * @author Christoph Strobl
	 * @author Shashank Sharma
	 * @see <a href=
-	 *      "https://docs.mongodb.com/manual/reference/operator/aggregation/in/">https://docs.mongodb.com/manual/reference/operator/aggregation/in/</a>
+	 *      "https://docs.mongodb.com/manual/reference/operator/aggregation/in/">https://docs.mongodb.com/manual/reference/operator/aggregation/in/</a>
	 * @since 2.2
	 */
	public static class In extends AbstractAggregationExpression {
@@ -1779,7 +1823,7 @@ public class ArrayOperators {
	 *
	 * @author Christoph Strobl
	 * @see <a href=
-	 *      "https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/">https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/</a>
+	 *      "https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/">https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/</a>
	 * @since 2.1
	 */
	public static class ArrayToObject extends AbstractAggregationExpression {
@@ -1976,7 +2020,7 @@ public class ArrayOperators {

		/**
		 * Set the order to put elements in.
-		 *
+		 *
		 * @param sort must not be {@literal null}.
		 * @return new instance of {@link SortArray}.
		 */
@@ -487,22 +487,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
&& instanceCreatorMetadata.hasParameters() ? getParameterProvider(context, entity, documentAccessor, evaluator)
|
||||
: NoOpParameterValueProvider.INSTANCE;
|
||||
|
||||
EntityInstantiator instantiator = entity.getInstanceCreator();
|
||||
if(instantiator != null) {
|
||||
provider = new ParameterValueProvider() {
|
||||
@Nullable
|
||||
public Object getParameterValue(Parameter parameter) {
|
||||
String name = parameter.getName();
|
||||
if (name == null) {
|
||||
throw new IllegalArgumentException(String.format("Parameter %s does not have a name", parameter));
|
||||
} else {
|
||||
return documentAccessor.get(entity.getRequiredPersistentProperty(name));
|
||||
}
|
||||
}
|
||||
};
|
||||
} else {
|
||||
instantiator = instantiators.getInstantiatorFor(entity);
|
||||
}
|
||||
EntityInstantiator instantiator = instantiators.getInstantiatorFor(entity);
|
||||
S instance = instantiator.createInstance(entity, provider);
|
||||
|
||||
if (entity.requiresPropertyPopulation()) {
|
||||
|
||||
@@ -33,7 +33,6 @@ import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import org.bson.BsonReader;
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonUndefined;
|
||||
import org.bson.BsonWriter;
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.Codec;
|
||||
@@ -105,7 +104,6 @@ abstract class MongoConverters {
|
||||
converters.add(BinaryToByteArrayConverter.INSTANCE);
|
||||
converters.add(BsonTimestampToInstantConverter.INSTANCE);
|
||||
|
||||
converters.add(reading(BsonUndefined.class, Object.class, it -> null));
|
||||
converters.add(reading(String.class, URI.class, URI::create).andWriting(URI::toString));
|
||||
|
||||
return converters;
|
||||
|
||||
@@ -41,6 +41,7 @@ import org.springframework.data.convert.PropertyValueConversions;
|
||||
import org.springframework.data.convert.PropertyValueConverter;
|
||||
import org.springframework.data.convert.PropertyValueConverterFactory;
|
||||
import org.springframework.data.convert.PropertyValueConverterRegistrar;
|
||||
import org.springframework.data.convert.ReadingConverter;
|
||||
import org.springframework.data.convert.SimplePropertyValueConversions;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
@@ -361,6 +362,7 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
|
||||
}, this.propertyValueConversions);
|
||||
}
|
||||
|
||||
@ReadingConverter
|
||||
private enum DateToUtcLocalDateTimeConverter implements Converter<Date, LocalDateTime> {
|
||||
INSTANCE;
|
||||
|
||||
@@ -370,6 +372,7 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
|
||||
}
|
||||
}
|
||||
|
||||
@ReadingConverter
|
||||
private enum DateToUtcLocalTimeConverter implements Converter<Date, LocalTime> {
|
||||
INSTANCE;
|
||||
|
||||
@@ -379,6 +382,7 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
|
||||
}
|
||||
}
|
||||
|
||||
@ReadingConverter
|
||||
private enum DateToUtcLocalDateConverter implements Converter<Date, LocalDate> {
|
||||
INSTANCE;
|
||||
|
||||
|
||||
@@ -31,9 +31,7 @@ import org.springframework.data.mapping.AssociationHandler;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PropertyHandler;
|
||||
import org.springframework.data.mapping.model.BasicPersistentEntity;
|
||||
import org.springframework.data.mapping.model.EntityInstantiator;
|
||||
import org.springframework.data.mongodb.MongoCollectionUtils;
|
||||
import org.springframework.data.mongodb.core.mapping.MappingConfig.EntityConfig;
|
||||
import org.springframework.data.mongodb.util.encryption.EncryptionUtils;
|
||||
import org.springframework.data.spel.ExpressionDependencies;
|
||||
import org.springframework.data.util.Lazy;
|
||||
@@ -74,11 +72,6 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
private final @Nullable Expression collationExpression;
|
||||
|
||||
private final ShardKey shardKey;
|
||||
private EntityConfig entityConfig;
|
||||
|
||||
public BasicMongoPersistentEntity(TypeInformation<T> typeInformation) {
|
||||
this(typeInformation, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link BasicMongoPersistentEntity} with the given {@link TypeInformation}. Will default the
|
||||
@@ -86,18 +79,12 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
*
|
||||
* @param typeInformation must not be {@literal null}.
|
||||
*/
|
||||
public BasicMongoPersistentEntity(TypeInformation<T> typeInformation, EntityConfig<T> config) {
|
||||
public BasicMongoPersistentEntity(TypeInformation<T> typeInformation) {
|
||||
|
||||
super(typeInformation, MongoPersistentPropertyComparator.INSTANCE);
|
||||
|
||||
this.entityConfig = config;
|
||||
|
||||
Class<?> rawType = typeInformation.getType();
|
||||
String fallback = MongoCollectionUtils.getPreferredCollectionName(rawType);
|
||||
if (config != null) {
|
||||
fallback = config.collectionNameOrDefault(() -> MongoCollectionUtils.getPreferredCollectionName(rawType));
|
||||
|
||||
}
|
||||
|
||||
if (this.isAnnotationPresent(Document.class)) {
|
||||
Document document = this.getRequiredAnnotation(Document.class);
|
||||
@@ -262,12 +249,6 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
|
||||
Assert.notNull(property, "MongoPersistentProperty must not be null");
|
||||
|
||||
if (entityConfig != null) {
|
||||
if (entityConfig.isIdProperty(property)) {
|
||||
return property;
|
||||
}
|
||||
}
|
||||
|
||||
if (!property.isIdProperty()) {
|
||||
return null;
|
||||
}
|
||||
@@ -359,11 +340,6 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public EntityInstantiator getInstanceCreator() {
|
||||
return this.entityConfig != null ? this.entityConfig.getInstantiator() : null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<Object> getEncryptionKeyIds() {
|
||||
|
||||
@@ -422,9 +398,9 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
|
||||
if (persistentProperty.isDbReference() && persistentProperty.getDBRef().lazy()) {
|
||||
if (persistentProperty.isArray() || Modifier.isFinal(persistentProperty.getActualType().getModifiers())) {
|
||||
throw new MappingException(
|
||||
String.format("Invalid lazy DBRef property for %s; Found %s which must not be an array nor a final class",
|
||||
persistentProperty.getField(), persistentProperty.getActualType()));
|
||||
throw new MappingException(String.format(
|
||||
"Invalid lazy DBRef property for %s; Found %s which must not be an array nor a final class",
|
||||
persistentProperty.getField(), persistentProperty.getActualType()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,7 +34,6 @@ import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.Property;
|
||||
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.mongodb.core.mapping.MappingConfig.PropertyConfig;
|
||||
import org.springframework.data.mongodb.util.encryption.EncryptionUtils;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.expression.EvaluationContext;
|
||||
@@ -74,12 +73,6 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
}
|
||||
|
||||
private final FieldNamingStrategy fieldNamingStrategy;
|
||||
PropertyConfig<?, ?> propertyConfig;
|
||||
|
||||
public BasicMongoPersistentProperty(Property property, MongoPersistentEntity<?> owner,
|
||||
SimpleTypeHolder simpleTypeHolder, @Nullable FieldNamingStrategy fieldNamingStrategy) {
|
||||
this(property, owner, simpleTypeHolder, fieldNamingStrategy, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link BasicMongoPersistentProperty}.
|
||||
@@ -90,12 +83,11 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
* @param fieldNamingStrategy can be {@literal null}.
|
||||
*/
|
||||
public BasicMongoPersistentProperty(Property property, MongoPersistentEntity<?> owner,
|
||||
SimpleTypeHolder simpleTypeHolder, @Nullable FieldNamingStrategy fieldNamingStrategy, @Nullable PropertyConfig<?,?> propertyConfig) {
|
||||
SimpleTypeHolder simpleTypeHolder, @Nullable FieldNamingStrategy fieldNamingStrategy) {
|
||||
|
||||
super(property, owner, simpleTypeHolder);
|
||||
this.fieldNamingStrategy = fieldNamingStrategy == null ? PropertyNameFieldNamingStrategy.INSTANCE
|
||||
: fieldNamingStrategy;
|
||||
this.propertyConfig = propertyConfig;
|
||||
|
||||
if (isIdProperty() && hasExplicitFieldName()) {
|
||||
|
||||
@@ -123,10 +115,6 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
return true;
|
||||
}
|
||||
|
||||
if(propertyConfig != null && propertyConfig.isId()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// We need to support a wider range of ID types than just the ones that can be converted to an ObjectId
|
||||
// but still we need to check if there happens to be an explicit name set
|
||||
return SUPPORTED_ID_PROPERTY_NAMES.contains(getName()) && !hasExplicitFieldName();
|
||||
@@ -144,10 +132,6 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
*/
|
||||
public String getFieldName() {
|
||||
|
||||
if(propertyConfig != null && StringUtils.hasText(propertyConfig.getTargetName())) {
|
||||
return propertyConfig.getTargetName();
|
||||
}
|
||||
|
||||
if (isIdProperty()) {
|
||||
|
||||
if (getOwner().getIdProperty() == null) {
|
||||
|
||||
@@ -18,7 +18,6 @@ package org.springframework.data.mongodb.core.mapping;
|
||||
import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.Property;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.mongodb.core.mapping.MappingConfig.PropertyConfig;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
@@ -48,8 +47,8 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
|
||||
* @param fieldNamingStrategy can be {@literal null}.
|
||||
*/
|
||||
public CachingMongoPersistentProperty(Property property, MongoPersistentEntity<?> owner,
|
||||
SimpleTypeHolder simpleTypeHolder, @Nullable FieldNamingStrategy fieldNamingStrategy, PropertyConfig config) {
|
||||
super(property, owner, simpleTypeHolder, fieldNamingStrategy, config);
|
||||
SimpleTypeHolder simpleTypeHolder, @Nullable FieldNamingStrategy fieldNamingStrategy) {
|
||||
super(property, owner, simpleTypeHolder, fieldNamingStrategy);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -1,238 +0,0 @@
|
||||
/*
|
||||
* Copyright 2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import org.springframework.data.mapping.Parameter;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.SimplePropertyHandler;
|
||||
import org.springframework.data.mapping.model.EntityInstantiator;
|
||||
import org.springframework.data.mapping.model.ParameterValueProvider;
|
||||
import org.springframework.data.mapping.model.PropertyValueProvider;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.data.util.MethodInvocationRecorder;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2023/06
|
||||
*/
|
||||
public class MappingConfig {
|
||||
|
||||
private final Map<Class, EntityConfig<?>> entityConfigMap;
|
||||
|
||||
MappingConfig(Map<Class, EntityConfig<?>> entityConfigMap) {
|
||||
this.entityConfigMap = entityConfigMap;
|
||||
}
|
||||
|
||||
public static MappingConfig none() {
|
||||
return new MappingConfig(Collections.emptyMap());
|
||||
}
|
||||
|
||||
public static MappingConfig mappingRules(Consumer<MappingRuleCustomizer> customizer) {
|
||||
MappingConfig mappingConfig = new MappingConfig(new HashMap<>());
|
||||
customizer.accept(new MappingRuleCustomizer() {
|
||||
@Override
|
||||
public <T> MappingRuleCustomizer add(Class<T> type, Consumer<EntityConfig<T>> cfg) {
|
||||
|
||||
EntityConfig<T> entityConfig = (EntityConfig<T>) mappingConfig.entityConfigMap.computeIfAbsent(type,
|
||||
(it) -> EntityConfig.configure(it));
|
||||
cfg.accept(entityConfig);
|
||||
return this;
|
||||
}
|
||||
});
|
||||
return mappingConfig;
|
||||
}
|
||||
|
||||
public interface MappingRuleCustomizer {
|
||||
<T> MappingRuleCustomizer add(Class<T> type, Consumer<EntityConfig<T>> cfg);
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public <T> EntityConfig<T> getEntityConfig(Class<T> type) {
|
||||
return (EntityConfig<T>) entityConfigMap.get(type);
|
||||
}
|
||||
|
||||
public static class EntityConfig<T> {
|
||||
|
||||
private final Class<T> type;
|
||||
|
||||
@Nullable private Supplier<String> collectionName;
|
||||
Map<String, PropertyConfig<T, ?>> propertyConfigMap = new HashMap<>();
|
||||
EntityInstantiator instantiator;
|
||||
|
||||
public EntityConfig(Class<T> type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public static <T, P> EntityConfig<T> configure(Class<T> type) {
|
||||
return new EntityConfig<>(type);
|
||||
}
|
||||
|
||||
public <P> EntityConfig<T> define(String name, Consumer<PropertyConfig<T, P>> cfg) {
|
||||
|
||||
PropertyConfig<T, P> config = (PropertyConfig<T, P>) propertyConfigMap.computeIfAbsent(name,
|
||||
(key) -> new PropertyConfig<>(this.type, key));
|
||||
cfg.accept(config);
|
||||
return this;
|
||||
}
|
||||
|
||||
public <P> EntityConfig<T> define(Function<T, P> property, Consumer<PropertyConfig<T, P>> cfg) {
|
||||
|
||||
String propertyName = MethodInvocationRecorder.forProxyOf(type).record(property).getPropertyPath()
|
||||
.orElseThrow(() -> new IllegalArgumentException("Cannot obtain property name"));
|
||||
|
||||
return define(propertyName, cfg);
|
||||
}
|
||||
|
||||
public EntityConfig<T> namespace(String name) {
|
||||
return namespace(() -> name);
|
||||
}
|
||||
|
||||
public EntityConfig<T> namespace(Supplier<String> name) {
|
||||
this.collectionName = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
boolean isIdProperty(PersistentProperty<?> property) {
|
||||
PropertyConfig<T, ?> propertyConfig = propertyConfigMap.get(property.getName());
|
||||
if (propertyConfig == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return propertyConfig.isId();
|
||||
}
|
||||
|
||||
String collectionNameOrDefault(Supplier<String> fallback) {
|
||||
return collectionName != null ? collectionName.get() : fallback.get();
|
||||
}
|
||||
|
||||
public EntityInstantiator getInstantiator() {
|
||||
return instantiator;
|
||||
}
|
||||
|
||||
public EntityConfig<T> entityCreator(Function<Arguments<T>, T> createFunction) {
|
||||
|
||||
instantiator = new EntityInstantiator() {
|
||||
|
||||
@Override
|
||||
public <T, E extends PersistentEntity<? extends T, P>, P extends PersistentProperty<P>> T createInstance(
|
||||
E entity, ParameterValueProvider<P> provider) {
|
||||
Map<String, Object> targetMap = new HashMap<>();
|
||||
|
||||
|
||||
PropertyValueProvider pvv = provider instanceof PropertyValueProvider pvp ? pvp : new PropertyValueProvider<P>() {
|
||||
@Nullable
|
||||
@Override
|
||||
public <T> T getPropertyValue(P property) {
|
||||
Parameter parameter = new Parameter<>(property.getName(), (TypeInformation) property.getTypeInformation(),
|
||||
new Annotation[] {}, null);
|
||||
return (T) provider.getParameterValue(parameter);
|
||||
}
|
||||
};
|
||||
|
||||
entity.doWithProperties((SimplePropertyHandler) property -> {
|
||||
targetMap.put(property.getName(), pvv.getPropertyValue(property));
|
||||
});
|
||||
|
||||
return (T) createFunction.apply(new Arguments() {
|
||||
|
||||
private Map<Function, String> resolvedName = new HashMap<>();
|
||||
|
||||
@Override
|
||||
public Object get(String arg) {
|
||||
return targetMap.get(arg);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class getType() {
|
||||
return entity.getType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object get(Function property) {
|
||||
|
||||
String name = resolvedName.computeIfAbsent(property, key -> (String) MethodInvocationRecorder.forProxyOf(getType()).record(property).getPropertyPath().orElse(""));
|
||||
return get(name);
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
return this;
|
||||
}
|
||||
|
||||
public interface Arguments<T> {
|
||||
|
||||
<V> V get(String arg);
|
||||
|
||||
default <V> V get(Function<T, V> property) {
|
||||
String propertyName = MethodInvocationRecorder.forProxyOf(getType()).record(property).getPropertyPath()
|
||||
.orElseThrow(() -> new IllegalArgumentException("Cannot obtain property name"));
|
||||
|
||||
return get(propertyName);
|
||||
}
|
||||
|
||||
Class<T> getType();
|
||||
}
|
||||
}
|
||||
|
||||
public static class PropertyConfig<T, P> {
|
||||
|
||||
private final Class<T> owingType;
|
||||
private final String propertyName;
|
||||
private String fieldName;
|
||||
private boolean isId;
|
||||
private boolean isTransient;
|
||||
|
||||
public PropertyConfig(Class<T> owingType, String propertyName) {
|
||||
this.owingType = owingType;
|
||||
this.propertyName = propertyName;
|
||||
}
|
||||
|
||||
public PropertyConfig<T, P> useAsId() {
|
||||
this.isId = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
public boolean isId() {
|
||||
return isId;
|
||||
}
|
||||
|
||||
public PropertyConfig<T, P> setTransient() {
|
||||
this.isTransient = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
public PropertyConfig<T, P> mappedName(String fieldName) {
|
||||
this.fieldName = fieldName;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String getTargetName() {
|
||||
return this.fieldName;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -16,7 +16,6 @@
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.util.AbstractMap;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
@@ -27,7 +26,6 @@ import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.Property;
|
||||
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.mongodb.core.mapping.MappingConfig.MappingRuleCustomizer;
|
||||
import org.springframework.data.util.NullableWrapperConverters;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -47,7 +45,6 @@ public class MongoMappingContext extends AbstractMappingContext<MongoPersistentE
|
||||
|
||||
private FieldNamingStrategy fieldNamingStrategy = DEFAULT_NAMING_STRATEGY;
|
||||
private boolean autoIndexCreation = false;
|
||||
private MappingConfig mappingConfig;
|
||||
|
||||
@Nullable
|
||||
private ApplicationContext applicationContext;
|
||||
@@ -70,14 +67,6 @@ public class MongoMappingContext extends AbstractMappingContext<MongoPersistentE
|
||||
this.fieldNamingStrategy = fieldNamingStrategy == null ? DEFAULT_NAMING_STRATEGY : fieldNamingStrategy;
|
||||
}
|
||||
|
||||
public void setMappingConfig(MappingConfig mappingConfig) {
|
||||
this.mappingConfig = mappingConfig;
|
||||
}
|
||||
|
||||
public void mappingRules(Consumer<MappingRuleCustomizer> customizer) {
|
||||
setMappingConfig(MappingConfig.mappingRules(customizer));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean shouldCreatePersistentEntityFor(TypeInformation<?> type) {
|
||||
|
||||
@@ -91,12 +80,12 @@ public class MongoMappingContext extends AbstractMappingContext<MongoPersistentE
|
||||
@Override
|
||||
public MongoPersistentProperty createPersistentProperty(Property property, MongoPersistentEntity<?> owner,
|
||||
SimpleTypeHolder simpleTypeHolder) {
|
||||
return new CachingMongoPersistentProperty(property, owner, simpleTypeHolder, fieldNamingStrategy, mappingConfig != null ? mappingConfig.getEntityConfig(owner.getType()).propertyConfigMap.get(property.getName()) : null);
|
||||
return new CachingMongoPersistentProperty(property, owner, simpleTypeHolder, fieldNamingStrategy);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected <T> BasicMongoPersistentEntity<T> createPersistentEntity(TypeInformation<T> typeInformation) {
|
||||
return new BasicMongoPersistentEntity<>(typeInformation, mappingConfig != null ? mappingConfig.getEntityConfig(typeInformation.getType()) : null);
|
||||
return new BasicMongoPersistentEntity<>(typeInformation);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -18,7 +18,6 @@ package org.springframework.data.mongodb.core.mapping;
|
||||
import java.util.Collection;
|
||||
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.model.EntityInstantiator;
|
||||
import org.springframework.data.mapping.model.MutablePersistentEntity;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
@@ -112,8 +111,4 @@ public interface MongoPersistentEntity<T> extends MutablePersistentEntity<T, Mon
|
||||
*/
|
||||
@Nullable
|
||||
Collection<Object> getEncryptionKeyIds();
|
||||
|
||||
default EntityInstantiator getInstanceCreator() {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -89,25 +89,22 @@ public enum MongoRegexCreator {

		String regex = prepareAndEscapeStringBeforeApplyingLikeRegex(source, matcherType);

-		switch (matcherType) {
-			case STARTING_WITH:
-				return String.format("^%s", regex);
-			case ENDING_WITH:
-				return String.format("%s$", regex);
-			case CONTAINING:
-				return String.format(".*%s.*", regex);
-			case EXACT:
-				return String.format("^%s$", regex);
-			default:
-				return regex;
-		}
+		return switch (matcherType) {
+			case STARTING_WITH -> String.format("^%s", regex);
+			case ENDING_WITH -> String.format("%s$", regex);
+			case CONTAINING -> String.format(".*%s.*", regex);
+			case EXACT -> String.format("^%s$", regex);
+			default -> regex;
+		};
	}

	/**
	 * @param source
	 * @return
	 * @since 2.2.14
+	 * @deprecated since 4.1.1
	 */
+	@Deprecated(since = "4.1.1", forRemoval = true)
	public Object toCaseInsensitiveMatch(Object source) {
		return source instanceof String stringValue ? new BsonRegularExpression(Pattern.quote(stringValue), "i") : source;
	}
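The rewrite above to an arrow-form `switch` is behavior-neutral; `toRegularExpression` still maps a `MatchMode` onto the pattern used for derived queries, and `toCaseInsensitiveMatch` is deprecated as of 4.1.1 in favour of building the expression explicitly. A small illustration — the `name` field is an assumption:

```java
// For plain input and MatchMode.CONTAINING this yields roughly ".*spring.*" (special characters get escaped).
String regex = MongoRegexCreator.INSTANCE.toRegularExpression("spring", MatchMode.CONTAINING);

// Combine with the "i" option for a case-insensitive containing match.
Criteria criteria = Criteria.where("name").regex(regex, "i");
```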
@@ -22,7 +22,6 @@ import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.types.ObjectId;
|
||||
@@ -31,7 +30,6 @@ import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
@@ -58,14 +56,12 @@ import com.mongodb.client.gridfs.model.GridFSUploadOptions;
|
||||
*/
|
||||
public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOperations, ResourcePatternResolver {
|
||||
|
||||
private final Supplier<GridFSBucket> bucketSupplier;
|
||||
private final MongoDatabaseFactory dbFactory;
|
||||
|
||||
private final @Nullable String bucket;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GridFsTemplate} using the given {@link MongoDatabaseFactory} and {@link MongoConverter}.
|
||||
* <p>
|
||||
* Note that the {@link GridFSBucket} is obtained only once from {@link MongoDatabaseFactory#getMongoDatabase()
|
||||
* MongoDatabase}. Use {@link #GridFsTemplate(MongoConverter, Supplier)} if you want to use different buckets from the
|
||||
* same Template instance.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
* @param converter must not be {@literal null}.
|
||||
@@ -76,34 +72,19 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe

	/**
	 * Creates a new {@link GridFsTemplate} using the given {@link MongoDatabaseFactory} and {@link MongoConverter}.
	 * <p>
	 * Note that the {@link GridFSBucket} is obtained only once from {@link MongoDatabaseFactory#getMongoDatabase()
	 * MongoDatabase}. Use {@link #GridFsTemplate(MongoConverter, Supplier)} if you want to use different buckets from the
	 * same Template instance.
	 *
	 * @param dbFactory must not be {@literal null}.
	 * @param converter must not be {@literal null}.
	 * @param bucket can be {@literal null}.
	 */
	public GridFsTemplate(MongoDatabaseFactory dbFactory, MongoConverter converter, @Nullable String bucket) {
		this(converter, Lazy.of(() -> getGridFs(dbFactory, bucket)));
	}

	/**
	 * Creates a new {@link GridFsTemplate} using the given {@link MongoConverter} and {@link Supplier} providing the
	 * required {@link GridFSBucket}.
	 *
	 * @param converter must not be {@literal null}.
	 * @param gridFSBucket must not be {@literal null}.
	 * @since 4.2
	 */
	public GridFsTemplate(MongoConverter converter, Supplier<GridFSBucket> gridFSBucket) {

		super(converter);

		Assert.notNull(gridFSBucket, "GridFSBucket supplier must not be null");
		Assert.notNull(dbFactory, "MongoDbFactory must not be null");

		this.bucketSupplier = gridFSBucket;
		this.dbFactory = dbFactory;
		this.bucket = bucket;
	}

	@Override
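For orientation on the two constructor shapes in the hunk above: the issue/3380 side wires `GridFsTemplate` through a `Supplier<GridFSBucket>` (documented `@since 4.2` there), while 4.1.1 keeps the `MongoDatabaseFactory`/bucket-name form. A hedged construction sketch, assuming an existing `dbFactory` and `converter` and an illustrative bucket name:

```java
// 4.1.1 style: the template derives the bucket from the MongoDatabaseFactory and an optional bucket name.
GridFsTemplate byFactory = new GridFsTemplate(dbFactory, converter, "attachments");

// Supplier style from the issue/3380 side: resolve the GridFSBucket yourself (e.g. lazily per call).
GridFsTemplate bySupplier = new GridFsTemplate(converter,
		() -> GridFSBuckets.create(dbFactory.getMongoDatabase(), "attachments"));
```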
@@ -168,7 +149,7 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
|
||||
|
||||
@Override
|
||||
public ClassLoader getClassLoader() {
|
||||
return null;
|
||||
return dbFactory.getClass().getClassLoader();
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -212,10 +193,6 @@ public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOpe
|
||||
}
|
||||
|
||||
private GridFSBucket getGridFs() {
|
||||
return this.bucketSupplier.get();
|
||||
}
|
||||
|
||||
private static GridFSBucket getGridFs(MongoDatabaseFactory dbFactory, @Nullable String bucket) {
|
||||
|
||||
Assert.notNull(dbFactory, "MongoDatabaseFactory must not be null");
|
||||
|
||||
|
||||
@@ -27,6 +27,7 @@ import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.reactivestreams.Publisher;
|
||||
|
||||
import org.springframework.core.io.buffer.DataBuffer;
|
||||
import org.springframework.core.io.buffer.DataBufferFactory;
|
||||
import org.springframework.core.io.buffer.DefaultDataBufferFactory;
|
||||
@@ -36,7 +37,6 @@ import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
@@ -61,17 +61,13 @@ import com.mongodb.reactivestreams.client.gridfs.GridFSUploadPublisher;
|
||||
*/
|
||||
public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements ReactiveGridFsOperations {
|
||||
|
||||
private final ReactiveMongoDatabaseFactory dbFactory;
|
||||
private final DataBufferFactory dataBufferFactory;
|
||||
private final Mono<GridFSBucket> bucketSupplier;
|
||||
private final @Nullable String bucket;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ReactiveGridFsTemplate} using the given {@link ReactiveMongoDatabaseFactory} and
|
||||
* {@link MongoConverter}.
|
||||
* <p>
|
||||
* Note that the {@link GridFSBucket} is obtained only once from
|
||||
* {@link ReactiveMongoDatabaseFactory#getMongoDatabase() MongoDatabase}. Use
|
||||
* {@link #ReactiveGridFsTemplate(MongoConverter, Mono, DataBufferFactory)} if you want to use different buckets from
|
||||
* the same Template instance.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
* @param converter must not be {@literal null}.
|
||||
@@ -83,11 +79,6 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
|
||||
/**
|
||||
* Creates a new {@link ReactiveGridFsTemplate} using the given {@link ReactiveMongoDatabaseFactory} and
|
||||
* {@link MongoConverter}.
|
||||
* <p>
|
||||
* Note that the {@link GridFSBucket} is obtained only once from
|
||||
* {@link ReactiveMongoDatabaseFactory#getMongoDatabase() MongoDatabase}. Use
|
||||
* {@link #ReactiveGridFsTemplate(MongoConverter, Mono, DataBufferFactory)} if you want to use different buckets from
|
||||
* the same Template instance.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
* @param converter must not be {@literal null}.
|
||||
@@ -101,11 +92,6 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
|
||||
/**
|
||||
* Creates a new {@link ReactiveGridFsTemplate} using the given {@link DataBufferFactory},
|
||||
* {@link ReactiveMongoDatabaseFactory} and {@link MongoConverter}.
|
||||
* <p>
|
||||
* Note that the {@link GridFSBucket} is obtained only once from
|
||||
* {@link ReactiveMongoDatabaseFactory#getMongoDatabase() MongoDatabase}. Use
|
||||
* {@link #ReactiveGridFsTemplate(MongoConverter, Mono, DataBufferFactory)} if you want to use different buckets from
|
||||
* the same Template instance.
|
||||
*
|
||||
* @param dataBufferFactory must not be {@literal null}.
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
@@ -114,28 +100,15 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
|
||||
*/
|
||||
public ReactiveGridFsTemplate(DataBufferFactory dataBufferFactory, ReactiveMongoDatabaseFactory dbFactory,
|
||||
MongoConverter converter, @Nullable String bucket) {
|
||||
this(converter, Mono.defer(Lazy.of(() -> doGetBucket(dbFactory, bucket))), dataBufferFactory);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ReactiveGridFsTemplate} using the given {@link MongoConverter}, {@link Mono} emitting a
|
||||
* {@link ReactiveMongoDatabaseFactory} and {@link DataBufferFactory}.
|
||||
*
|
||||
* @param converter must not be {@literal null}.
|
||||
* @param gridFSBucket must not be {@literal null}.
|
||||
* @param dataBufferFactory must not be {@literal null}.
|
||||
* @since 4.2
|
||||
*/
|
||||
public ReactiveGridFsTemplate(MongoConverter converter, Mono<GridFSBucket> gridFSBucket,
|
||||
DataBufferFactory dataBufferFactory) {
|
||||
|
||||
super(converter);
|
||||
|
||||
Assert.notNull(gridFSBucket, "GridFSBucket Mono must not be null");
|
||||
Assert.notNull(dataBufferFactory, "DataBufferFactory must not be null");
|
||||
Assert.notNull(dbFactory, "ReactiveMongoDatabaseFactory must not be null");
|
||||
|
||||
this.bucketSupplier = gridFSBucket;
|
||||
this.dataBufferFactory = dataBufferFactory;
|
||||
this.dbFactory = dbFactory;
|
||||
this.bucket = bucket;
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -277,13 +250,6 @@ public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements R
|
||||
}
|
||||
|
||||
protected Mono<GridFSBucket> doGetBucket() {
|
||||
return bucketSupplier;
|
||||
}
|
||||
|
||||
private static Mono<GridFSBucket> doGetBucket(ReactiveMongoDatabaseFactory dbFactory, @Nullable String bucket) {
|
||||
|
||||
Assert.notNull(dbFactory, "ReactiveMongoDatabaseFactory must not be null");
|
||||
|
||||
return dbFactory.getMongoDatabase()
|
||||
.map(db -> bucket == null ? GridFSBuckets.create(db) : GridFSBuckets.create(db, bucket));
|
||||
}
|
||||
|
||||
@@ -25,7 +25,7 @@ import java.util.regex.Pattern;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.bson.BsonRegularExpression;
|
||||
import org.springframework.data.domain.Range;
|
||||
import org.springframework.data.domain.Range.Bound;
|
||||
import org.springframework.data.domain.Sort;
|
||||
@@ -52,6 +52,7 @@ import org.springframework.data.repository.query.parser.Part.IgnoreCaseType;
|
||||
import org.springframework.data.repository.query.parser.Part.Type;
|
||||
import org.springframework.data.repository.query.parser.PartTree;
|
||||
import org.springframework.data.util.Streamable;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
@@ -352,6 +353,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
* @param part
|
||||
* @return the regex options or {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
private String toRegexOptions(Part part) {
|
||||
|
||||
String regexOptions = null;
|
||||
@@ -390,7 +392,18 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
|
||||
Streamable<?> streamable = asStreamable(iterator.next());
|
||||
if (!isSimpleComparisionPossible(part)) {
|
||||
streamable = streamable.map(MongoRegexCreator.INSTANCE::toCaseInsensitiveMatch);
|
||||
|
||||
MatchMode matchMode = toMatchMode(part.getType());
|
||||
String regexOptions = toRegexOptions(part);
|
||||
|
||||
streamable = streamable.map(it -> {
|
||||
if (it instanceof String value) {
|
||||
|
||||
return new BsonRegularExpression(MongoRegexCreator.INSTANCE.toRegularExpression(value, matchMode),
|
||||
regexOptions);
|
||||
}
|
||||
return it;
|
||||
});
|
||||
}
|
||||
|
||||
return streamable.toList();
|
||||
@@ -481,6 +494,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
return MatchMode.REGEX;
|
||||
case NEGATING_SIMPLE_PROPERTY:
|
||||
case SIMPLE_PROPERTY:
|
||||
case IN:
|
||||
return MatchMode.EXACT;
|
||||
default:
|
||||
return MatchMode.DEFAULT;
|
||||
|
||||
@@ -20,11 +20,6 @@ import static org.springframework.test.util.ReflectionTestUtils.*;
|
||||
|
||||
import javax.net.ssl.SSLSocketFactory;
|
||||
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.gridfs.GridFSBucket;
|
||||
import com.mongodb.client.gridfs.model.GridFSFile;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
@@ -173,12 +168,8 @@ public class MongoNamespaceTests {
|
||||
assertThat(ctx.containsBean("gridFsTemplate")).isTrue();
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("gridFsTemplate");
|
||||
|
||||
Supplier<GridFSBucket> gridFSBucketSupplier = (Supplier<GridFSBucket>) getField(operations, "bucketSupplier");
|
||||
GridFSBucket gfsBucket = gridFSBucketSupplier.get();
|
||||
assertThat(gfsBucket.getBucketName()).isEqualTo("fs"); // fs is the default
|
||||
|
||||
MongoCollection<GridFSFile> filesCollection = (MongoCollection<GridFSFile>) getField(gfsBucket, "filesCollection");
|
||||
assertThat(filesCollection.getNamespace().getDatabaseName()).isEqualTo("database");
|
||||
MongoDatabaseFactory dbf = (MongoDatabaseFactory) getField(operations, "dbFactory");
|
||||
assertThat(getField(dbf, "databaseName")).isEqualTo("database");
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "converter");
|
||||
assertThat(converter).isNotNull();
|
||||
@@ -190,12 +181,9 @@ public class MongoNamespaceTests {
|
||||
assertThat(ctx.containsBean("secondGridFsTemplate")).isTrue();
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("secondGridFsTemplate");
|
||||
|
||||
Supplier<GridFSBucket> gridFSBucketSupplier = (Supplier<GridFSBucket>) getField(operations, "bucketSupplier");
|
||||
GridFSBucket gfsBucket = gridFSBucketSupplier.get();
|
||||
assertThat(gfsBucket.getBucketName()).isEqualTo("fs"); // fs is the default
|
||||
|
||||
MongoCollection<GridFSFile> filesCollection = (MongoCollection<GridFSFile>) getField(gfsBucket, "filesCollection");
|
||||
assertThat(filesCollection.getNamespace().getDatabaseName()).isEqualTo("database");
|
||||
MongoDatabaseFactory dbf = (MongoDatabaseFactory) getField(operations, "dbFactory");
|
||||
assertThat(getField(dbf, "databaseName")).isEqualTo("database");
|
||||
assertThat(getField(operations, "bucket")).isEqualTo(null);
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "converter");
|
||||
assertThat(converter).isNotNull();
|
||||
@@ -207,12 +195,9 @@ public class MongoNamespaceTests {
|
||||
assertThat(ctx.containsBean("thirdGridFsTemplate")).isTrue();
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("thirdGridFsTemplate");
|
||||
|
||||
Supplier<GridFSBucket> gridFSBucketSupplier = (Supplier<GridFSBucket>) getField(operations, "bucketSupplier");
|
||||
GridFSBucket gfsBucket = gridFSBucketSupplier.get();
|
||||
assertThat(gfsBucket.getBucketName()).isEqualTo("bucketString"); // fs is the default
|
||||
|
||||
MongoCollection<GridFSFile> filesCollection = (MongoCollection<GridFSFile>) getField(gfsBucket, "filesCollection");
|
||||
assertThat(filesCollection.getNamespace().getDatabaseName()).isEqualTo("database");
|
||||
MongoDatabaseFactory dbf = (MongoDatabaseFactory) getField(operations, "dbFactory");
|
||||
assertThat(getField(dbf, "databaseName")).isEqualTo("database");
|
||||
assertThat(getField(operations, "bucket")).isEqualTo("bucketString");
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "converter");
|
||||
assertThat(converter).isNotNull();
|
||||
|
||||
@@ -35,7 +35,6 @@ import org.springframework.data.mongodb.core.convert.LazyLoadingTestUtils;
|
||||
import org.springframework.data.mongodb.core.mapping.DBRef;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoId;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.test.util.MongoTemplateExtension;
|
||||
import org.springframework.data.mongodb.test.util.MongoTestTemplate;
|
||||
import org.springframework.data.mongodb.test.util.Template;
|
||||
@@ -233,53 +232,6 @@ public class MongoTemplateDbRefTests {
assertThat(target.getValue()).containsExactlyInAnyOrder(one, two);
}

@Test // GH-2191
void shouldAllowToSliceCollectionOfDbRefs() {

JustSomeType one = new JustSomeType();
one.value = "one";

JustSomeType two = new JustSomeType();
two.value = "two";

template.insertAll(Arrays.asList(one, two));

WithCollectionDbRef source = new WithCollectionDbRef();
source.refs = Arrays.asList(one, two);

template.save(source);

Query theQuery = query(where("id").is(source.id));
theQuery.fields().slice("refs", 1, 1);

WithCollectionDbRef target = template.findOne(theQuery, WithCollectionDbRef.class);
assertThat(target.getRefs()).containsExactly(two);
}

@Test // GH-2191
void shouldAllowToSliceCollectionOfLazyDbRefs() {

JustSomeType one = new JustSomeType();
one.value = "one";

JustSomeType two = new JustSomeType();
two.value = "two";

template.insertAll(Arrays.asList(one, two));

WithCollectionDbRef source = new WithCollectionDbRef();
source.lazyrefs = Arrays.asList(one, two);

template.save(source);

Query theQuery = query(where("id").is(source.id));
theQuery.fields().slice("lazyrefs", 1, 1);

WithCollectionDbRef target = template.findOne(theQuery, WithCollectionDbRef.class);
LazyLoadingTestUtils.assertProxyIsResolved(target.lazyrefs, false);
assertThat(target.getLazyrefs()).containsExactly(two);
}

@Data
@Document("cycle-with-different-type-root")
static class RefCycleLoadingIntoDifferentTypeRoot {
@@ -312,16 +264,6 @@ public class MongoTemplateDbRefTests {
String value;
}

@Data
static class WithCollectionDbRef {

@Id String id;

@DBRef List<JustSomeType> refs;

@DBRef(lazy = true) List<JustSomeType> lazyrefs;
}

@Data
static class WithDBRefOnRawStringId {
@@ -1,97 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;

import static org.assertj.core.api.Assertions.*;
import static org.springframework.data.mongodb.core.mapping.MappingConfig.*;

import lombok.Data;

import java.util.List;

import org.bson.Document;
import org.junit.jupiter.api.Test;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.client.MongoClients;

/**
* @author Christoph Strobl
* @since 2023/06
*/
public class MongoTemplateMappingConfigTests {

@Test
void testProgrammaticMetadata() {

SimpleMongoClientDatabaseFactory dbFactory = new SimpleMongoClientDatabaseFactory(MongoClients.create(),
"test-manual-config");

MongoMappingContext mappingContext = new MongoMappingContext();
mappingContext.mappingRules(rules -> {
rules.add(Sample.class, cfg -> {
cfg.namespace("my-sample");
cfg.entityCreator(args -> {
return new Sample(args.get(Sample::getName));
});
cfg.define(Sample::getName, PropertyConfig::useAsId);
cfg.define(Sample::getValue, property -> property.mappedName("va-l-ue"));
});
});
mappingContext.afterPropertiesSet();

MappingMongoConverter mappingMongoConverter = new MappingMongoConverter(dbFactory, mappingContext);
mappingMongoConverter.afterPropertiesSet();

MongoTemplate template = new MongoTemplate(dbFactory, mappingMongoConverter);
template.dropCollection(Sample.class);

Sample sample = new Sample("s1");
sample.value = "val";
template.save(sample);

Document dbValue = template.execute("my-sample", collection -> {
return collection.find(new Document()).first();
});

System.out.println("dbValue: " + dbValue);
assertThat(dbValue).containsEntry("_id", sample.name).containsEntry("va-l-ue", sample.value);

List<Sample> entries = template.find(Query.query(Criteria.where("name").is(sample.name)), Sample.class);
entries.forEach(System.out::println);

assertThat(entries).containsExactly(sample);
}

@Data
@org.springframework.data.mongodb.core.mapping.Document(collection = "my-sample")
static class Sample {

Sample(String name) {
this.name = name;
}

@Id final String name;

@Field(name = "va-l-ue") String value;
}

}
@@ -193,9 +193,30 @@ class MongoTemplateScrollTests {
window = template.scroll(q.with(window.positionAt(0)).limit(2), Person.class);

assertThat(window).hasSize(2);
assertThat(window).containsOnly(john20, john40_1);
assertThat(window.hasNext()).isTrue();
assertThat(window.isLast()).isFalse();
assertThat(window).containsOnly(jane_20, jane_40);
assertThat(window.hasNext()).isFalse();
assertThat(window.isLast()).isTrue();
}

@Test // GH-4413
void shouldAllowInitialBackwardSort() {

Person jane_20 = new Person("Jane", 20);
Person jane_40 = new Person("Jane", 40);
Person jane_42 = new Person("Jane", 42);
Person john20 = new Person("John", 20);
Person john40_1 = new Person("John", 40);
Person john40_2 = new Person("John", 40);

template.insertAll(Arrays.asList(john20, john40_1, john40_2, jane_20, jane_40, jane_42));
Query q = new Query(where("firstName").regex("J.*")).with(Sort.by("firstName", "age"));
q.with(ScrollPosition.keyset().backward()).limit(3);

Window<Person> window = template.scroll(q, Person.class);
assertThat(window).containsExactly(john20, john40_1, john40_2);

window = template.scroll(q.with(window.positionAt(0)).limit(3), Person.class);
assertThat(window).containsExactly(jane_20, jane_40, jane_42);
}

@ParameterizedTest // GH-4308
@@ -69,6 +69,8 @@ import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.aggregation.StringOperators;
import org.springframework.data.mongodb.core.convert.LazyLoadingProxy;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.IndexField;
@@ -1789,6 +1791,30 @@ public class MongoTemplateTests {
assertThat(result.get(0).date).isNotNull();
}

@Test // GH-4390
void nativeDriverDateTimeCodecShouldBeApplied/*when configured*/() {

MongoTestTemplate ops = new MongoTestTemplate(cfg -> {
cfg.configureConversion(conversion -> {
conversion.customConversions(
MongoCustomConversions.create(MongoConverterConfigurationAdapter::useNativeDriverJavaTimeCodecs));
});
});

TypeWithDate source = new TypeWithDate();
source.id = "id-1";
source.date = Date.from(Instant.now());

ops.save(source);

var dbDate = ops.execute(TypeWithDate.class,
collection -> collection.find(new org.bson.Document("_id", source.id)).first().get("date"));

TypeWithDate target = ops.findOne(query(where("date").is(source.date)), TypeWithDate.class);

assertThat(target.date).isEqualTo(source.date).isEqualTo(dbDate);
}

@Test // DATAMONGO-540
public void findOneAfterUpsertForNonExistingObjectReturnsTheInsertedObject() {
@@ -0,0 +1,56 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;

import static org.assertj.core.api.AssertionsForClassTypes.*;
import static org.mockito.Mockito.*;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.junit.jupiter.api.Test;
import org.springframework.data.domain.KeysetScrollPosition;
import org.springframework.data.domain.ScrollPosition;
import org.springframework.data.domain.Window;
import org.springframework.data.mongodb.core.EntityOperations.Entity;
import org.springframework.data.mongodb.core.query.Query;

/**
* Unit tests for {@link ScrollUtils}.
*
* @author Mark Paluch
*/
class ScrollUtilsUnitTests {

@Test // GH-4413
void positionShouldRetainScrollDirection() {

Query query = new Query();
query.with(ScrollPosition.keyset().backward());
EntityOperations entityOperationsMock = mock(EntityOperations.class);
Entity entityMock = mock(Entity.class);

when(entityOperationsMock.forEntity(any())).thenReturn(entityMock);
when(entityMock.extractKeys(any(), any())).thenReturn(Map.of("k", "v"));

Window<Integer> window = ScrollUtils.createWindow(query, new ArrayList<>(List.of(1, 2, 3)), Integer.class,
entityOperationsMock);

assertThat(window.positionAt(0)).isInstanceOf(KeysetScrollPosition.class);
assertThat(((KeysetScrollPosition) window.positionAt(0)).scrollsBackward()).isTrue();
}
}
@@ -117,6 +117,23 @@ class FilterExpressionUnitTests {
assertThat($filter).isEqualTo(new Document(expected));
}

@Test // GH-4394
void filterShouldAcceptExpression() {

Document $filter = ArrayOperators.arrayOf(ObjectOperators.valueOf("data.metadata").toArray()).filter().as("item")
.by(ComparisonOperators.valueOf("item.price").greaterThan("field-1")).toDocument(Aggregation.DEFAULT_CONTEXT);

Document expected = Document.parse("""
{ $filter : {
input: { $objectToArray: "$data.metadata" },
as: "item",
cond: { $gt: [ "$$item.price", "$field-1" ] }
}}
""");

assertThat($filter).isEqualTo(expected);
}

private Document extractFilterOperatorFromDocument(Document source) {

List<Object> pipeline = DocumentTestUtils.getAsDBList(source, "pipeline");
@@ -34,7 +34,6 @@ import java.time.ZoneOffset;
import java.time.temporal.ChronoUnit;
import java.util.*;

import org.bson.BsonUndefined;
import org.bson.types.Binary;
import org.bson.types.Code;
import org.bson.types.Decimal128;
@@ -2844,13 +2843,6 @@ class MappingMongoConverterUnitTests {
assertThat(converter.read(Address.class, source).city).isEqualTo("Gotham,Metropolis");
}

@Test // GH-2350
void shouldConvertBsonUndefinedToNull() {

org.bson.Document source = new org.bson.Document("s", "hallway drive").append("city", new BsonUndefined());
assertThat(converter.read(Address.class, source).city).isNull();
}

static class GenericType<T> {
T content;
}
@@ -64,6 +64,16 @@ class MongoCustomConversionsUnitTests {
assertThat(conversions.getPropertyValueConversions().hasValueConverter(persistentProperty)).isTrue();
}

@Test // GH-4390
void doesNotReturnConverterForNativeTimeTimeIfUsingDriverCodec() {

MongoCustomConversions conversions = MongoCustomConversions.create(config -> {
config.useNativeDriverJavaTimeCodecs();
});

assertThat(conversions.getCustomWriteTarget(Date.class)).isEmpty();
}

static class DateToZonedDateTimeConverter implements Converter<Date, ZonedDateTime> {

@Override
@@ -17,7 +17,6 @@ package org.springframework.data.mongodb.core.mapping;

import static org.assertj.core.api.Assertions.*;
import static org.mockito.Mockito.*;
import static org.springframework.data.mongodb.core.mapping.MappingConfig.EntityConfig.*;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
@@ -27,21 +26,18 @@ import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

import lombok.Data;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import org.springframework.context.ApplicationContext;
import org.springframework.core.annotation.AliasFor;
import org.springframework.data.mapping.MappingException;
import org.springframework.data.mongodb.core.mapping.MappingConfig.EntityConfig;
import org.springframework.data.mongodb.core.mapping.MappingConfig.PropertyConfig;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.spel.ExtensionAwareEvaluationContextProvider;
import org.springframework.data.spel.spi.EvaluationContextExtension;
import org.springframework.data.util.ClassTypeInformation;
import org.springframework.data.util.TypeInformation;

/**
* Unit tests for {@link BasicMongoPersistentEntity}.
@@ -305,31 +301,6 @@ public class BasicMongoPersistentEntityUnitTests {
return new BasicMongoPersistentEntity<>(ClassTypeInformation.from(type));
}

@Data
class Sample {

String name;
String value;
}

@Test
void testProgrammaticMetadata() {

doReturn("value").when(propertyMock).getName();

EntityConfig<Sample> entityConfig = configure(Sample.class) //
.namespace("my-collection") //
.define(Sample::getValue, PropertyConfig::useAsId)
.define(Sample::getName, property -> property.mappedName("n-a-m-e"));

BasicMongoPersistentEntity<Sample> entity = new BasicMongoPersistentEntity<>(TypeInformation.of(Sample.class), entityConfig);
entity.addPersistentProperty(propertyMock);

MongoPersistentProperty idProperty = entity.getIdProperty();
assertThat(idProperty).isSameAs(propertyMock);
assertThat(entity.getCollection()).isEqualTo("my-collection");
}

@Document("contacts")
class Contact {}
@@ -1510,9 +1510,16 @@ public abstract class AbstractPersonRepositoryIntegrationTests implements Dirtie
assertThat(result.get(0).getId().equals(bart.getId()));
}

@Test // GH-3395
@Test // GH-3395, GH-4404
void caseInSensitiveInClause() {

assertThat(repository.findByLastnameIgnoreCaseIn("bEAuFoRd", "maTTheWs")).hasSize(3);

repository.save(new Person("the-first", "The First"));
repository.save(new Person("the-first-one", "The First One"));
repository.save(new Person("the-second", "The Second"));

assertThat(repository.findByLastnameIgnoreCaseIn("tHE fIRsT")).hasSize(1);
}

@Test // GH-3395
@@ -15,7 +15,6 @@
*/
package org.springframework.data.mongodb.repository.query;

import static org.mockito.Mockito.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;
import static org.springframework.data.mongodb.repository.query.StubParameterAccessor.*;
@@ -25,6 +24,7 @@ import java.lang.reflect.Method;
import java.util.List;
import java.util.regex.Pattern;

import org.bson.BsonRegularExpression;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.junit.jupiter.api.BeforeEach;
@@ -273,6 +273,17 @@ class MongoQueryCreatorUnitTests {
assertThat(query).isEqualTo(query(where("firstName").regex("^dave$", "i")));
}

@Test // GH-4404
void createsQueryWithFindByInClauseHavingIgnoreCaseCorrectly() {

PartTree tree = new PartTree("findAllByFirstNameInIgnoreCase", Person.class);
MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, List.of("da've", "carter")), context);

Query query = creator.createQuery();
assertThat(query).isEqualTo(query(where("firstName")
.in(List.of(new BsonRegularExpression("^\\Qda've\\E$", "i"), new BsonRegularExpression("^carter$", "i")))));
}

@Test // DATAMONGO-770
void createsQueryWithFindByNotIgnoreCaseCorrectly() {
@@ -25,16 +25,16 @@ The following example shows how to use Change Streams with `MessageListener` ins
[source,java]
----
MessageListenerContainer container = new DefaultMessageListenerContainer(template);
container.start(); <1>
container.start(); <1>

MessageListener<ChangeStreamDocument<Document>, User> listener = System.out::println; <2>
ChangeStreamRequestOptions options = new ChangeStreamRequestOptions("db", "user", ChangeStreamOptions.empty()); <3>
MessageListener<ChangeStreamDocument<Document>, User> listener = System.out::println; <2>
ChangeStreamRequestOptions options = new ChangeStreamRequestOptions("user", ChangeStreamOptions.empty()); <3>

Subscription subscription = container.register(new ChangeStreamRequest<>(listener, options), User.class); <4>
Subscription subscription = container.register(new ChangeStreamRequest<>(listener, options), User.class); <4>

// ...

container.stop(); <5>
container.stop(); <5>
----
<1> Starting the container initializes the resources and starts `Task` instances for already registered `SubscriptionRequest` instances. Requests added after startup are run immediately.
<2> Define the listener called when a `Message` is received. The `Message#getBody()` is converted to the requested domain type. Use `Document` to receive raw results without conversion.
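The flattened side-by-side view above interleaves the old and new lines of the snippet, so the registration flow is hard to read in one pass. The consolidated sketch below is illustrative only: it assumes a `MongoTemplate` named `template`, a `User` domain type, and one of the two `ChangeStreamRequestOptions` constructor variants shown above (the three-argument database/collection/options form); it is not the documented sample itself.

[source,java]
----
// Illustrative sketch, not the documented sample. Assumes `template` (a MongoTemplate),
// a `User` domain type, and the messaging types from org.springframework.data.mongodb.core.messaging.
MessageListenerContainer container = new DefaultMessageListenerContainer(template);
container.start();

// Listener invoked for every change event; getBody() is the converted domain object.
MessageListener<ChangeStreamDocument<Document>, User> listener =
		message -> System.out.println(message.getBody());

// Watch the "user" collection in the "db" database with default change stream options.
ChangeStreamRequestOptions options =
		new ChangeStreamRequestOptions("db", "user", ChangeStreamOptions.empty());

Subscription subscription = container.register(new ChangeStreamRequest<>(listener, options), User.class);

// ... consume change events ...

container.stop();
----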
@@ -3,6 +3,7 @@

MongoDB supports storing binary files inside its filesystem, GridFS. Spring Data MongoDB provides a `GridFsOperations` interface as well as the corresponding implementation, `GridFsTemplate`, to let you interact with the filesystem. You can set up a `GridFsTemplate` instance by handing it a `MongoDatabaseFactory` as well as a `MongoConverter`, as the following example shows:


====
.Java
[source,java,role="primary"]
@@ -81,7 +82,7 @@ class GridFsClient {

@Test
public void findFilesInGridFs() {
GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt")));
GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt")))
}
}
----
@@ -109,7 +110,3 @@ class GridFsClient {
====

`GridFsOperations` extends `ResourcePatternResolver` and lets the `GridFsTemplate` (for example) be plugged into an `ApplicationContext` to read Spring Config files from a MongoDB database.

NOTE: By default, `GridFsTemplate` obtains `GridFSBucket` once upon the first GridFS interaction.
After that, the Template instance reuses the cached bucket.
To use different buckets from the same Template instance, use the constructor accepting `Supplier<GridFSBucket>`, as sketched below.
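A minimal construction sketch follows, assuming `dbFactory` (a `MongoDatabaseFactory`) and `converter` (a `MongoConverter`) are already available. The two-argument factory/converter constructor is the documented one; the converter-plus-`Supplier<GridFSBucket>` constructor and its exact parameter order are assumed from the NOTE above, so verify them against the release you use.

[source,java]
----
// Classic setup: database factory plus converter (an overload also accepts a bucket name).
GridFsTemplate fsTemplate = new GridFsTemplate(dbFactory, converter);

// Supplier-based setup (assumed constructor shape): the bucket is resolved lazily,
// so the supplier decides which GridFSBucket the template talks to.
Supplier<GridFSBucket> bucketSupplier =
		() -> GridFSBuckets.create(dbFactory.getMongoDatabase(), "my-bucket");
GridFsTemplate supplierTemplate = new GridFsTemplate(converter, bucketSupplier);
----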
@@ -57,7 +57,7 @@ To create a Spring project in STS:
<repository>
<id>spring-milestone</id>
<name>Spring Maven MILESTONE Repository</name>
<url>https://repo.spring.io/libs-milestone</url>
<url>https://repo.spring.io/milestone</url>
</repository>
</repositories>
----
@@ -1,4 +1,4 @@
Spring Data MongoDB 4.1 GA (2023.0.0)
Spring Data MongoDB 4.1.1 (2023.0.1)
Copyright (c) [2010-2019] Pivotal Software, Inc.

This product is licensed to you under the Apache License, Version 2.0 (the "License").
@@ -45,5 +45,6 @@ conditions of the subcomponent's license, as noted in the LICENSE file.