Compare commits

...

19 Commits
3.3.5 ... 3.3.7

Author SHA1 Message Date
Spring Builds
5cf73168f7 Release version 3.3.7 (2021.1.7).
See #4115
2022-09-19 08:40:19 +00:00
Spring Builds
792d7199f0 Prepare 3.3.7 (2021.1.7).
See #4115
2022-09-19 08:38:01 +00:00
Christoph Strobl
34a2d09303 Fix issue with reference conversion in updates.
We now make sure to convert references in update operations targeting collection-like fields when using, e.g., the push modifier.

Closes #4041
Original pull request: #4045.
2022-09-19 08:53:43 +02:00
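
For illustration, a minimal sketch (entity and field names taken from the test added in this change set) of the scenario this fix addresses: pushing a @DocumentReference value onto a collection-like field via the fluent update API now stores the reference representation rather than the raw object.

import java.util.List;

import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.mapping.DocumentReference;
import org.springframework.data.mongodb.core.query.Update;

class PushDocumentReferenceSketch {

    static class WithListOfRefs {
        @Id String id;
        @DocumentReference List<WithListOfRefs> refs;
    }

    // Push the referenced entity 'b' into the @DocumentReference list of 'a'.
    // With this fix the pushed element is written as its reference (the id of 'b'),
    // not as an embedded document.
    static void pushReference(MongoTemplate template, WithListOfRefs a, WithListOfRefs b) {
        template.update(WithListOfRefs.class)
                .matching(where("id").is(a.id))
                .apply(new Update().push("refs").each(new Object[] { b }))
                .first();
    }
}
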
Mark Paluch
26554e3031 Polishing.
See #4061
Original pull request: #4062.
2022-09-16 14:53:08 +02:00
Christoph Strobl
018fe623c0 Improve exception message when deriving collection name from type.
We now provide a better-worded exception message when trying to derive the collection name for a type that is not considered a user type (such as org.bson.Document).
The Javadoc has been updated to hint at this error.

Closes #4061
Original pull request: #4062.
2022-09-16 14:53:06 +02:00
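
A hedged sketch of the resulting behavior (helper name assumed for illustration): asking for the collection name of a store-native type such as org.bson.Document now fails with a MappingException carrying the improved message.

import org.bson.Document;
import org.springframework.data.mapping.MappingException;
import org.springframework.data.mongodb.core.MongoOperations;

class CollectionNameSketch {

    // Collection names can only be derived for mapped user types. For a store-native
    // type such as org.bson.Document the template now raises a descriptive MappingException.
    static String describe(MongoOperations operations) {
        try {
            return operations.getCollectionName(Document.class);
        } catch (MappingException e) {
            return "cannot derive collection name: " + e.getMessage();
        }
    }
}
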
Christoph Strobl
27e6b5a9be Initialize lists with size where possible.
Closes #3941
Original pull request: #3974.
2022-09-16 14:44:11 +02:00
Mark Paluch
c9be849e62 Polishing.
Reformat code.

See #4167.
Original pull request: #4168.
2022-09-16 14:41:29 +02:00
Christoph Strobl
da5f24981c Fix usage of change stream option startAfter.
We now make sure to apply the token to the driver's startAfter method. Before this change it had been incorrectly applied to resumeAfter.

Closes #4167.
Original pull request: #4168.
2022-09-16 14:41:29 +02:00
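
A minimal sketch (database and collection names are placeholders) of the option this commit wires correctly: a previously captured resume token passed via ChangeStreamOptions.startAfter(...) now reaches the driver's startAfter method.

import org.bson.BsonDocument;
import org.bson.Document;
import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ChangeStreamOptions;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import reactor.core.publisher.Flux;

class StartAfterSketch {

    // Resume a change stream after an invalidate event using a stored resume token.
    // The token is now applied to the driver's startAfter(...) instead of resumeAfter(...).
    static Flux<ChangeStreamEvent<Document>> resume(ReactiveMongoTemplate template, BsonDocument token) {
        ChangeStreamOptions options = ChangeStreamOptions.builder().startAfter(token).build();
        return template.changeStream("database", "collection", options, Document.class);
    }
}
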
Mark Paluch
7c7b05f10d Polishing.
Fix generics. Add warning suppressions for nullability checks.

See: #4104
Original pull request: #4156.
2022-09-14 14:07:27 +02:00
Christoph Strobl
ed4f30ab07 Fix GeoJson polygon conversion for polygons with inner ring.
Closes: #4104
Original pull request: #4156.
2022-09-14 14:07:26 +02:00
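
A small sketch (coordinates chosen arbitrarily) of the shape affected by this fix: a GeoJSON polygon whose hole is supplied via withInnerRing now survives the round-trip back from the stored document.

import java.util.Arrays;

import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.geo.GeoJsonPolygon;

class InnerRingSketch {

    // A polygon with one inner ring (a hole). Converting the stored Document back to a
    // GeoJsonPolygon now retains the inner ring instead of dropping it.
    static GeoJsonPolygon polygonWithHole() {
        return new GeoJsonPolygon(
                new Point(0, 0), new Point(0, 10), new Point(10, 10), new Point(0, 0))
                .withInnerRing(Arrays.asList(
                        new Point(2, 2), new Point(2, 4), new Point(4, 4), new Point(2, 2)));
    }
}
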
Christoph Strobl
cf38ba15bf Allow referencing the $id field of dbrefs within an aggregation pipeline.
Closes: #4123
Original pull request: #4125.
2022-08-05 14:10:58 +02:00
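
A brief sketch of what this enables (field and alias names are assumptions for illustration): projecting the raw $id of a DBRef-backed property inside a pipeline, which the field mapping now passes through untouched rather than remapping.

import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.project;

import org.springframework.data.mongodb.core.aggregation.Aggregation;

class DbRefIdProjectionSketch {

    // Project the DBRef's raw $id field; "person.$id" is kept as-is by the field mapping.
    static Aggregation projectPersonId() {
        return newAggregation(project().and("person.$id").as("personId"));
    }
}
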
Sojin
daf12a6e2b Fix AKNOWLEDGED typo in reference documentation.
The two typos that were found have been fixed.

Closes #4135
2022-08-05 14:08:15 +02:00
Christoph Strobl
d0481d089e After release cleanups.
See #4090
2022-07-15 10:47:35 +02:00
Christoph Strobl
169c35789d Prepare next development iteration.
See #4090
2022-07-15 10:47:31 +02:00
Christoph Strobl
0de55deb03 Release version 3.3.6 (2021.1.6).
See #4090
2022-07-15 10:34:16 +02:00
Christoph Strobl
01ac35fa31 Prepare 3.3.6 (2021.1.6).
See #4090
2022-07-15 10:33:37 +02:00
Mark Paluch
f5c0318a14 Avoid duplicate bean registrations in MappingMongoConverterParser.
We now avoid overriding `ValidatingMongoEventListener` and `LocalValidatorFactoryBean` bean definitions by checking whether a bean with the given name is already registered, preventing duplicate registrations.

Closes #4087
2022-06-28 10:25:15 +02:00
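
A sketch mirroring the integration-test setup added for this fix (the config location is a placeholder): loading the XML namespace configuration into a bean factory that disallows bean definition overriding, which the duplicate registrations could previously trip up.

import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.beans.factory.xml.XmlBeanDefinitionReader;
import org.springframework.core.io.ClassPathResource;

class StrictXmlConfigSketch {

    // With overriding disabled, a second registration of ValidatingMongoEventListener or
    // LocalValidatorFactoryBean would be rejected; the parser now registers each bean only once.
    static DefaultListableBeanFactory load(String configLocation) {
        DefaultListableBeanFactory factory = new DefaultListableBeanFactory();
        factory.setAllowBeanDefinitionOverriding(false);
        new XmlBeanDefinitionReader(factory).loadBeanDefinitions(new ClassPathResource(configLocation));
        return factory;
    }
}
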
Mark Paluch
7a0debe335 After release cleanups.
See #4029
2022-06-20 11:12:22 +02:00
Mark Paluch
0a79ad6585 Prepare next development iteration.
See #4029
2022-06-20 11:12:19 +02:00
30 changed files with 293 additions and 97 deletions

View File

@@ -115,7 +115,7 @@ Use `<mongo:client-settings cluster-hosts="..." />` instead
| `<mongo:db-factory writeConcern="..." />`
| NONE, NORMAL, SAFE, FSYNC_SAFE, REPLICAS_SAFE, MAJORITY
| W1, W2, W3, UNAKNOWLEDGED, AKNOWLEDGED, JOURNALED, MAJORITY
| W1, W2, W3, UNACKNOWLEDGED, ACKNOWLEDGED, JOURNALED, MAJORITY
|===
.Removed XML Namespace Elements and Attributes:

View File

@@ -5,7 +5,7 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.3.5</version>
<version>3.3.7</version>
<packaging>pom</packaging>
<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>2.6.5</version>
<version>2.6.7</version>
</parent>
<modules>
@@ -26,7 +26,7 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>2.6.5</springdata.commons>
<springdata.commons>2.6.7</springdata.commons>
<mongo>4.4.2</mongo>
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
<jmh.version>1.19</jmh.version>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.3.5</version>
<version>3.3.7</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -14,7 +14,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.3.5</version>
<version>3.3.7</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -11,7 +11,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.3.5</version>
<version>3.3.7</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -24,7 +24,6 @@ import java.util.List;
import java.util.Set;
import org.springframework.beans.BeanMetadataElement;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.BeanDefinitionHolder;
import org.springframework.beans.factory.config.RuntimeBeanReference;
@@ -64,6 +63,7 @@ import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;
import org.springframework.util.xml.DomUtils;
import org.w3c.dom.Element;
/**
@@ -135,9 +135,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
new BeanComponentDefinition(indexOperationsProviderBuilder.getBeanDefinition(), "indexOperationsProvider"));
}
try {
registry.getBeanDefinition(INDEX_HELPER_BEAN_NAME);
} catch (NoSuchBeanDefinitionException ignored) {
if (!registry.containsBeanDefinition(INDEX_HELPER_BEAN_NAME)) {
BeanDefinitionBuilder indexHelperBuilder = BeanDefinitionBuilder
.genericBeanDefinition(MongoPersistentEntityIndexCreator.class);
@@ -151,7 +149,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
BeanDefinition validatingMongoEventListener = potentiallyCreateValidatingMongoEventListener(element, parserContext);
if (validatingMongoEventListener != null) {
if (validatingMongoEventListener != null && !registry.containsBeanDefinition(VALIDATING_EVENT_LISTENER_BEAN_NAME)) {
parserContext.registerBeanComponent(
new BeanComponentDefinition(validatingMongoEventListener, VALIDATING_EVENT_LISTENER_BEAN_NAME));
}
@@ -165,15 +163,16 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
private BeanDefinition potentiallyCreateValidatingMongoEventListener(Element element, ParserContext parserContext) {
String disableValidation = element.getAttribute("disable-validation");
boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.valueOf(disableValidation);
boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.parseBoolean(disableValidation);
if (!validationDisabled) {
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition();
RuntimeBeanReference validator = getValidator(builder, parserContext);
RuntimeBeanReference validator = getValidator(element, parserContext);
if (validator != null) {
builder.getRawBeanDefinition().setBeanClass(ValidatingMongoEventListener.class);
builder.getRawBeanDefinition().setSource(element);
builder.addConstructorArgValue(validator);
return builder.getBeanDefinition();
@@ -195,7 +194,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
validatorDef.setSource(source);
validatorDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
String validatorName = parserContext.getReaderContext().registerWithGeneratedName(validatorDef);
parserContext.registerBeanComponent(new BeanComponentDefinition(validatorDef, validatorName));
return new RuntimeBeanReference(validatorName);
}

View File

@@ -176,16 +176,20 @@ class CountQuery {
Document $geoWithinMin = new Document("$geoWithin",
new Document(spheric ? "$centerSphere" : "$center", $centerMin));
List<Document> criteria = new ArrayList<>();
List<Document> criteria;
if ($and != null) {
if ($and instanceof Collection) {
criteria.addAll((Collection) $and);
Collection andElements = (Collection) $and;
criteria = new ArrayList<>(andElements.size() + 2);
criteria.addAll(andElements);
} else {
throw new IllegalArgumentException(
"Cannot rewrite query as it contains an '$and' element that is not a Collection!: Offending element: "
+ $and);
}
} else {
criteria = new ArrayList<>(2);
}
criteria.add(new Document("$nor", Collections.singletonList(new Document(key, $geoWithinMin))));

View File

@@ -188,7 +188,8 @@ public class DefaultIndexOperations implements IndexOperations {
private List<IndexInfo> getIndexData(MongoCursor<Document> cursor) {
List<IndexInfo> indexInfoList = new ArrayList<>();
int available = cursor.available();
List<IndexInfo> indexInfoList = available > 0 ? new ArrayList<>(available) : new ArrayList<>();
while (cursor.hasNext()) {

View File

@@ -138,7 +138,14 @@ class EntityOperations {
"No class parameter provided, entity collection can't be determined!");
}
return context.getRequiredPersistentEntity(entityClass).getCollection();
MongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(entityClass);
if (persistentEntity == null) {
throw new MappingException(String.format(
"Cannot determine collection name from type '%s'. Is it a store native type?", entityClass.getName()));
}
return persistentEntity.getCollection();
}
public Query getByIdInQuery(Collection<?> entities) {

View File

@@ -80,6 +80,7 @@ public interface MongoOperations extends FluentMongoOperations {
*
* @param entityClass must not be {@literal null}.
* @return never {@literal null}.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be derived from the type.
*/
String getCollectionName(Class<?> entityClass);
@@ -968,6 +969,8 @@ public interface MongoOperations extends FluentMongoOperations {
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @return the converted object that was updated or {@literal null}, if not found.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be
* {@link #getCollectionName(Class) derived} from the given replacement value.
* @since 2.1
*/
@Nullable
@@ -1009,6 +1012,8 @@ public interface MongoOperations extends FluentMongoOperations {
* @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
* {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
* as it is after the update.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be
* {@link #getCollectionName(Class) derived} from the given replacement value.
* @since 2.1
*/
@Nullable
@@ -1082,6 +1087,8 @@ public interface MongoOperations extends FluentMongoOperations {
* @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
* {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
* as it is after the update.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be
* {@link #getCollectionName(Class) derived} from the given replacement value.
* @since 2.1
*/
@Nullable
@@ -1171,6 +1178,8 @@ public interface MongoOperations extends FluentMongoOperations {
* {@literal null}.
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
* @return the count of matching documents.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
*/
long count(Query query, Class<?> entityClass);
@@ -1205,6 +1214,8 @@ public interface MongoOperations extends FluentMongoOperations {
*
* @param entityClass must not be {@literal null}.
* @return the estimated number of documents.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
* @since 3.1
*/
default long estimatedCount(Class<?> entityClass) {
@@ -1265,6 +1276,8 @@ public interface MongoOperations extends FluentMongoOperations {
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @return the inserted object.
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given object type.
*/
<T> T insert(T objectToSave);
@@ -1291,6 +1304,8 @@ public interface MongoOperations extends FluentMongoOperations {
* @param batchToSave the batch of objects to save. Must not be {@literal null}.
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
* @return the inserted objects that.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
*/
<T> Collection<T> insert(Collection<? extends T> batchToSave, Class<?> entityClass);
@@ -1309,6 +1324,8 @@ public interface MongoOperations extends FluentMongoOperations {
*
* @param objectsToSave the list of objects to save. Must not be {@literal null}.
* @return the inserted objects.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} for the given objects.
*/
<T> Collection<T> insertAll(Collection<? extends T> objectsToSave);
@@ -1330,6 +1347,8 @@ public interface MongoOperations extends FluentMongoOperations {
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @return the saved object.
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given object type.
*/
<T> T save(T objectToSave);
@@ -1366,9 +1385,11 @@ public interface MongoOperations extends FluentMongoOperations {
* the existing object. Must not be {@literal null}.
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
* @return the {@link UpdateResult} which lets you access the results of the previous write.
* @since 3.0
* @see Update
* @see AggregationUpdate
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
* @since 3.0
*/
UpdateResult upsert(Query query, UpdateDefinition update, Class<?> entityClass);
@@ -1420,9 +1441,11 @@ public interface MongoOperations extends FluentMongoOperations {
* the existing. Must not be {@literal null}.
* @param entityClass class that determines the collection to use.
* @return the {@link UpdateResult} which lets you access the results of the previous write.
* @since 3.0
* @see Update
* @see AggregationUpdate
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
* @since 3.0
*/
UpdateResult updateFirst(Query query, UpdateDefinition update, Class<?> entityClass);
@@ -1474,9 +1497,11 @@ public interface MongoOperations extends FluentMongoOperations {
* the existing. Must not be {@literal null}.
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
* @return the {@link UpdateResult} which lets you access the results of the previous write.
* @since 3.0
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
* @see Update
* @see AggregationUpdate
* @since 3.0
*/
UpdateResult updateMulti(Query query, UpdateDefinition update, Class<?> entityClass);
@@ -1524,6 +1549,8 @@ public interface MongoOperations extends FluentMongoOperations {
*
* @param object must not be {@literal null}.
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given object type.
*/
DeleteResult remove(Object object);
@@ -1547,6 +1574,8 @@ public interface MongoOperations extends FluentMongoOperations {
* @param entityClass class that determines the collection to use.
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
* @throws IllegalArgumentException when {@literal query} or {@literal entityClass} is {@literal null}.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
*/
DeleteResult remove(Query query, Class<?> entityClass);
@@ -1594,6 +1623,8 @@ public interface MongoOperations extends FluentMongoOperations {
* @param query the query document that specifies the criteria used to find and remove documents.
* @param entityClass class of the pojo to be operated on.
* @return the {@link List} converted objects deleted by this operation.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
* @since 1.5
*/
<T> List<T> findAllAndRemove(Query query, Class<T> entityClass);

View File

@@ -987,7 +987,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
DocumentCallback<GeoResult<T>> callback = new GeoNearResultDocumentCallback<>(distanceField,
new ProjectingReadCallback<>(mongoConverter, domainType, returnType, collection), near.getMetric());
List<GeoResult<T>> result = new ArrayList<>();
List<GeoResult<T>> result = new ArrayList<>(results.getMappedResults().size());
BigDecimal aggregate = BigDecimal.ZERO;
for (Document element : results) {
@@ -1345,7 +1345,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
Assert.notNull(writer, "MongoWriter must not be null!");
List<Document> documentList = new ArrayList<>();
List<Document> documentList = new ArrayList<>(batchToSave.size());
List<T> initializedBatchToSave = new ArrayList<>(batchToSave.size());
for (T uninitialized : batchToSave) {
@@ -2852,7 +2852,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
.initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
.iterator()) {
List<T> result = new ArrayList<>();
int available = cursor.available();
List<T> result = available > 0 ? new ArrayList<>(available) : new ArrayList<>();
while (cursor.hasNext()) {
Document object = cursor.next();

View File

@@ -756,6 +756,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @return the converted object that was updated or {@link Mono#empty()}, if not found.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be
* {@link #getCollectionName(Class) derived} from the given replacement value.
* @since 2.1
*/
default <T> Mono<T> findAndReplace(Query query, T replacement) {
@@ -795,6 +797,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of
* {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
* as it is after the update.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be
* {@link #getCollectionName(Class) derived} from the given replacement value.
* @since 2.1
*/
default <T> Mono<T> findAndReplace(Query query, T replacement, FindAndReplaceOptions options) {
@@ -865,6 +869,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of
* {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or
* as it is after the update.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be
* {@link #getCollectionName(Class) derived} from the given replacement value.
* @since 2.1
*/
default <S, T> Mono<T> findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class<S> entityType,
@@ -951,6 +957,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* {@literal null}.
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
* @return the count of matching documents.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
*/
Mono<Long> count(Query query, Class<?> entityClass);
@@ -1007,6 +1015,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
*
* @param entityClass must not be {@literal null}.
* @return a {@link Mono} emitting the estimated number of documents.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
* @since 3.1
*/
default Mono<Long> estimatedCount(Class<?> entityClass) {
@@ -1045,6 +1055,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @return the inserted object.
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given object type.
*/
<T> Mono<T> insert(T objectToSave);
@@ -1070,7 +1082,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
*
* @param batchToSave the batch of objects to save. Must not be {@literal null}.
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
* @return the inserted objects .
* @return the inserted objects.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
*/
<T> Flux<T> insert(Collection<? extends T> batchToSave, Class<?> entityClass);
@@ -1089,6 +1103,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
*
* @param objectsToSave the list of objects to save. Must not be {@literal null}.
* @return the saved objects.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} for the given objects.
*/
<T> Flux<T> insertAll(Collection<? extends T> objectsToSave);
@@ -1116,6 +1132,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* @param batchToSave the publisher which provides objects to save. Must not be {@literal null}.
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
* @return the inserted objects.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} for the type.
*/
<T> Flux<T> insertAll(Mono<? extends Collection<? extends T>> batchToSave, Class<?> entityClass);
@@ -1155,6 +1173,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @return the saved object.
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given object type.
*/
<T> Mono<T> save(T objectToSave);
@@ -1191,6 +1211,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
*
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @return the saved object.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given object type.
*/
<T> Mono<T> save(Mono<? extends T> objectToSave);
@@ -1224,6 +1246,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* the existing object. Must not be {@literal null}.
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
* @return the {@link UpdateResult} which lets you access the results of the previous write.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
* @since 3.0
* @see Update
* @see AggregationUpdate
@@ -1278,6 +1302,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* the existing. Must not be {@literal null}.
* @param entityClass class that determines the collection to use.
* @return the {@link UpdateResult} which lets you access the results of the previous write.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
* @since 3.0
* @see Update
* @see AggregationUpdate
@@ -1333,6 +1359,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
* @return the {@link UpdateResult} which lets you access the results of the previous write.
* @since 3.0
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
* @see Update
* @see AggregationUpdate
*/
@@ -1379,6 +1407,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
*
* @param object must not be {@literal null}.
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given object type.
*/
Mono<DeleteResult> remove(Object object);
@@ -1396,6 +1426,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
*
* @param objectToRemove must not be {@literal null}.
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given object type.
*/
Mono<DeleteResult> remove(Mono<? extends Object> objectToRemove);
@@ -1415,6 +1447,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* @param query the query document that specifies the criteria used to remove a record.
* @param entityClass class that determines the collection to use.
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
*/
Mono<DeleteResult> remove(Query query, Class<?> entityClass);
@@ -1458,6 +1492,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* @param query the query document that specifies the criteria used to find and remove documents.
* @param entityClass class of the pojo to be operated on.
* @return the {@link Flux} converted objects deleted by this operation.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
*/
<T> Flux<T> findAllAndRemove(Query query, Class<T> entityClass);
@@ -1489,6 +1525,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* specification.
* @param entityClass the parametrized type of the returned {@link Flux}.
* @return the {@link Flux} of converted objects.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
*/
<T> Flux<T> tail(Query query, Class<T> entityClass);
@@ -1633,6 +1671,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
*
* @param entityClass must not be {@literal null}.
* @return never {@literal null}.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be derived from the type.
* @since 2.1
*/
String getCollectionName(Class<?> entityClass);

View File

@@ -1632,7 +1632,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
LOGGER.debug("Inserting list of Documents containing " + dbDocList.size() + " items");
}
List<Document> documents = new ArrayList<>();
List<Document> documents = new ArrayList<>(dbDocList.size());
return execute(collectionName, collection -> {
@@ -2141,7 +2141,13 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
publisher = filter.isEmpty() ? db.watch(Document.class) : db.watch(filter, Document.class);
}
publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::resumeAfter).orElse(publisher);
if (options.isResumeAfter()) {
publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::resumeAfter)
.orElse(publisher);
} else if (options.isStartAfter()) {
publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::startAfter)
.orElse(publisher);
}
publisher = options.getCollation().map(Collation::toMongoCollation).map(publisher::collation)
.orElse(publisher);
publisher = options.getResumeBsonTimestamp().map(publisher::startAtOperationTime).orElse(publisher);

View File

@@ -675,10 +675,7 @@ public class ConditionalOperators {
if (value instanceof CriteriaDefinition) {
Document mappedObject = context.getMappedObject(((CriteriaDefinition) value).getCriteriaObject());
List<Object> clauses = new ArrayList<Object>();
clauses.addAll(getClauses(context, mappedObject));
List<Object> clauses = getClauses(context, mappedObject);
return clauses.size() == 1 ? clauses.get(0) : clauses;
}
@@ -705,7 +702,9 @@ public class ConditionalOperators {
if (predicate instanceof List) {
List<Object> args = new ArrayList<Object>();
List<?> predicates = (List<?>) predicate;
List<Object> args = new ArrayList<Object>(predicates.size());
for (Object clause : (List<?>) predicate) {
if (clause instanceof Document) {
args.addAll(getClauses(context, (Document) clause));
@@ -723,14 +722,14 @@ public class ConditionalOperators {
continue;
}
List<Object> args = new ArrayList<Object>();
List<Object> args = new ArrayList<Object>(2);
args.add("$" + key);
args.add(nested.get(s));
clauses.add(new Document(s, args));
}
} else if (!isKeyword(key)) {
List<Object> args = new ArrayList<Object>();
List<Object> args = new ArrayList<Object>(2);
args.add("$" + key);
args.add(predicate);
clauses.add(new Document("$eq", args));

View File

@@ -110,7 +110,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
private static ExposedFields createFields(Fields fields, boolean synthetic) {
Assert.notNull(fields, "Fields must not be null!");
List<ExposedField> result = new ArrayList<ExposedField>();
List<ExposedField> result = new ArrayList<ExposedField>(fields.size());
for (Field field : fields) {
result.add(new ExposedField(field, synthetic));

View File

@@ -167,6 +167,10 @@ public final class Fields implements Iterable<Field> {
return result;
}
public int size() {
return fields.size();
}
@Nullable
public Field getField(String name) {
@@ -267,7 +271,7 @@ public final class Fields implements Iterable<Field> {
*/
public String getTarget() {
if (isLocalVar()) {
if (isLocalVar() || pointsToDBRefId()) {
return this.getRaw();
}
@@ -296,6 +300,10 @@ public final class Fields implements Iterable<Field> {
return raw.startsWith("$$") && !raw.startsWith("$$$");
}
protected boolean pointsToDBRefId() { // see https://jira.mongodb.org/browse/SERVER-14466
return raw.endsWith(".$id");
}
/**
* @return
* @since 1.10

View File

@@ -126,7 +126,7 @@ public class GeoNearOperation implements AggregationOperation {
Document command = toDocument(context);
Number limit = (Number) command.get("$geoNear", Document.class).remove("num");
List<Document> stages = new ArrayList<>();
List<Document> stages = new ArrayList<>(3);
stages.add(command);
if (nearQuery.getSkip() != null && nearQuery.getSkip() > 0) {

View File

@@ -138,7 +138,7 @@ public class DefaultDbRefResolver extends DefaultReferenceResolver implements Db
List<Document> result = mongoCollection //
.find(new Document(BasicMongoPersistentProperty.ID_FIELD_NAME, new Document("$in", ids))) //
.into(new ArrayList<>());
.into(new ArrayList<>(ids.size()));
return ids.stream() //
.flatMap(id -> documentWithId(id, result)) //

View File

@@ -24,6 +24,7 @@ import java.util.Map;
import java.util.TreeMap;
import org.bson.Document;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.convert.ReadingConverter;
import org.springframework.data.convert.WritingConverter;
@@ -44,12 +45,10 @@ import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
import org.springframework.data.mongodb.core.geo.GeoJsonPolygon;
import org.springframework.data.mongodb.core.geo.Sphere;
import org.springframework.data.mongodb.core.query.GeoCommand;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.NumberUtils;
import org.springframework.util.ObjectUtils;
import com.mongodb.BasicDBList;
import com.mongodb.Function;
/**
@@ -61,9 +60,9 @@ import com.mongodb.Function;
* @author Thiago Diniz da Silveira
* @since 1.5
*/
@SuppressWarnings("ConstantConditions")
abstract class GeoConverters {
private final static Map<String, Function<Document, GeoJson<?>>> converters;
static {
@@ -93,7 +92,6 @@ abstract class GeoConverters {
*
* @return never {@literal null}.
*/
@SuppressWarnings("unchecked")
public static Collection<? extends Object> getConvertersToRegister() {
return Arrays.asList( //
BoxToDocumentConverter.INSTANCE //
@@ -464,7 +462,7 @@ abstract class GeoConverters {
return null;
}
List argument = new ArrayList();
List<Object> argument = new ArrayList<>(2);
Shape shape = source.getShape();
@@ -484,7 +482,9 @@ abstract class GeoConverters {
} else if (shape instanceof Polygon) {
for (Point point : ((Polygon) shape).getPoints()) {
List<Point> points = ((Polygon) shape).getPoints();
argument = new ArrayList(points.size());
for (Point point : points) {
argument.add(toList(point));
}
@@ -502,8 +502,7 @@ abstract class GeoConverters {
* @author Christoph Strobl
* @since 1.7
*/
@SuppressWarnings("rawtypes")
enum GeoJsonToDocumentConverter implements Converter<GeoJson, Document> {
enum GeoJsonToDocumentConverter implements Converter<GeoJson<?>, Document> {
INSTANCE;
@@ -512,7 +511,7 @@ abstract class GeoConverters {
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
@Override
public Document convert(GeoJson source) {
public Document convert(GeoJson<?> source) {
if (source == null) {
return null;
@@ -522,33 +521,33 @@ abstract class GeoConverters {
if (source instanceof GeoJsonGeometryCollection) {
List dbl = new ArrayList();
List<Object> dbl = new ArrayList<>();
for (GeoJson geometry : ((GeoJsonGeometryCollection) source).getCoordinates()) {
for (GeoJson<?> geometry : ((GeoJsonGeometryCollection) source).getCoordinates()) {
dbl.add(convert(geometry));
}
dbo.put("geometries", dbl);
} else {
dbo.put("coordinates", convertIfNecessarry(source.getCoordinates()));
dbo.put("coordinates", convertIfNecessary(source.getCoordinates()));
}
return dbo;
}
private Object convertIfNecessarry(Object candidate) {
private Object convertIfNecessary(Object candidate) {
if (candidate instanceof GeoJson) {
return convertIfNecessarry(((GeoJson) candidate).getCoordinates());
return convertIfNecessary(((GeoJson<?>) candidate).getCoordinates());
}
if (candidate instanceof Iterable) {
if (candidate instanceof Iterable<?>) {
List dbl = new ArrayList();
List<Object> dbl = new ArrayList<>();
for (Object element : (Iterable) candidate) {
dbl.add(convertIfNecessarry(element));
for (Object element : (Iterable<?>) candidate) {
dbl.add(convertIfNecessary(element));
}
return dbl;
@@ -648,7 +647,7 @@ abstract class GeoConverters {
Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "Polygon"),
String.format("Cannot convert type '%s' to Polygon.", source.get("type")));
return toGeoJsonPolygon((List) source.get("coordinates"));
return toGeoJsonPolygon((List<?>) source.get("coordinates"));
}
}
@@ -674,11 +673,11 @@ abstract class GeoConverters {
Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "MultiPolygon"),
String.format("Cannot convert type '%s' to MultiPolygon.", source.get("type")));
List dbl = (List) source.get("coordinates");
List<?> dbl = (List<?>) source.get("coordinates");
List<GeoJsonPolygon> polygones = new ArrayList<>();
for (Object polygon : dbl) {
polygones.add(toGeoJsonPolygon((List) polygon));
polygones.add(toGeoJsonPolygon((List<?>) polygon));
}
return new GeoJsonMultiPolygon(polygones);
@@ -707,7 +706,7 @@ abstract class GeoConverters {
Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "LineString"),
String.format("Cannot convert type '%s' to LineString.", source.get("type")));
List cords = (List) source.get("coordinates");
List<?> cords = (List<?>) source.get("coordinates");
return new GeoJsonLineString(toListOfPoint(cords));
}
@@ -735,7 +734,7 @@ abstract class GeoConverters {
Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "MultiPoint"),
String.format("Cannot convert type '%s' to MultiPoint.", source.get("type")));
List cords = (List) source.get("coordinates");
List<?> cords = (List<?>) source.get("coordinates");
return new GeoJsonMultiPoint(toListOfPoint(cords));
}
@@ -763,11 +762,11 @@ abstract class GeoConverters {
Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "MultiLineString"),
String.format("Cannot convert type '%s' to MultiLineString.", source.get("type")));
List<GeoJsonLineString> lines = new ArrayList<GeoJsonLineString>();
List cords = (List) source.get("coordinates");
List<GeoJsonLineString> lines = new ArrayList<>();
List<?> cords = (List<?>) source.get("coordinates");
for (Object line : cords) {
lines.add(new GeoJsonLineString(toListOfPoint((List) line)));
lines.add(new GeoJsonLineString(toListOfPoint((List<?>) line)));
}
return new GeoJsonMultiLineString(lines);
}
@@ -810,16 +809,16 @@ abstract class GeoConverters {
}
/**
* Converts a coordinate pairs nested in in {@link BasicDBList} into {@link GeoJsonPoint}s.
* Converts a coordinate pairs nested in {@link List} into {@link GeoJsonPoint}s.
*
* @param listOfCoordinatePairs must not be {@literal null}.
* @return never {@literal null}.
* @since 1.7
*/
@SuppressWarnings("unchecked")
static List<Point> toListOfPoint(List listOfCoordinatePairs) {
static List<Point> toListOfPoint(List<?> listOfCoordinatePairs) {
List<Point> points = new ArrayList<>();
List<Point> points = new ArrayList<>(listOfCoordinatePairs.size());
for (Object point : listOfCoordinatePairs) {
@@ -834,14 +833,16 @@ abstract class GeoConverters {
}
/**
* Converts a coordinate pairs nested in in {@link BasicDBList} into {@link GeoJsonPolygon}.
* Converts a coordinate pairs nested in {@link List} into {@link GeoJsonPolygon}.
*
* @param dbList must not be {@literal null}.
* @return never {@literal null}.
* @since 1.7
*/
static GeoJsonPolygon toGeoJsonPolygon(List dbList) {
return new GeoJsonPolygon(toListOfPoint((List) dbList.get(0)));
static GeoJsonPolygon toGeoJsonPolygon(List<?> dbList) {
GeoJsonPolygon polygon = new GeoJsonPolygon(toListOfPoint((List<?>) dbList.get(0)));
return dbList.size() > 1 ? polygon.withInnerRing(toListOfPoint((List<?>) dbList.get(1))) : polygon;
}
/**
@@ -852,17 +853,11 @@ abstract class GeoConverters {
* @author Christoph Strobl
*/
@ReadingConverter
enum DocumentToGeoJsonConverter implements Converter<Document, GeoJson> {
enum DocumentToGeoJsonConverter implements Converter<Document, GeoJson<?>> {
INSTANCE;
/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
*/
@Nullable
@Override
public GeoJson convert(Document source) {
public GeoJson<?> convert(Document source) {
return toGenericGeoJson(source);
}
}
@@ -871,22 +866,21 @@ abstract class GeoConverters {
String type = source.get("type", String.class);
if(type != null) {
if (type != null) {
Function<Document, GeoJson<?>> converter = converters.get(type);
if(converter != null){
if (converter != null) {
return converter.apply(source);
}
}
throw new IllegalArgumentException(
String.format("No converter found capable of converting GeoJson type %s.", type));
throw new IllegalArgumentException(String.format("No converter found capable of converting GeoJson type %s.", type));
}
private static double toPrimitiveDoubleValue(Object value) {
Assert.isInstanceOf(Number.class, value, "Argument must be a Number.");
return NumberUtils.convertNumberToTargetClass((Number) value, Double.class).doubleValue();
return NumberUtils.convertNumberToTargetClass((Number) value, Double.class);
}
}

View File

@@ -886,14 +886,14 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
}).collect(Collectors.toList());
return writeCollectionInternal(targetCollection, ClassTypeInformation.from(DocumentPointer.class),
new ArrayList<>());
new ArrayList<>(targetCollection.size()));
}
if (property.hasExplicitWriteTarget()) {
return writeCollectionInternal(collection, new FieldTypeInformation<>(property), new ArrayList<>());
return writeCollectionInternal(collection, new FieldTypeInformation<>(property), new ArrayList<>(collection.size()));
}
return writeCollectionInternal(collection, property.getTypeInformation(), new ArrayList<>());
return writeCollectionInternal(collection, property.getTypeInformation(), new ArrayList<>(collection.size()));
}
List<Object> dbList = new ArrayList<>(collection.size());
@@ -978,7 +978,9 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
collection.add(getPotentiallyConvertedSimpleWrite(element,
componentType != null ? componentType.getType() : Object.class));
} else if (element instanceof Collection || elementType.isArray()) {
collection.add(writeCollectionInternal(BsonUtils.asCollection(element), componentType, new ArrayList<>()));
Collection<?> objects = BsonUtils.asCollection(element);
collection.add(writeCollectionInternal(objects, componentType, new ArrayList<>(objects.size())));
} else {
Document document = new Document();
writeInternal(element, document, componentType);

View File

@@ -363,7 +363,7 @@ public class QueryMapper {
if (keyword.isOrOrNor() || (keyword.hasIterableValue() && !keyword.isGeometry())) {
Iterable<?> conditions = keyword.getValue();
List<Object> newConditions = new ArrayList<>();
List<Object> newConditions = conditions instanceof Collection ? new ArrayList<>(((Collection<?>) conditions).size()) : new ArrayList<>();
for (Object condition : conditions) {
newConditions.add(isDocument(condition) ? getMappedObject((Document) condition, entity)

View File

@@ -15,8 +15,10 @@
*/
package org.springframework.data.mongodb.core.convert;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map.Entry;
import org.bson.Document;
@@ -34,6 +36,7 @@ import org.springframework.data.mongodb.core.query.Update.Modifiers;
import org.springframework.data.util.ClassTypeInformation;
import org.springframework.data.util.TypeInformation;
import org.springframework.lang.Nullable;
import org.springframework.util.ObjectUtils;
/**
* A subclass of {@link QueryMapper} that retains type information on the mongo types.
@@ -221,8 +224,18 @@ public class UpdateMapper extends QueryMapper {
: getMappedSort(sortObject, field.getPropertyEntity());
}
TypeInformation<?> typeHint = field == null ? ClassTypeInformation.OBJECT : field.getTypeHint();
if (isAssociationConversionNecessary(field, value)) {
if (ObjectUtils.isArray(value) || value instanceof Collection) {
List<Object> targetPointers = new ArrayList<>();
for (Object val : converter.getConversionService().convert(value, List.class)) {
targetPointers.add(getMappedValue(field, val));
}
return targetPointers;
}
return super.getMappedValue(field, value);
}
TypeInformation<?> typeHint = field == null ? ClassTypeInformation.OBJECT : field.getTypeHint();
return converter.convertToMongoType(value, typeHint);
}

View File

@@ -24,6 +24,7 @@ import java.util.List;
import org.springframework.data.geo.Point;
import org.springframework.data.geo.Polygon;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
/**
* {@link GeoJson} representation of {@link Polygon}. Unlike {@link Polygon} the {@link GeoJsonPolygon} requires a
@@ -142,4 +143,28 @@ public class GeoJsonPolygon extends Polygon implements GeoJson<List<GeoJsonLineS
return result;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
GeoJsonPolygon that = (GeoJsonPolygon) o;
return ObjectUtils.nullSafeEquals(this.coordinates, that.coordinates);
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + ObjectUtils.nullSafeHashCode(coordinates);
return result;
}
}

View File

@@ -337,7 +337,7 @@ public class Criteria implements CriteriaDefinition {
* @see <a href="https://docs.mongodb.com/manual/reference/operator/query/mod/">MongoDB Query operator: $mod</a>
*/
public Criteria mod(Number value, Number remainder) {
List<Object> l = new ArrayList<Object>();
List<Object> l = new ArrayList<Object>(2);
l.add(value);
l.add(remainder);
criteria.put("$mod", l);

View File

@@ -223,8 +223,10 @@ public class ConvertingParameterAccessor implements MongoParameterAccessor {
if (property.isAssociation()) {
if (next.getClass().isArray() || next instanceof Iterable) {
List<DBRef> dbRefs = new ArrayList<DBRef>();
for (Object element : asCollection(next)) {
Collection<?> values = asCollection(next);
List<DBRef> dbRefs = new ArrayList<DBRef>(values.size());
for (Object element : values) {
dbRefs.add(writer.toDBRef(element, property));
}
@@ -258,11 +260,14 @@ public class ConvertingParameterAccessor implements MongoParameterAccessor {
if (source instanceof Iterable) {
List<Object> result = new ArrayList<Object>();
if(source instanceof Collection) {
return new ArrayList<>((Collection<?>) source);
}
List<Object> result = new ArrayList<>();
for (Object element : (Iterable<?>) source) {
result.add(element);
}
return result;
}

View File

@@ -131,13 +131,15 @@ public class MappingMongoConverterParserIntegrationTests {
private void loadConfiguration(String configLocation) {
factory = new DefaultListableBeanFactory();
factory.setAllowBeanDefinitionOverriding(false);
XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(factory);
reader.loadBeanDefinitions(new ClassPathResource(configLocation));
}
private static void assertStrategyReferenceSetFor(String beanId) {
BeanDefinitionRegistry factory = new DefaultListableBeanFactory();
DefaultListableBeanFactory factory = new DefaultListableBeanFactory();
factory.setAllowBeanDefinitionOverriding(false);
XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(factory);
reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-custom-fieldnamingstrategy.xml"));

View File

@@ -903,6 +903,29 @@ public class MongoTemplateDocumentReferenceTests {
assertThat(target).containsEntry("toB", "b");
}
@Test // GH-4041
void updateReferenceWithPushToCollection() {
WithListOfRefs a = new WithListOfRefs();
a.id = "a";
template.save(a);
WithListOfRefs b = new WithListOfRefs();
b.id = "b";
template.save(b);
template.update(WithListOfRefs.class).matching(where("id").is(a.id))
.apply(new Update().push("refs").each(new Object[] { b })).first();
String collection = template.getCollectionName(WithListOfRefs.class);
Document target = template.execute(db -> {
return db.getCollection(collection).find(Filters.eq("_id", "a")).first();
});
assertThat(target).containsEntry("refs", Collections.singletonList("b"));
}
@Test // GH-3782
void updateReferenceHavingCustomizedIdTargetType() {
@@ -1584,4 +1607,11 @@ public class MongoTemplateDocumentReferenceTests {
return publisher;
}
}
@Data
public static class WithListOfRefs {
@Id private String id;
@DocumentReference private List<WithListOfRefs> refs;
}
}

View File

@@ -39,6 +39,8 @@ import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.assertj.core.api.Assertions;
import org.bson.BsonDocument;
import org.bson.BsonString;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.bson.types.ObjectId;
@@ -110,6 +112,7 @@ import com.mongodb.client.result.InsertManyResult;
import com.mongodb.client.result.InsertOneResult;
import com.mongodb.client.result.UpdateResult;
import com.mongodb.reactivestreams.client.AggregatePublisher;
import com.mongodb.reactivestreams.client.ChangeStreamPublisher;
import com.mongodb.reactivestreams.client.DistinctPublisher;
import com.mongodb.reactivestreams.client.FindPublisher;
import com.mongodb.reactivestreams.client.MapReducePublisher;
@@ -145,6 +148,7 @@ public class ReactiveMongoTemplateUnitTests {
@Mock DistinctPublisher distinctPublisher;
@Mock Publisher deletePublisher;
@Mock MapReducePublisher mapReducePublisher;
@Mock ChangeStreamPublisher changeStreamPublisher;
private MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator();
private MappingMongoConverter converter;
@@ -1455,6 +1459,24 @@ public class ReactiveMongoTemplateUnitTests {
.granularity(TimeSeriesGranularity.HOURS).toString());
}
@Test // GH-4167
void changeStreamOptionStartAftershouldApplied() {
when(factory.getMongoDatabase(anyString())).thenReturn(Mono.just(db));
when(collection.watch(any(Class.class))).thenReturn(changeStreamPublisher);
when(changeStreamPublisher.batchSize(anyInt())).thenReturn(changeStreamPublisher);
when(changeStreamPublisher.startAfter(any())).thenReturn(changeStreamPublisher);
when(changeStreamPublisher.fullDocument(any())).thenReturn(changeStreamPublisher);
BsonDocument token = new BsonDocument("token", new BsonString("id"));
template
.changeStream("database", "collection", ChangeStreamOptions.builder().startAfter(token).build(), Object.class)
.subscribe();
verify(changeStreamPublisher).startAfter(eq(token));
}
private void stubFindSubscribe(Document document) {
Publisher<Document> realPublisher = Flux.just(document);

View File

@@ -117,6 +117,13 @@ public class FieldsUnitTests {
assertThat(Fields.field("$$$$target").getTarget()).isEqualTo("target");
}
@Test // GH-4123
public void keepsRawMappingToDbRefId() {
assertThat(Fields.field("$id").getName()).isEqualTo("id");
assertThat(Fields.field("person.$id").getTarget()).isEqualTo("person.$id");
}
private static void verify(Field field, String name, String target) {
assertThat(field).isNotNull();

View File

@@ -1,4 +1,4 @@
Spring Data MongoDB 3.3.5 (2021.1.5)
Spring Data MongoDB 3.3.7 (2021.1.7)
Copyright (c) [2010-2019] Pivotal Software, Inc.
This product is licensed to you under the Apache License, Version 2.0 (the "License").
@@ -34,6 +34,8 @@ conditions of the subcomponent's license, as noted in the LICENSE file.