Compare commits
42 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 20a8b5e5f1 | |
| | 3c25ed8378 | |
| | 6c21eab84b | |
| | f00e8ed93c | |
| | 34a35bd489 | |
| | ba2b65cfd5 | |
| | 2c9975e8db | |
| | 781ba63226 | |
| | d855a0b07d | |
| | d809ee0104 | |
| | d138296123 | |
| | e11560dffc | |
| | 4fbf01467e | |
| | 012d1245b0 | |
| | d8eb0f124a | |
| | bfeb896c70 | |
| | efffc936fa | |
| | 563a3fb845 | |
| | 50ae6fd045 | |
| | ae0e240334 | |
| | 852a461429 | |
| | 2cbed2a052 | |
| | 95667edec3 | |
| | c1a52de8e5 | |
| | 7e94c1bdc3 | |
| | c3259e395c | |
| | 3526b6a2d8 | |
| | cb70a97ea8 | |
| | 52415bc702 | |
| | 43de140842 | |
| | 15b000ecce | |
| | e428b9b977 | |
| | 6e38610ac1 | |
| | e7af70efca | |
| | 39f5f91261 | |
| | c48daa6d56 | |
| | 11356cd20f | |
| | 7385262c47 | |
| | 259938588a | |
| | a1b4e3fc55 | |
| | 76479820bc | |
| | c47bbc4a20 | |
.mvn/wrapper/maven-wrapper.properties (3 changes, vendored)
```diff
@@ -1 +1,2 @@
-distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip
+#Fri Sep 10 15:39:33 CEST 2021
+distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.2/apache-maven-3.8.2-bin.zip
```
pom.xml (6 changes)
```diff
@@ -5,7 +5,7 @@
 
 	<groupId>org.springframework.data</groupId>
 	<artifactId>spring-data-mongodb-parent</artifactId>
-	<version>3.2.3</version>
+	<version>3.2.8</version>
 	<packaging>pom</packaging>
 
 	<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
 	<parent>
 		<groupId>org.springframework.data.build</groupId>
 		<artifactId>spring-data-parent</artifactId>
-		<version>2.5.3</version>
+		<version>2.5.8</version>
 	</parent>
 
 	<modules>
@@ -26,7 +26,7 @@
 	<properties>
 		<project.type>multi</project.type>
 		<dist.id>spring-data-mongodb</dist.id>
-		<springdata.commons>2.5.3</springdata.commons>
+		<springdata.commons>2.5.8</springdata.commons>
 		<mongo>4.2.3</mongo>
 		<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
 		<jmh.version>1.19</jmh.version>
```
```diff
@@ -7,7 +7,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.2.3</version>
+		<version>3.2.8</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
 
@@ -14,7 +14,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.2.3</version>
+		<version>3.2.8</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
 
@@ -11,7 +11,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.2.3</version>
+		<version>3.2.8</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
 
```
```diff
@@ -104,7 +104,8 @@ public class MongoDatabaseUtils {
 
 		Assert.notNull(factory, "Factory must not be null!");
 
-		if (!TransactionSynchronizationManager.isSynchronizationActive()) {
+		if (sessionSynchronization == SessionSynchronization.NEVER
+				|| !TransactionSynchronizationManager.isSynchronizationActive()) {
 			return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
 		}
 
```
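The practical effect of this guard: callers can bypass transaction synchronization entirely by passing the new `NEVER` constant. A minimal sketch, assuming a pre-configured `MongoDatabaseFactory` bean:

```java
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoDatabaseUtils;
import org.springframework.data.mongodb.SessionSynchronization;

import com.mongodb.client.MongoDatabase;

class SynchronizationExample {

	// 'factory' is an assumed, externally wired MongoDatabaseFactory.
	MongoDatabase databaseWithoutSessions(MongoDatabaseFactory factory) {
		// With NEVER the database is returned directly; no ClientSession is
		// looked up or registered, even inside an ongoing (e.g. JTA) transaction.
		return MongoDatabaseUtils.getDatabase(factory, SessionSynchronization.NEVER);
	}
}
```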
```diff
@@ -138,6 +138,10 @@ public class ReactiveMongoDatabaseUtils {
 
 		Assert.notNull(factory, "DatabaseFactory must not be null!");
 
+		if (sessionSynchronization == SessionSynchronization.NEVER) {
+			return getMongoDatabaseOrDefault(dbName, factory);
+		}
+
 		return TransactionSynchronizationManager.forCurrentTransaction()
 				.filter(TransactionSynchronizationManager::isSynchronizationActive) //
 				.flatMap(synchronizationManager -> {
```
```diff
@@ -15,13 +15,20 @@
  */
 package org.springframework.data.mongodb;
 
+import org.springframework.data.mongodb.core.MongoTemplate;
+import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
+
 /**
- * {@link SessionSynchronization} is used along with {@link org.springframework.data.mongodb.core.MongoTemplate} to
- * define in which type of transactions to participate if any.
+ * {@link SessionSynchronization} is used along with {@code MongoTemplate} to define in which type of transactions to
+ * participate if any.
  *
  * @author Christoph Strobl
  * @author Mark Paluch
  * @since 2.1
+ * @see MongoTemplate#setSessionSynchronization(SessionSynchronization)
+ * @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization)
+ * @see ReactiveMongoTemplate#setSessionSynchronization(SessionSynchronization)
+ * @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization)
  */
 public enum SessionSynchronization {
@@ -34,5 +41,12 @@ public enum SessionSynchronization {
 	/**
 	 * Synchronize with native MongoDB transactions initiated via {@link MongoTransactionManager}.
 	 */
-	ON_ACTUAL_TRANSACTION;
+	ON_ACTUAL_TRANSACTION,
+
+	/**
+	 * Do not participate in ongoing transactions.
+	 *
+	 * @since 3.2.5
+	 */
+	NEVER;
 }
```
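For template-based code, the same opt-out is available through the setter referenced in the javadoc above. A hedged wiring sketch; the connection string and database name are placeholders:

```java
import org.springframework.data.mongodb.SessionSynchronization;
import org.springframework.data.mongodb.core.MongoTemplate;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

class TemplateConfigExample {

	MongoTemplate neverSynchronizingTemplate() {

		MongoClient client = MongoClients.create("mongodb://localhost:27017"); // placeholder URI
		MongoTemplate template = new MongoTemplate(client, "test"); // placeholder database name

		// Opt out of transaction participation entirely (new in 3.2.5).
		template.setSessionSynchronization(SessionSynchronization.NEVER);
		return template;
	}
}
```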
```diff
@@ -28,7 +28,7 @@ import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
  * @author Christoph Strobl
  * @since 3.1
  */
-class PersistentEntitiesFactoryBean implements FactoryBean<PersistentEntities> {
+public class PersistentEntitiesFactoryBean implements FactoryBean<PersistentEntities> {
 
 	private final MappingMongoConverter converter;
 
```
```diff
@@ -156,5 +156,14 @@ public class MappedDocument {
 		public List<ArrayFilter> getArrayFilters() {
 			return delegate.getArrayFilters();
 		}
+
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.query.UpdateDefinition#hasArrayFilters()
+		 */
+		@Override
+		public boolean hasArrayFilters() {
+			return delegate.hasArrayFilters();
+		}
 	}
 }
```
```diff
@@ -613,7 +613,7 @@ class QueryOperations {
 
 		UpdateContext(MappedDocument update, boolean upsert) {
 
-			super(new BasicQuery(new Document(BsonUtils.asMap(update.getIdFilter()))));
+			super(new BasicQuery(BsonUtils.asDocument(update.getIdFilter())));
 			this.multi = false;
 			this.upsert = upsert;
 			this.mappedDocument = update;
```
```diff
@@ -1803,8 +1803,9 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
 			Document projections = new Document();
 
 			Fields fields = context.getFields(type);
-			fields.forEach(it -> projections.append(it.getName(), 1));
-			return context.getMappedObject(projections, type);
+
+			fields.forEach(it -> projections.append(it.getTarget(), 1));
+			return projections;
 		}
 	}
```
```diff
@@ -21,7 +21,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.bson.Document;
 import org.springframework.data.mapping.PersistentEntity;
 import org.springframework.data.mapping.PersistentPropertyPath;
 import org.springframework.data.mapping.context.MappingContext;
 import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference;
@@ -122,13 +122,13 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
 		return AggregationOperationContext.super.getFields(type);
 	}
 
-	List<String> fields = new ArrayList<>();
+	List<Field> fields = new ArrayList<>();
 
 	for (MongoPersistentProperty property : entity) {
-		fields.add(property.getName());
+		fields.add(Fields.field(property.getName(), property.getFieldName()));
 	}
 
-	return Fields.fields(fields.toArray(new String[0]));
+	return Fields.from(fields.toArray(new Field[0]));
 }
 
 	/*
```
```diff
@@ -142,12 +142,13 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
 
 	/**
 	 * This toggle allows the {@link AggregationOperationContext context} to use any given field name without checking for
-	 * its existence. Typically the {@link AggregationOperationContext} fails when referencing unknown fields, those that
+	 * its existence. Typically, the {@link AggregationOperationContext} fails when referencing unknown fields, those that
 	 * are not present in one of the previous stages or the input source, throughout the pipeline.
 	 *
 	 * @param type The domain type to map fields to.
 	 * @return a more relaxed {@link AggregationOperationContext}.
 	 * @since 3.1
+	 * @see RelaxedTypeBasedAggregationOperationContext
 	 */
 	public AggregationOperationContext continueOnMissingFieldReference(Class<?> type) {
 		return new RelaxedTypeBasedAggregationOperationContext(type, mappingContext, mapper);
```
```diff
@@ -135,7 +135,7 @@ class DocumentAccessor {
 	 */
 	@Nullable
 	public Object getRawId(MongoPersistentEntity<?> entity) {
-		return entity.hasIdProperty() ? get(entity.getRequiredIdProperty()) : BsonUtils.asMap(document).get("_id");
+		return entity.hasIdProperty() ? get(entity.getRequiredIdProperty()) : BsonUtils.get(document, "_id");
 	}
 
 	/**
```
```diff
@@ -25,7 +25,6 @@ import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Optional;
 import java.util.Set;
 
```
```diff
@@ -1192,21 +1191,22 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
 			return map;
 		}
 
-		for (Entry<String, Object> entry : sourceMap.entrySet()) {
+		sourceMap.forEach((k, v) -> {
 
-			if (typeMapper.isTypeKey(entry.getKey())) {
-				continue;
+			if (typeMapper.isTypeKey(k)) {
+				return;
 			}
 
-			Object key = potentiallyUnescapeMapKey(entry.getKey());
+			Object key = potentiallyUnescapeMapKey(k);
 
 			if (!rawKeyType.isAssignableFrom(key.getClass())) {
 				key = doConvert(key, rawKeyType);
 			}
 
-			Object value = entry.getValue();
+			Object value = v;
 			map.put(key, value == null ? value : context.convert(value, valueType));
-		}
+		});
 
 		return map;
 	}
```
```diff
@@ -40,13 +40,14 @@ import com.mongodb.DBRef;
  * @author Thomas Darimont
  * @author Christoph Strobl
 * @author Mark Paluch
+ * @author Ryan Gibb
 */
 public interface MongoConverter
 		extends EntityConverter<MongoPersistentEntity<?>, MongoPersistentProperty, Object, Bson>, MongoWriter<Object>,
 		EntityReader<Object, Bson> {
 
 	/**
-	 * Returns thw {@link TypeMapper} being used to write type information into {@link Document}s created with that
+	 * Returns the {@link TypeMapper} being used to write type information into {@link Document}s created with that
 	 * converter.
 	 *
 	 * @return will never be {@literal null}.
@@ -139,6 +140,9 @@ public interface MongoConverter
 			if (ObjectId.isValid(id.toString())) {
 				return new ObjectId(id.toString());
 			}
+
+			// avoid ConversionException as convertToMongoType will return String anyways.
+			return id;
 		}
 	}
```
```diff
@@ -1,5 +1,5 @@
 /*
- * Copyright 2011-2021 the original author or authors.
+ * Copyright 2011-2022 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -69,6 +69,7 @@ import com.mongodb.DBRef;
  * @author Christoph Strobl
  * @author Mark Paluch
  * @author David Julia
+ * @author Divya Srivastava
  */
 public class QueryMapper {
```
```diff
@@ -192,12 +193,11 @@ public class QueryMapper {
 		Assert.notNull(sortObject, "SortObject must not be null!");
 
 		if (sortObject.isEmpty()) {
-			return new Document();
+			return BsonUtils.EMPTY_DOCUMENT;
 		}
 
 		Document mappedSort = mapFieldsToPropertyNames(sortObject, entity);
-		mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT);
-		return mappedSort;
+		return mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT);
 	}
 
 	/**
```
```diff
@@ -214,42 +214,51 @@ public class QueryMapper {
 		Assert.notNull(fieldsObject, "FieldsObject must not be null!");
 
 		Document mappedFields = mapFieldsToPropertyNames(fieldsObject, entity);
-		mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE);
-		return mappedFields;
+		return mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE);
 	}
 
 	private Document mapFieldsToPropertyNames(Document fields, @Nullable MongoPersistentEntity<?> entity) {
 
 		if (fields.isEmpty()) {
-			return new Document();
+			return BsonUtils.EMPTY_DOCUMENT;
 		}
 
 		Document target = new Document();
-		for (Map.Entry<String, Object> entry : BsonUtils.asMap(filterUnwrappedObjects(fields, entity)).entrySet()) {
 
-			Field field = createPropertyField(entity, entry.getKey(), mappingContext);
+		BsonUtils.asMap(filterUnwrappedObjects(fields, entity)).forEach((k, v) -> {
+
+			Field field = createPropertyField(entity, k, mappingContext);
 			if (field.getProperty() != null && field.getProperty().isUnwrapped()) {
-				continue;
+				return;
 			}
 
-			target.put(field.getMappedKey(), entry.getValue());
-		}
+			target.put(field.getMappedKey(), v);
+		});
 
 		return target;
 	}
 
-	private void mapMetaAttributes(Document source, @Nullable MongoPersistentEntity<?> entity, MetaMapping metaMapping) {
+	private Document mapMetaAttributes(Document source, @Nullable MongoPersistentEntity<?> entity,
+			MetaMapping metaMapping) {
 
 		if (entity == null) {
-			return;
+			return source;
 		}
 
 		if (entity.hasTextScoreProperty() && !MetaMapping.IGNORE.equals(metaMapping)) {
 
+			if (source == BsonUtils.EMPTY_DOCUMENT) {
+				source = new Document();
+			}
+
 			MongoPersistentProperty textScoreProperty = entity.getTextScoreProperty();
 			if (MetaMapping.FORCE.equals(metaMapping)
 					|| (MetaMapping.WHEN_PRESENT.equals(metaMapping) && source.containsKey(textScoreProperty.getFieldName()))) {
 				source.putAll(getMappedTextScoreField(textScoreProperty));
 			}
 		}
 
+		return source;
 	}
 
 	private Document filterUnwrappedObjects(Document fieldsObject, @Nullable MongoPersistentEntity<?> entity) {
```
```diff
@@ -678,7 +687,7 @@ public class QueryMapper {
 	private Entry<String, Object> createMapEntry(String key, @Nullable Object value) {
 
 		Assert.hasText(key, "Key must not be null or empty!");
-		return Collections.singletonMap(key, value).entrySet().iterator().next();
+		return new AbstractMap.SimpleEntry<>(key, value);
 	}
 
 	private DBRef createDbRefFor(Object source, MongoPersistentProperty property) {
```
```diff
@@ -727,13 +736,13 @@ public class QueryMapper {
 			return false;
 		}
 
-		Set<String> keys = BsonUtils.asMap((Bson) candidate).keySet();
+		Map<String, Object> map = BsonUtils.asMap((Bson) candidate);
 
-		if (keys.size() != 1) {
+		if (map.size() != 1) {
 			return false;
 		}
 
-		return isKeyword(keys.iterator().next());
+		return isKeyword(map.entrySet().iterator().next().getKey());
 	}
 
 	/**
```
```diff
@@ -772,7 +781,8 @@ public class QueryMapper {
 	@Nullable
 	private Object applyFieldTargetTypeHintToValue(Field documentField, @Nullable Object value) {
 
-		if (value == null || documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget()) {
+		if (value == null || documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget()
+				|| value instanceof Document || value instanceof DBObject) {
 			return value;
 		}
 
```
```diff
@@ -816,11 +826,14 @@ public class QueryMapper {
 
 		public Keyword(Bson bson) {
 
-			Set<String> keys = BsonUtils.asMap(bson).keySet();
-			Assert.isTrue(keys.size() == 1, "Can only use a single value Document!");
+			Map<String, Object> map = BsonUtils.asMap(bson);
+			Assert.isTrue(map.size() == 1, "Can only use a single value Document!");
 
-			this.key = keys.iterator().next();
-			this.value = BsonUtils.get(bson, key);
+			Set<Entry<String, Object>> entries = map.entrySet();
+			Entry<String, Object> entry = entries.iterator().next();
+
+			this.key = entry.getKey();
+			this.value = entry.getValue();
 		}
 
 		/**
```
```diff
@@ -1014,8 +1027,8 @@ public class QueryMapper {
 	 */
 	protected static class MetadataBackedField extends Field {
 
-		private static final Pattern POSITIONAL_PARAMETER_PATTERN = Pattern.compile("\\.\\$(\\[.*?\\])?|\\.\\d+");
-		private static final Pattern DOT_POSITIONAL_PATTERN = Pattern.compile("\\.\\d+");
+		private static final Pattern POSITIONAL_PARAMETER_PATTERN = Pattern.compile("\\.\\$(\\[.*?\\])?");
+		private static final Pattern DOT_POSITIONAL_PATTERN = Pattern.compile("\\.\\d+(?!$)");
 		private static final String INVALID_ASSOCIATION_REFERENCE = "Invalid path reference %s! Associations can only be pointed to directly or via their id property!";
 
 		private final MongoPersistentEntity<?> entity;
```
```diff
@@ -1375,6 +1388,14 @@ public class QueryMapper {
 			this.currentIndex = 0;
 		}
 
+		String nextToken() {
+			return pathParts.get(currentIndex + 1);
+		}
+
+		boolean hasNexToken() {
+			return pathParts.size() > currentIndex + 1;
+		}
+
 		/**
 		 * Maps the property name while retaining potential positional operator {@literal $}.
 		 *
```
```diff
@@ -1384,31 +1405,26 @@
 		protected String mapPropertyName(MongoPersistentProperty property) {
 
 			StringBuilder mappedName = new StringBuilder(PropertyToFieldNameConverter.INSTANCE.convert(property));
 
-			boolean inspect = iterator.hasNext();
-
-			while (inspect) {
-
-				String partial = iterator.next();
-				currentIndex++;
-
-				boolean isPositional = isPositionalParameter(partial) && property.isCollectionLike();
-				if (property.isMap() && currentPropertyRoot.equals(partial) && iterator.hasNext()) {
-					partial = iterator.next();
-					currentIndex++;
-				}
-
-				if (isPositional || property.isMap() && !currentPropertyRoot.equals(partial)) {
-					mappedName.append(".").append(partial);
-				}
-
-				inspect = isPositional && iterator.hasNext();
+			if (!hasNexToken()) {
+				return mappedName.toString();
 			}
 
-			if (currentIndex + 1 < pathParts.size()) {
-				currentIndex++;
-				currentPropertyRoot = pathParts.get(currentIndex);
+			String nextToken = nextToken();
+			if (isPositionalParameter(nextToken)) {
+
+				mappedName.append(".").append(nextToken);
+				currentIndex += 2;
+				return mappedName.toString();
 			}
+
+			if (property.isMap()) {
+
+				mappedName.append(".").append(nextToken);
+				currentIndex += 2;
+				return mappedName.toString();
+			}
+
+			currentIndex++;
+			return mappedName.toString();
 		}
```
```diff
@@ -28,6 +28,7 @@ import org.springframework.util.ObjectUtils;
 * {@link GeoJsonMultiPoint} is defined as list of {@link Point}s.
 *
 * @author Christoph Strobl
+ * @author Ivan Volzhev
 * @since 1.7
 * @see <a href="https://geojson.org/geojson-spec.html#multipoint">https://geojson.org/geojson-spec.html#multipoint</a>
 */
```
```diff
@@ -37,17 +38,31 @@ public class GeoJsonMultiPoint implements GeoJson<Iterable<Point>> {
 
 	private final List<Point> points;
 
+	/**
+	 * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}.
+	 *
+	 * @param point must not be {@literal null}.
+	 * @since 3.2.5
+	 */
+	public GeoJsonMultiPoint(Point point) {
+
+		Assert.notNull(point, "Point must not be null!");
+
+		this.points = new ArrayList<>();
+		this.points.add(point);
+	}
+
 	/**
 	 * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}s.
 	 *
-	 * @param points points must not be {@literal null} and have at least 2 entries.
+	 * @param points points must not be {@literal null} and not empty
 	 */
 	public GeoJsonMultiPoint(List<Point> points) {
 
-		Assert.notNull(points, "Points must not be null.");
-		Assert.isTrue(points.size() >= 2, "Minimum of 2 Points required.");
+		Assert.notNull(points, "Points must not be null!");
+		Assert.notEmpty(points, "Points must contain at least one point!");
 
-		this.points = new ArrayList<Point>(points);
+		this.points = new ArrayList<>(points);
 	}
 
 	/**
@@ -63,7 +78,7 @@ public class GeoJsonMultiPoint implements GeoJson<Iterable<Point>> {
 		Assert.notNull(second, "Second point must not be null!");
 		Assert.notNull(others, "Additional points must not be null!");
 
-		this.points = new ArrayList<Point>();
+		this.points = new ArrayList<>();
 		this.points.add(first);
 		this.points.add(second);
 		this.points.addAll(Arrays.asList(others));
```
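A quick sketch of what the relaxed constructors now permit; before this change, a `GeoJsonMultiPoint` required at least two points:

```java
import java.util.Collections;

import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.geo.GeoJsonMultiPoint;

class MultiPointExample {

	void singlePointMultiPoints() {
		// Both forms were rejected before 3.2.5 and are now valid.
		GeoJsonMultiPoint fromPoint = new GeoJsonMultiPoint(new Point(0, 0));
		GeoJsonMultiPoint fromList = new GeoJsonMultiPoint(Collections.singletonList(new Point(0, 0)));
	}
}
```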
```diff
@@ -15,6 +15,7 @@
 */
 package org.springframework.data.mongodb.core.query;
 
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -192,7 +193,7 @@ public class Field {
 	 */
 	public Field slice(String field, int offset, int size) {
 
-		slices.put(field, new Integer[] { offset, size });
+		slices.put(field, Arrays.asList(offset, size));
 		return this;
 	}
```
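The change swaps the `Integer[]` slice arguments for a `List`, which serializes to the two-element array form `$slice` expects. Usage is unchanged; a sketch against an illustrative `values` field (mirroring the `sliceShouldLimitCollectionValues` test later in this diff):

```java
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class SliceExample {

	Query firstValueOnly(String id) {
		Query query = Query.query(Criteria.where("id").is(id));
		// Projects { "values": { "$slice": [0, 1] } } — offset 0, size 1.
		query.fields().slice("values", 0, 1);
		return query;
	}
}
```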
```diff
@@ -49,8 +49,8 @@ public class Meta {
 		}
 	}
 
-	private final Map<String, Object> values = new LinkedHashMap<>(2);
-	private final Set<CursorOption> flags = new LinkedHashSet<>();
+	private Map<String, Object> values = Collections.emptyMap();
+	private Set<CursorOption> flags = Collections.emptySet();
 	private Integer cursorBatchSize;
 	private Boolean allowDiskUse;
 
@@ -63,8 +63,9 @@ public class Meta {
 	 * @param source
 	 */
 	Meta(Meta source) {
-		this.values.putAll(source.values);
-		this.flags.addAll(source.flags);
+
+		this.values = new LinkedHashMap<>(source.values);
+		this.flags = new LinkedHashSet<>(source.flags);
 		this.cursorBatchSize = source.cursorBatchSize;
 		this.allowDiskUse = source.allowDiskUse;
 	}
```
```diff
@@ -158,6 +159,11 @@ public class Meta {
 	public boolean addFlag(CursorOption option) {
 
 		Assert.notNull(option, "CursorOption must not be null!");
 
+		if (this.flags == Collections.EMPTY_SET) {
+			this.flags = new LinkedHashSet<>(2);
+		}
+
 		return this.flags.add(option);
 	}
 
@@ -220,6 +226,10 @@ public class Meta {
 
 		Assert.hasText(key, "Meta key must not be 'null' or blank.");
 
+		if (values == Collections.EMPTY_MAP) {
+			values = new LinkedHashMap<>(2);
+		}
+
 		if (value == null || (value instanceof String && !StringUtils.hasText((String) value))) {
 			this.values.remove(key);
 		}
```
```diff
@@ -21,6 +21,7 @@ import static org.springframework.util.ObjectUtils.*;
 import java.time.Duration;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -30,6 +31,7 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 import org.bson.Document;
+
 import org.springframework.data.domain.Pageable;
 import org.springframework.data.domain.Sort;
 import org.springframework.data.domain.Sort.Order;
```
```diff
@@ -52,7 +54,7 @@ public class Query {
 
 	private static final String RESTRICTED_TYPES_KEY = "_$RESTRICTED_TYPES";
 
-	private final Set<Class<?>> restrictedTypes = new HashSet<>();
+	private Set<Class<?>> restrictedTypes = Collections.emptySet();
 	private final Map<String, CriteriaDefinition> criteria = new LinkedHashMap<>();
 	private @Nullable Field fieldSpec = null;
 	private Sort sort = Sort.unsorted();
@@ -235,8 +237,15 @@ public class Query {
 		Assert.notNull(type, "Type must not be null!");
 		Assert.notNull(additionalTypes, "AdditionalTypes must not be null");
 
+		if (restrictedTypes == Collections.EMPTY_SET) {
+			restrictedTypes = new HashSet<>(1 + additionalTypes.length);
+		}
+
 		restrictedTypes.add(type);
-		restrictedTypes.addAll(Arrays.asList(additionalTypes));
+
+		if (additionalTypes.length > 0) {
+			restrictedTypes.addAll(Arrays.asList(additionalTypes));
+		}
 
 		return this;
 	}
```
```diff
@@ -246,6 +255,17 @@ public class Query {
 	 */
 	public Document getQueryObject() {
 
+		if (criteria.isEmpty() && restrictedTypes.isEmpty()) {
+			return BsonUtils.EMPTY_DOCUMENT;
+		}
+
+		if (criteria.size() == 1 && restrictedTypes.isEmpty()) {
+
+			for (CriteriaDefinition definition : criteria.values()) {
+				return definition.getCriteriaObject();
+			}
+		}
+
 		Document document = new Document();
 
 		for (CriteriaDefinition definition : criteria.values()) {
```
```diff
@@ -263,7 +283,7 @@ public class Query {
 	 * @return the field {@link Document}.
 	 */
 	public Document getFieldsObject() {
-		return this.fieldSpec == null ? new Document() : fieldSpec.getFieldsObject();
+		return this.fieldSpec == null ? BsonUtils.EMPTY_DOCUMENT : fieldSpec.getFieldsObject();
 	}
 
 	/**
```
```diff
@@ -272,13 +292,12 @@ public class Query {
 	public Document getSortObject() {
 
 		if (this.sort.isUnsorted()) {
-			return new Document();
+			return BsonUtils.EMPTY_DOCUMENT;
 		}
 
 		Document document = new Document();
 
-		this.sort.stream()//
-				.forEach(order -> document.put(order.getProperty(), order.isAscending() ? 1 : -1));
+		this.sort.forEach(order -> document.put(order.getProperty(), order.isAscending() ? 1 : -1));
 
 		return document;
 	}
```
```diff
@@ -557,7 +576,7 @@ public class Query {
 		target.limit = source.getLimit();
 		target.hint = source.getHint();
 		target.collation = source.getCollation();
-		target.restrictedTypes.addAll(source.getRestrictedTypes());
+		target.restrictedTypes = new HashSet<>(source.getRestrictedTypes());
 
 		if (source.getMeta().hasValues()) {
 			target.setMeta(new Meta(source.getMeta()));
```
```diff
@@ -18,6 +18,8 @@ package org.springframework.data.mongodb.core.query;
 import java.util.Locale;
 
 import org.bson.Document;
+
+import org.springframework.data.mongodb.util.BsonUtils;
 import org.springframework.lang.Nullable;
 
 /**
```
```diff
@@ -157,7 +159,7 @@ public class TextQuery extends Query {
 			return super.getFieldsObject();
 		}
 
-		Document fields = super.getFieldsObject();
+		Document fields = BsonUtils.asMutableDocument(super.getFieldsObject());
 
 		fields.put(getScoreFieldName(), META_TEXT_SCORE);
 		return fields;
```
```diff
@@ -170,15 +172,14 @@ public class TextQuery extends Query {
 	@Override
 	public Document getSortObject() {
 
-		Document sort = new Document();
-
 		if (this.sortByScore) {
+
+			Document sort = new Document();
 			sort.put(getScoreFieldName(), META_TEXT_SCORE);
+			sort.putAll(super.getSortObject());
+			return sort;
 		}
 
-		sort.putAll(super.getSortObject());
-
-		return sort;
+		return super.getSortObject();
 	}
 
 	/*
```
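The rewrite only materializes a new sort document when sorting by score; otherwise the (possibly shared, immutable) sort from `Query` is passed through unchanged, which is why the fields path above first copies via `asMutableDocument`. A usage sketch with an illustrative search term:

```java
import org.springframework.data.mongodb.core.query.TextCriteria;
import org.springframework.data.mongodb.core.query.TextQuery;

class TextQueryExample {

	TextQuery scoredQuery() {
		// sortByScore() puts { score: { $meta: "textScore" } } ahead of other sort keys.
		return TextQuery.queryText(TextCriteria.forDefaultLanguage().matching("spring")).sortByScore();
	}
}
```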
```diff
@@ -56,10 +56,10 @@ public class Update implements UpdateDefinition {
 	}
 
 	private boolean isolated = false;
-	private Set<String> keysToUpdate = new HashSet<>();
-	private Map<String, Object> modifierOps = new LinkedHashMap<>();
-	private Map<String, PushOperatorBuilder> pushCommandBuilders = new LinkedHashMap<>(1);
-	private List<ArrayFilter> arrayFilters = new ArrayList<>();
+	private final Set<String> keysToUpdate = new HashSet<>();
+	private final Map<String, Object> modifierOps = new LinkedHashMap<>();
+	private Map<String, PushOperatorBuilder> pushCommandBuilders = Collections.emptyMap();
+	private List<ArrayFilter> arrayFilters = Collections.emptyList();
 
 	/**
 	 * Static factory method to create an Update using the provided key
```
```diff
@@ -193,6 +193,11 @@ public class Update implements UpdateDefinition {
 	public PushOperatorBuilder push(String key) {
 
 		if (!pushCommandBuilders.containsKey(key)) {
+
+			if (pushCommandBuilders == Collections.EMPTY_MAP) {
+				pushCommandBuilders = new LinkedHashMap<>(1);
+			}
+
 			pushCommandBuilders.put(key, new PushOperatorBuilder(key));
 		}
 		return pushCommandBuilders.get(key);
```
```diff
@@ -412,6 +417,10 @@ public class Update implements UpdateDefinition {
 	 */
 	public Update filterArray(CriteriaDefinition criteria) {
 
+		if (arrayFilters == Collections.EMPTY_LIST) {
+			this.arrayFilters = new ArrayList<>();
+		}
+
 		this.arrayFilters.add(criteria::getCriteriaObject);
 		return this;
 	}
@@ -427,6 +436,10 @@ public class Update implements UpdateDefinition {
 	 */
 	public Update filterArray(String identifier, Object expression) {
 
+		if (arrayFilters == Collections.EMPTY_LIST) {
+			this.arrayFilters = new ArrayList<>();
+		}
+
 		this.arrayFilters.add(() -> new Document(identifier, expression));
 		return this;
 	}
```
```diff
@@ -455,6 +468,15 @@ public class Update implements UpdateDefinition {
 		return Collections.unmodifiableList(this.arrayFilters);
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.core.query.UpdateDefinition#hasArrayFilters()
+	 */
+	@Override
+	public boolean hasArrayFilters() {
+		return !this.arrayFilters.isEmpty();
+	}
+
 	/**
 	 * This method is not called anymore rather override {@link #addMultiFieldOperation(String, String, Object)}.
 	 *
```
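With `hasArrayFilters()` in place, callers can detect filtered positional updates without inspecting the filter list itself. A sketch of building one; the field and identifier names are illustrative:

```java
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Update;

class ArrayFilterExample {

	Update capHighGrades() {
		// Produces { $set: { "grades.$[element]": 100 } }
		// with arrayFilters: [ { element: { $gte: 90 } } ]
		Update update = new Update().set("grades.$[element]", 100)
				.filterArray(Criteria.where("element").gte(90));

		assert update.hasArrayFilters(); // new accessor shown above
		return update;
	}
}
```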
```diff
@@ -70,7 +70,7 @@ public class MongoRepositoryFactoryBean<T extends Repository<S, ID>, S, ID exten
 	 * @see org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport#setMappingContext(org.springframework.data.mapping.context.MappingContext)
 	 */
 	@Override
-	protected void setMappingContext(MappingContext<?, ?> mappingContext) {
+	public void setMappingContext(MappingContext<?, ?> mappingContext) {
 
 		super.setMappingContext(mappingContext);
 		this.mappingContextConfigured = true;
```
```diff
@@ -212,6 +212,10 @@ public class QuerydslMongoPredicateExecutor<T> extends QuerydslPredicateExecutor
 	 */
 	private SpringDataMongodbQuery<T> applyPagination(SpringDataMongodbQuery<T> query, Pageable pageable) {
 
+		if (pageable.isUnpaged()) {
+			return query;
+		}
+
 		query = query.offset(pageable.getOffset()).limit(pageable.getPageSize());
 		return applySorting(query, pageable.getSort());
 	}
```
```diff
@@ -80,7 +80,7 @@ public class ReactiveMongoRepositoryFactoryBean<T extends Repository<S, ID>, S,
 	 * @see org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport#setMappingContext(org.springframework.data.mapping.context.MappingContext)
 	 */
 	@Override
-	protected void setMappingContext(MappingContext<?, ?> mappingContext) {
+	public void setMappingContext(MappingContext<?, ?> mappingContext) {
 
 		super.setMappingContext(mappingContext);
 		this.mappingContextConfigured = true;
```
```diff
@@ -60,12 +60,26 @@ import com.mongodb.MongoClientSettings;
  */
 public class BsonUtils {
 
+	/**
+	 * The empty document (immutable). This document is serializable.
+	 *
+	 * @since 3.2.5
+	 */
+	public static final Document EMPTY_DOCUMENT = new EmptyDocument();
+
 	@SuppressWarnings("unchecked")
 	@Nullable
 	public static <T> T get(Bson bson, String key) {
 		return (T) asMap(bson).get(key);
 	}
 
+	/**
+	 * Return the {@link Bson} object as {@link Map}. Depending on the input type, the return value can be either a casted
+	 * version of {@code bson} or a converted (detached from the original value).
+	 *
+	 * @param bson
+	 * @return
+	 */
 	public static Map<String, Object> asMap(Bson bson) {
 
 		if (bson instanceof Document) {
```
```diff
@@ -81,6 +95,55 @@ public class BsonUtils {
 		return (Map) bson.toBsonDocument(Document.class, MongoClientSettings.getDefaultCodecRegistry());
 	}
 
+	/**
+	 * Return the {@link Bson} object as {@link Document}. Depending on the input type, the return value can be either a
+	 * casted version of {@code bson} or a converted (detached from the original value).
+	 *
+	 * @param bson
+	 * @return
+	 * @since 3.2.5
+	 */
+	public static Document asDocument(Bson bson) {
+
+		if (bson instanceof Document) {
+			return (Document) bson;
+		}
+
+		Map<String, Object> map = asMap(bson);
+
+		if (map instanceof Document) {
+			return (Document) map;
+		}
+
+		return new Document(map);
+	}
+
+	/**
+	 * Return the {@link Bson} object as mutable {@link Document} containing all entries from {@link Bson}.
+	 *
+	 * @param bson
+	 * @return a mutable {@link Document} containing all entries from {@link Bson}.
+	 * @since 3.2.5
+	 */
+	public static Document asMutableDocument(Bson bson) {
+
+		if (bson instanceof EmptyDocument) {
+			bson = new Document(asDocument(bson));
+		}
+
+		if (bson instanceof Document) {
+			return (Document) bson;
+		}
+
+		Map<String, Object> map = asMap(bson);
+
+		if (map instanceof Document) {
+			return (Document) map;
+		}
+
+		return new Document(map);
+	}
+
 	public static void addToMap(Bson bson, String key, @Nullable Object value) {
 
 		if (bson instanceof Document) {
```
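A brief sketch of how the new helpers behave; the `Filters`-based input shows the conversion path, while a plain `Document` input is returned as-is:

```java
import org.bson.Document;
import org.bson.conversions.Bson;

import org.springframework.data.mongodb.util.BsonUtils;

import com.mongodb.client.model.Filters;

class BsonUtilsExample {

	void demo() {

		Bson filter = Filters.eq("status", "active");
		Document converted = BsonUtils.asDocument(filter); // detached, converted copy

		Document source = new Document("_id", 42);
		Integer id = BsonUtils.get(source, "_id"); // typed single-key access

		// EMPTY_DOCUMENT is immutable; asMutableDocument hands back a safe, mutable copy.
		Document mutable = BsonUtils.asMutableDocument(BsonUtils.EMPTY_DOCUMENT);
		mutable.put("added", true);
	}
}
```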
```diff
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.util;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.BiFunction;
+
+import org.bson.Document;
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * Empty variant of {@link Document}.
+ *
+ * @author Mark Paluch
+ */
+class EmptyDocument extends Document {
+
+	@Override
+	public Document append(String key, Object value) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public Object put(String key, Object value) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public Object remove(Object key) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public void putAll(Map<? extends String, ?> map) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public void replaceAll(BiFunction<? super String, ? super Object, ?> function) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public boolean remove(Object key, Object value) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public boolean replace(String key, Object oldValue, Object newValue) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Nullable
+	@Override
+	public Object replace(String key, Object value) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public Set<Entry<String, Object>> entrySet() {
+		return Collections.emptySet();
+	}
+
+	@Override
+	public Collection<Object> values() {
+		return Collections.emptyList();
+	}
+
+	@Override
+	public Set<String> keySet() {
+		return Collections.emptySet();
+	}
+
+	@Override
+	public void clear() {
+		throw new UnsupportedOperationException();
+	}
+
+}
```
```diff
@@ -364,6 +364,28 @@ infix fun KProperty<GeoJson<*>>.maxDistance(d: Double): Criteria =
 infix fun KProperty<GeoJson<*>>.minDistance(d: Double): Criteria =
     Criteria(asString(this)).minDistance(d)
 
+/**
+ * Creates a geo-spatial criterion using a $maxDistance operation, for use with $near
+ *
+ * See [MongoDB Query operator:
+ * $maxDistance](https://docs.mongodb.com/manual/reference/operator/query/maxDistance/)
+ * @author Sangyong Choi
+ * @since 3.2.5
+ * @see Criteria.maxDistance
+ */
+infix fun Criteria.maxDistance(d: Double): Criteria =
+    this.maxDistance(d)
+
+/**
+ * Creates a geospatial criterion using a $minDistance operation, for use with $near or
+ * $nearSphere.
+ * @author Sangyong Choi
+ * @since 3.2.5
+ * @see Criteria.minDistance
+ */
+infix fun Criteria.minDistance(d: Double): Criteria =
+    this.minDistance(d)
+
 /**
  * Creates a criterion using the $elemMatch operator
  *
```
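These Kotlin infix functions delegate straight to the existing `Criteria.maxDistance`/`minDistance` methods, so the equivalent call chain in Java looks like the following sketch; the field name, point, and distances are illustrative:

```java
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.query.Criteria;

class NearExample {

	Criteria nearbyPlaces() {
		// $near on 'location' bounded by $maxDistance / $minDistance.
		return Criteria.where("location").near(new Point(0, 0))
				.maxDistance(2.0)
				.minDistance(0.1);
	}
}
```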
```diff
@@ -109,6 +109,30 @@ class MongoDatabaseUtilsUnitTests {
 		verify(dbFactory, never()).withSession(any(ClientSession.class));
 	}
 
+	@Test // GH-3760
+	void shouldJustReturnDatabaseIfSessionSynchronizationDisabled() throws Exception {
+
+		when(dbFactory.getMongoDatabase()).thenReturn(db);
+
+		JtaTransactionManager txManager = new JtaTransactionManager(userTransaction);
+		TransactionTemplate txTemplate = new TransactionTemplate(txManager);
+
+		txTemplate.execute(new TransactionCallbackWithoutResult() {
+
+			@Override
+			protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) {
+
+				MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.NEVER);
+
+				assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isFalse();
+			}
+		});
+
+		verify(userTransaction).getStatus();
+		verifyNoMoreInteractions(userTransaction);
+		verifyNoInteractions(session);
+	}
+
 	@Test // DATAMONGO-1920
 	void shouldParticipateInOngoingJtaTransactionWithCommitWhenSessionSychronizationIsAny() throws Exception {
```
```diff
@@ -88,6 +88,20 @@ class ReactiveMongoDatabaseUtilsUnitTests {
 		}).as(StepVerifier::create).expectNext(true).verifyComplete();
 	}
 
+	@Test // GH-3760
+	void shouldJustReturnDatabaseIfSessionSynchronizationDisabled() {
+
+		when(databaseFactory.getMongoDatabase()).thenReturn(Mono.just(db));
+
+		ReactiveMongoDatabaseUtils.getDatabase(databaseFactory, SessionSynchronization.NEVER) //
+				.as(StepVerifier::create) //
+				.expectNextCount(1) //
+				.verifyComplete();
+
+		verify(databaseFactory, never()).getSession(any());
+		verify(databaseFactory, never()).withSession(any(ClientSession.class));
+	}
+
 	@Test // DATAMONGO-2265
 	void shouldNotStartSessionWhenNoTransactionOngoing() {
```
```diff
@@ -3703,6 +3703,23 @@ public class MongoTemplateTests {
 		assertThat(template.find(new BasicQuery("{}").with(Sort.by("id")), WithIdAndFieldAnnotation.class)).isNotEmpty();
 	}
 
+	@Test // GH-3811
+	public void sliceShouldLimitCollectionValues() {
+
+		DocumentWithCollectionOfSimpleType source = new DocumentWithCollectionOfSimpleType();
+		source.id = "id-1";
+		source.values = Arrays.asList("spring", "data", "mongodb");
+
+		template.save(source);
+
+		Criteria criteria = Criteria.where("id").is(source.id);
+		Query query = Query.query(criteria);
+		query.fields().slice("values", 0, 1);
+		DocumentWithCollectionOfSimpleType target = template.findOne(query, DocumentWithCollectionOfSimpleType.class);
+
+		assertThat(target.values).containsExactly("spring");
+	}
+
 	private AtomicReference<ImmutableVersioned> createAfterSaveReference() {
 
 		AtomicReference<ImmutableVersioned> saved = new AtomicReference<>();
```
```diff
@@ -95,6 +95,7 @@ import org.springframework.data.mongodb.core.query.Criteria;
 import org.springframework.data.mongodb.core.query.NearQuery;
 import org.springframework.data.mongodb.core.query.Query;
 import org.springframework.data.mongodb.core.query.Update;
+import org.springframework.data.mongodb.util.BsonUtils;
 import org.springframework.lang.Nullable;
 import org.springframework.test.util.ReflectionTestUtils;
 import org.springframework.util.CollectionUtils;
@@ -1050,7 +1051,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
 		template.doFind("star-wars", new Document(), new Document(), Person.class, PersonSpELProjection.class,
 				CursorPreparer.NO_OP_PREPARER);
 
-		verify(findIterable).projection(eq(new Document()));
+		verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT));
 	}
 
 	@Test // DATAMONGO-1733, DATAMONGO-2041
@@ -1077,7 +1078,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
 		template.doFind("star-wars", new Document(), new Document(), Person.class, Person.class,
 				CursorPreparer.NO_OP_PREPARER);
 
-		verify(findIterable).projection(eq(new Document()));
+		verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT));
 	}
 
 	@Test // DATAMONGO-1733
@@ -1086,7 +1087,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
 		template.doFind("star-wars", new Document(), new Document(), Person.class, PersonExtended.class,
 				CursorPreparer.NO_OP_PREPARER);
 
-		verify(findIterable).projection(eq(new Document()));
+		verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT));
 	}
 
 	@Test // DATAMONGO-1348, DATAMONGO-2264
```
```diff
@@ -27,6 +27,7 @@ import java.util.List;
 
 import org.bson.Document;
 import org.junit.jupiter.api.Test;
+import org.springframework.data.annotation.Id;
 import org.springframework.data.domain.Sort.Direction;
 import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond;
 import org.springframework.data.mongodb.core.aggregation.ProjectionOperationUnitTests.BookWithFieldAnnotation;
@@ -598,9 +599,31 @@ public class AggregationUnitTests {
 		assertThat(extractPipelineElement(target, 1, "$project")).isEqualTo(Document.parse(" { \"_id\" : \"$_id\" }"));
 	}
 
+	@Test // GH-3898
+	void shouldNotConvertIncludeExcludeValuesForProjectOperation() {
+
+		MongoMappingContext mappingContext = new MongoMappingContext();
+		RelaxedTypeBasedAggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(WithRetypedIdField.class, mappingContext,
+				new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)));
+		Document document = project(WithRetypedIdField.class).toDocument(context);
+		assertThat(document).isEqualTo(new Document("$project", new Document("_id", 1).append("renamed-field", 1)));
+	}
+
 	private Document extractPipelineElement(Document agg, int index, String operation) {
 
 		List<Document> pipeline = (List<Document>) agg.get("pipeline");
 		return (Document) pipeline.get(index).get(operation);
 	}
+
+	public class WithRetypedIdField {
+
+		@Id
+		@org.springframework.data.mongodb.core.mapping.Field
+		private String id;
+
+		@org.springframework.data.mongodb.core.mapping.Field("renamed-field")
+		private String foo;
+
+	}
 }
```
```diff
@@ -33,11 +33,6 @@ import org.bson.types.Code;
 import org.bson.types.ObjectId;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.mockito.Mock;
-import org.mockito.junit.jupiter.MockitoExtension;
-import org.mockito.junit.jupiter.MockitoSettings;
-import org.mockito.quality.Strictness;
 
 import org.springframework.core.convert.converter.Converter;
 import org.springframework.data.annotation.Id;
@@ -64,6 +59,7 @@ import org.springframework.data.mongodb.core.query.BasicQuery;
 import org.springframework.data.mongodb.core.query.Criteria;
 import org.springframework.data.mongodb.core.query.Query;
 import org.springframework.data.mongodb.core.query.TextQuery;
 import org.springframework.data.mongodb.core.query.Update;
 
 import com.mongodb.BasicDBObject;
 import com.mongodb.MongoClientSettings;
@@ -79,22 +75,21 @@ import com.mongodb.client.model.Filters;
  * @author Mark Paluch
  * @author David Julia
  */
-@ExtendWith(MockitoExtension.class)
-@MockitoSettings(strictness = Strictness.LENIENT)
 public class QueryMapperUnitTests {
 
 	private QueryMapper mapper;
 	private MongoMappingContext context;
 	private MappingMongoConverter converter;
 
-	@Mock MongoDatabaseFactory factory;
-
 	@BeforeEach
 	void beforeEach() {
 
 		MongoCustomConversions conversions = new MongoCustomConversions();
 		this.context = new MongoMappingContext();
 		this.context.setSimpleTypeHolder(conversions.getSimpleTypeHolder());
 
-		this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), context);
+		this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context);
 		this.converter.setCustomConversions(conversions);
 		this.converter.afterPropertiesSet();
 
 		this.mapper = new QueryMapper(converter);
```
```diff
@@ -1333,6 +1328,25 @@ public class QueryMapperUnitTests {
 		assertThat(mappedFields).containsEntry("_id", 1);
 	}
 
+	@Test // GH-3783
+	void retainsId$InWithStringArray() {
+
+		org.bson.Document mappedQuery = mapper.getMappedObject(
+				org.bson.Document.parse("{ _id : { $in: [\"5b8bedceb1e0bfc07b008828\"]}}"),
+				context.getPersistentEntity(WithExplicitStringId.class));
+		assertThat(mappedQuery.get("_id")).isEqualTo(org.bson.Document.parse("{ $in: [\"5b8bedceb1e0bfc07b008828\"]}"));
+	}
+
+	@Test // GH-3783
+	void mapsId$InInToObjectIds() {
+
+		org.bson.Document mappedQuery = mapper.getMappedObject(
+				org.bson.Document.parse("{ _id : { $in: [\"5b8bedceb1e0bfc07b008828\"]}}"),
+				context.getPersistentEntity(ClassWithDefaultId.class));
+		assertThat(mappedQuery.get("_id"))
+				.isEqualTo(org.bson.Document.parse("{ $in: [ {$oid: \"5b8bedceb1e0bfc07b008828\" } ]}"));
+	}
+
 	class WithDeepArrayNesting {
 
 		List<WithNestedArray> level0;
@@ -1402,6 +1416,12 @@ public class QueryMapperUnitTests {
 		String name;
 	}
 
+	class WithExplicitStringId {
+
+		@MongoId(FieldType.STRING) String id;
+		String name;
+	}
+
 	class BigIntegerId {
 
 		@Id private BigInteger id;
```
```diff
@@ -1,5 +1,5 @@
 /*
- * Copyright 2013-2021 the original author or authors.
+ * Copyright 2013-2022 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -67,6 +67,7 @@ import com.mongodb.DBRef;
  * @author Mark Paluch
  * @author Pavel Vodrazka
 * @author David Julia
+ * @author Divya Srivastava
 */
 @ExtendWith(MockitoExtension.class)
 class UpdateMapperUnitTests {
```
```diff
@@ -1200,6 +1201,86 @@ class UpdateMapperUnitTests {
 		assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"map.class\": \"value\"}}");
 	}
 
+	@Test // GH-3775
+	void mapNestedStringFieldCorrectly() {
+
+		Update update = new Update().set("levelOne.a.b.d", "e");
+		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+				context.getPersistentEntity(EntityWithNestedMap.class));
+
+		assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("levelOne.a.b.d", "e")));
+	}
+
+	@Test // GH-3775
+	void mapNestedIntegerFieldCorrectly() {
+
+		Update update = new Update().set("levelOne.0.1.3", "4");
+		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+				context.getPersistentEntity(EntityWithNestedMap.class));
+
+		assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("levelOne.0.1.3", "4")));
+	}
+
+	@Test // GH-3775
+	void mapNestedMixedStringIntegerFieldCorrectly() {
+
+		Update update = new Update().set("levelOne.0.1.c", "4");
+		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+				context.getPersistentEntity(EntityWithNestedMap.class));
+
+		assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("levelOne.0.1.c", "4")));
+	}
+
+	@Test // GH-3775
+	void mapNestedMixedStringIntegerWithStartNumberFieldCorrectly() {
+
+		Update update = new Update().set("levelOne.0a.1b.3c", "4");
+		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+				context.getPersistentEntity(EntityWithNestedMap.class));
+
+		assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("levelOne.0a.1b.3c", "4")));
+	}
+
+	@Test // GH-3688
+	void multipleKeysStartingWithANumberInNestedPath() {
+
+		Update update = new Update().set("intKeyedMap.1a.map.0b", "testing");
+		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+				context.getPersistentEntity(EntityWithIntKeyedMap.class));
+
+		assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"intKeyedMap.1a.map.0b\": \"testing\"}}");
+	}
+
+	@Test // GH-3921
+	void mapNumericKeyInPathHavingComplexMapValyeTypes() {
+
+		Update update = new Update().set("testInnerData.testMap.1.intValue", "4");
+		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+				context.getPersistentEntity(TestData.class));
+
+		assertThat(mappedUpdate).isEqualTo("{ $set: { 'testInnerData.testMap.1.intValue': '4' }}");
+	}
+
+	@Test // GH-3921
+	void mapNumericKeyInPathNotMatchingExistingProperties() {
+
+		Update update = new Update().set("testInnerData.imaginaryMap.1.nonExistingProperty", "4");
+		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+				context.getPersistentEntity(TestData.class));
+
+		assertThat(mappedUpdate).isEqualTo("{ $set: { 'testInnerData.imaginaryMap.1.nonExistingProperty': '4' }}");
+	}
+
+	@Test // GH-3921
+	void mapNumericKeyInPathPartiallyMatchingExistingProperties() {
+
+		Update update = new Update().set("testInnerData.testMap.1.nonExistingProperty.2.someValue", "4");
+		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+				context.getPersistentEntity(TestData.class));
+
+		assertThat(mappedUpdate).isEqualTo("{ $set: { 'testInnerData.testMap.1.nonExistingProperty.2.someValue': '4' }}");
+	}
+
 	static class DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes {
 		ListModelWrapper concreteTypeWithListAttributeOfInterfaceType;
 	}
@@ -1436,7 +1517,7 @@ class UpdateMapperUnitTests {
 		Map<Object, NestedDocument> concreteMap;
 	}
 
-	static class EntityWithIntKeyedMap{
+	static class EntityWithIntKeyedMap {
 		Map<Integer, EntityWithObjectMap> intKeyedMap;
 	}
 
@@ -1566,4 +1647,23 @@ class UpdateMapperUnitTests {
 		String transientValue;
 	}
 
+	static class EntityWithNestedMap {
+		Map<String, Map<String, Map<String, Object>>> levelOne;
+	}
+
+	@Data
+	private static class TestData {
+		@Id private String id;
+		private TestInnerData testInnerData;
+	}
+
+	@Data
+	private static class TestInnerData {
+		private Map<Integer, TestValue> testMap;
+	}
+
+	@Data
+	private static class TestValue {
+		private int intValue;
+	}
 }
```
```diff
@@ -63,6 +63,7 @@ import com.mongodb.client.MongoCollection;
 /**
  * @author Christoph Strobl
  * @author Mark Paluch
+ * @author Ivan Volzhev
  */
 @ExtendWith({ MongoClientExtension.class, SpringExtension.class })
 @ContextConfiguration
@@ -329,6 +330,21 @@ public class GeoJsonTests {
 		assertThat(result.geoJsonMultiPoint).isEqualTo(obj.geoJsonMultiPoint);
 	}
 
+	@Test // DATAMONGO-3776
+	public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiPointTypeWithOnePointCorrectly() {
+
+		DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType();
+		obj.id = "geoJsonMultiPoint";
+		obj.geoJsonMultiPoint = new GeoJsonMultiPoint(new Point(0, 0));
+
+		template.save(obj);
+
+		DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)),
+				DocumentWithPropertyUsingGeoJsonType.class);
+
+		assertThat(result.geoJsonMultiPoint).isEqualTo(obj.geoJsonMultiPoint);
+	}
+
 	@Test // DATAMONGO-1137
 	public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiPolygonTypeCorrectly() {
```
```diff
@@ -237,11 +237,8 @@ class QueryTests {
 		source.addCriteria(where("From one make ten").is("and two let be."));
 		Query target = Query.of(source);
 
-		compareQueries(target, source);
 		source.addCriteria(where("Make even three").is("then rich you'll be."));
 
-		assertThat(target.getQueryObject()).isEqualTo(new Document("From one make ten", "and two let be."))
-				.isNotEqualTo(source.getQueryObject());
+		assertThat(target.getQueryObject()).containsAllEntriesOf(new Document("From one make ten", "and two let be."))
+				.isNotSameAs(source.getQueryObject());
 	}
 
 	@Test // DATAMONGO-1783
@@ -353,9 +350,12 @@ class QueryTests {
 	private void compareQueries(Query actual, Query expected) {
 
 		assertThat(actual.getCollation()).isEqualTo(expected.getCollation());
-		assertThat(actual.getSortObject()).isEqualTo(expected.getSortObject());
-		assertThat(actual.getFieldsObject()).isEqualTo(expected.getFieldsObject());
-		assertThat(actual.getQueryObject()).isEqualTo(expected.getQueryObject());
+		assertThat(actual.getSortObject()).hasSameSizeAs(expected.getSortObject())
+				.containsAllEntriesOf(expected.getSortObject());
+		assertThat(actual.getFieldsObject()).hasSameSizeAs(expected.getFieldsObject())
+				.containsAllEntriesOf(expected.getFieldsObject());
+		assertThat(actual.getQueryObject()).hasSameSizeAs(expected.getQueryObject())
+				.containsAllEntriesOf(expected.getQueryObject());
 		assertThat(actual.getHint()).isEqualTo(expected.getHint());
 		assertThat(actual.getLimit()).isEqualTo(expected.getLimit());
 		assertThat(actual.getSkip()).isEqualTo(expected.getSkip());
```
@@ -127,8 +127,8 @@ public class PartTreeMongoQueryUnitTests {

	}

	@Test // DATAMONGO-1345, DATAMONGO-1735
	public void doesNotDeriveFieldSpecForNormalDomainType() {
		assertThat(deriveQueryFromMethod("findPersonBy", new Object[0]).getFieldsObject()).isEqualTo(new Document());
	void doesNotDeriveFieldSpecForNormalDomainType() {
		assertThat(deriveQueryFromMethod("findPersonBy", new Object[0]).getFieldsObject()).isEmpty();
	}

	@Test // DATAMONGO-1345

@@ -173,7 +173,7 @@ public class PartTreeMongoQueryUnitTests {

		org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findAllBy");

		assertThat(query.getFieldsObject()).isEqualTo(new Document());
		assertThat(query.getFieldsObject()).isEmpty();
	}

	@Test // DATAMONGO-1865
@@ -27,6 +27,8 @@ import org.junit.runner.RunWith;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.dao.PermissionDeniedDataAccessException;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.MongoDatabaseFactory;

@@ -122,6 +124,20 @@ public class QuerydslMongoPredicateExecutorIntegrationTests {

				.containsExactly(dave);
	}

	@Test // GH-3751
	public void findPage() {

		assertThat(repository
				.findAll(person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname())),
						PageRequest.of(0, 10))
				.getContent()).containsExactly(dave);

		assertThat(repository
				.findAll(person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname())),
						Pageable.unpaged())
				.getContent()).containsExactly(dave);
	}

	@Test // DATAMONGO-362, DATAMONGO-1848
	public void springDataMongodbQueryShouldAllowJoinOnDBref() {
@@ -25,8 +25,11 @@ import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type

import java.util.regex.Pattern

/**
 * Unit tests for [Criteria] extensions.
 *
 * @author Tjeu Kayim
 * @author Mark Paluch
 * @author Sangyong Choi
 */
class TypedCriteriaExtensionsTests {

@@ -317,6 +320,54 @@ class TypedCriteriaExtensionsTests {

		assertThat(typed).isEqualTo(expected)
	}

	@Test
	fun `maxDistance() should equal expected criteria with nearSphere`() {

		val point = Point(0.0, 0.0)
		val typed = Building::location nearSphere point maxDistance 3.0
		val expected = Criteria("location")
			.nearSphere(point)
			.maxDistance(3.0)

		assertThat(typed).isEqualTo(expected)
	}

	@Test
	fun `minDistance() should equal expected criteria with nearSphere`() {

		val point = Point(0.0, 0.0)
		val typed = Building::location nearSphere point minDistance 3.0
		val expected = Criteria("location")
			.nearSphere(point)
			.minDistance(3.0)

		assertThat(typed).isEqualTo(expected)
	}

	@Test
	fun `maxDistance() should equal expected criteria with near`() {

		val point = Point(0.0, 0.0)
		val typed = Building::location near point maxDistance 3.0
		val expected = Criteria("location")
			.near(point)
			.maxDistance(3.0)

		assertThat(typed).isEqualTo(expected)
	}

	@Test
	fun `minDistance() should equal expected criteria with near`() {

		val point = Point(0.0, 0.0)
		val typed = Building::location near point minDistance 3.0
		val expected = Criteria("location")
			.near(point)
			.minDistance(3.0)

		assertThat(typed).isEqualTo(expected)
	}

	@Test
	fun `elemMatch() should equal expected criteria`() {
653
src/main/asciidoc/reference/aggregation-framework.adoc
Normal file
@@ -0,0 +1,653 @@
[[mongo.aggregation]]
== Aggregation Framework Support

Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2.

For further information, see the full https://docs.mongodb.org/manual/aggregation/[reference documentation] of the aggregation framework and other data aggregation tools for MongoDB.

[[mongo.aggregation.basic-concepts]]
=== Basic Concepts

The Aggregation Framework support in Spring Data MongoDB is based on the following key abstractions: `Aggregation`, `AggregationDefinition`, and `AggregationResults`.

* `Aggregation`
+
An `Aggregation` represents a MongoDB `aggregate` operation and holds the description of the aggregation pipeline instructions. Aggregations are created by invoking the appropriate `newAggregation(…)` static factory method of the `Aggregation` class, which takes a list of `AggregateOperation` and an optional input class.
+
The actual aggregate operation is run by the `aggregate` method of the `MongoTemplate`, which takes the desired output class as a parameter.
+
* `TypedAggregation`
+
A `TypedAggregation`, just like an `Aggregation`, holds the instructions of the aggregation pipeline and a reference to the input type that is used for mapping domain properties to actual document fields.
+
At runtime, field references get checked against the given input type, considering potential `@Field` annotations.
[NOTE]
====
Changed in 3.2: referencing non-existent properties no longer raises errors. To restore the previous behaviour, use the `strictMapping` option of `AggregationOptions`.
====
* `AggregationDefinition`
+
An `AggregationDefinition` represents a MongoDB aggregation pipeline operation and describes the processing that should be performed in this aggregation step. Although you could manually create an `AggregationDefinition`, we recommend using the static factory methods provided by the `Aggregate` class to construct an `AggregateOperation`.
+
* `AggregationResults`
+
`AggregationResults` is the container for the result of an aggregate operation. It provides access to the raw aggregation result (in the form of a `Document`), to the mapped objects, and to other information about the aggregation.
+
The following listing shows the canonical example for using the Spring Data MongoDB support for the MongoDB Aggregation Framework:
+
[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

Aggregation agg = newAggregation(
    pipelineOP1(),
    pipelineOP2(),
    pipelineOPn()
);

AggregationResults<OutputType> results = mongoTemplate.aggregate(agg, "INPUT_COLLECTION_NAME", OutputType.class);
List<OutputType> mappedResult = results.getMappedResults();
----

Note that, if you provide an input class as the first parameter to the `newAggregation` method, the `MongoTemplate` derives the name of the input collection from this class. Otherwise, if you do not specify an input class, you must provide the name of the input collection explicitly. If both an input class and an input collection are provided, the latter takes precedence.
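
The following sketch contrasts the two variants (a minimal example; the `Product` input type, the `product_archive` collection name, and `OutputType` are illustrative, and static imports of `Aggregation.*` and `Criteria.where` are assumed):

[source,java]
----
// collection name derived from the Product input class
TypedAggregation<Product> typedAgg = newAggregation(Product.class,
    match(where("name").is("abc")));
mongoTemplate.aggregate(typedAgg, OutputType.class);

// an explicitly given collection name takes precedence over the input class
mongoTemplate.aggregate(typedAgg, "product_archive", OutputType.class);
----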
[[mongo.aggregation.supported-aggregation-operations]]
=== Supported Aggregation Operations

The MongoDB Aggregation Framework provides the following types of aggregation operations:

* Pipeline Aggregation Operators
* Group/Accumulator Aggregation Operators
* Boolean Aggregation Operators
* Comparison Aggregation Operators
* Arithmetic Aggregation Operators
* String Aggregation Operators
* Date Aggregation Operators
* Array Aggregation Operators
* Conditional Aggregation Operators
* Lookup Aggregation Operators
* Convert Aggregation Operators
* Object Aggregation Operators
* Script Aggregation Operators

At the time of this writing, we provide support for the following Aggregation Operations in Spring Data MongoDB:

.Aggregation Operations currently supported by Spring Data MongoDB
[cols="2*"]
|===
| Pipeline Aggregation Operators
| `bucket`, `bucketAuto`, `count`, `facet`, `geoNear`, `graphLookup`, `group`, `limit`, `lookup`, `match`, `project`, `replaceRoot`, `skip`, `sort`, `unwind`

| Set Aggregation Operators
| `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue`

| Group/Accumulator Aggregation Operators
| `addToSet`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `count` (+++*+++), `stdDevPop`, `stdDevSamp`

| Arithmetic Aggregation Operators
| `abs`, `add` (+++*+++ via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `trunc`

| String Aggregation Operators
| `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtrim`

| Comparison Aggregation Operators
| `eq` (+++*+++ via `is`), `gt`, `gte`, `lt`, `lte`, `ne`

| Array Aggregation Operators
| `arrayElementAt`, `arrayToObject`, `concatArrays`, `filter`, `in`, `indexOfArray`, `isArray`, `range`, `reverseArray`, `reduce`, `size`, `slice`, `zip`

| Literal Operators
| `literal`

| Date Aggregation Operators
| `dayOfYear`, `dayOfMonth`, `dayOfWeek`, `year`, `month`, `week`, `hour`, `minute`, `second`, `millisecond`, `dateToString`, `dateFromString`, `dateFromParts`, `dateToParts`, `isoDayOfWeek`, `isoWeek`, `isoWeekYear`

| Variable Operators
| `map`

| Conditional Aggregation Operators
| `cond`, `ifNull`, `switch`

| Type Aggregation Operators
| `type`

| Convert Aggregation Operators
| `convert`, `toBool`, `toDate`, `toDecimal`, `toDouble`, `toInt`, `toLong`, `toObjectId`, `toString`

| Object Aggregation Operators
| `objectToArray`, `mergeObjects`

| Script Aggregation Operators
| `function`, `accumulator`
|===

+++*+++ The operation is mapped or added by Spring Data MongoDB.

Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions.
[[mongo.aggregation.projection]]
=== Projection Expressions

Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined through the `project` method of the `Aggregation` class, either by passing a list of `String` objects or an aggregation framework `Fields` object. The projection can be extended with additional fields through a fluent API by using the `and(String)` method and aliased by using the `as(String)` method.
Note that you can also define fields with aliases by using the `Fields.field` static factory method of the aggregation framework, which you can then use to construct a new `Fields` instance. References to projected fields in later aggregation stages are valid only for the field names of included fields or their aliases (including newly defined fields and their aliases). Fields not included in the projection cannot be referenced in later aggregation stages. The following listings show examples of projection expressions:

.Projection expression examples
====
[source,java]
----
// generates {$project: {name: 1, netPrice: 1}}
project("name", "netPrice")

// generates {$project: {thing2: $thing1}}
project().and("thing1").as("thing2")

// generates {$project: {a: 1, b: 1, thing2: $thing1}}
project("a","b").and("thing1").as("thing2")
----
====

.Multi-Stage Aggregation using Projection and Sorting
====
[source,java]
----
// generates {$project: {name: 1, netPrice: 1}}, {$sort: {name: 1}}
project("name", "netPrice"), sort(ASC, "name")

// generates {$project: {name: $firstname}}, {$sort: {name: 1}}
project().and("firstname").as("name"), sort(ASC, "name")

// does not work
project().and("firstname").as("name"), sort(ASC, "firstname")
----
====
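
An alias can also be declared up front through the `Fields` API mentioned above (a minimal sketch; the `netPrice` / `net_price` field names are illustrative, and static imports of `Fields.fields` and `Fields.field` are assumed):

[source,java]
----
// generates {$project: {name: 1, netPrice: $net_price}}
project(fields("name").and(field("netPrice", "net_price")))
----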
More examples for project operations can be found in the `AggregationTests` class. Note that further details regarding the projection expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation.
[[mongo.aggregation.facet]]
=== Faceted Classification

As of Version 3.4, MongoDB supports faceted classification by using the Aggregation Framework. A faceted classification uses semantic categories (either general or subject-specific) that are combined to create the full classification entry. Documents flowing through the aggregation pipeline are classified into buckets. A multi-faceted classification enables various aggregations on the same set of input documents, without needing to retrieve the input documents multiple times.

==== Buckets

Bucket operations categorize incoming documents into groups, called buckets, based on a specified expression and bucket boundaries. Bucket operations require a grouping field or a grouping expression. You can define them by using the `bucket()` and `bucketAuto()` methods of the `Aggregate` class. `BucketOperation` and `BucketAutoOperation` can expose accumulations based on aggregation expressions for input documents. You can extend the bucket operation with additional parameters through a fluent API by using the `with…()` methods and the `andOutput(String)` method. You can alias the operation by using the `as(String)` method. Each bucket is represented as a document in the output.

`BucketOperation` takes a defined set of boundaries to group incoming documents into these categories. Boundaries are required to be sorted. The following listing shows some examples of bucket operations:

.Bucket operation examples
====
[source,java]
----
// generates {$bucket: {groupBy: $price, boundaries: [0, 100, 400]}}
bucket("price").withBoundaries(0, 100, 400);

// generates {$bucket: {groupBy: $price, default: "Other", boundaries: [0, 100]}}
bucket("price").withBoundaries(0, 100).withDefault("Other");

// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { count: { $sum: 1}}}}
bucket("price").withBoundaries(0, 100).andOutputCount().as("count");

// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { titles: { $push: "$title"}}}
bucket("price").withBoundaries(0, 100).andOutput("title").push().as("titles");
----
====

`BucketAutoOperation` determines boundaries in an attempt to evenly distribute documents into a specified number of buckets. `BucketAutoOperation` optionally takes a granularity value that specifies the https://en.wikipedia.org/wiki/Preferred_number[preferred number] series to use to ensure that the calculated boundary edges end on preferred round numbers or on powers of 10. The following listing shows examples of bucket operations:

.Bucket operation examples
====
[source,java]
----
// generates {$bucketAuto: {groupBy: $price, buckets: 5}}
bucketAuto("price", 5)

// generates {$bucketAuto: {groupBy: $price, buckets: 5, granularity: "E24", default: "Other"}}
bucketAuto("price", 5).withGranularity(Granularities.E24).withDefault("Other");

// generates {$bucketAuto: {groupBy: $price, buckets: 5, output: { titles: { $push: "$title"}}}
bucketAuto("price", 5).andOutput("title").push().as("titles");
----
====

To create output fields in buckets, bucket operations can use `AggregationExpression` through `andOutput()` and <<mongo.aggregation.projection.expressions, SpEL expressions>> through `andOutputExpression()`.
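
For instance, a SpEL-based output field can be added to a bucket as shown in the following sketch (the `netPrice` and `surcharge` field names are hypothetical):

[source,java]
----
// sums (netPrice + surcharge) per bucket into an output field named "total"
bucket("price").withBoundaries(0, 100)
    .andOutputExpression("netPrice + surcharge").sum().as("total");
----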
Note that further details regarding bucket expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/bucket/[`$bucket` section] and
https://docs.mongodb.org/manual/reference/operator/aggregation/bucketAuto/[`$bucketAuto` section] of the MongoDB Aggregation Framework reference documentation.

==== Multi-faceted Aggregation

Multiple aggregation pipelines can be used to create multi-faceted aggregations that characterize data across multiple dimensions (or facets) within a single aggregation stage. Multi-faceted aggregations provide multiple filters and categorizations to guide data browsing and analysis. A common implementation of faceting is how many online retailers provide ways to narrow down search results by applying filters on product price, manufacturer, size, and other factors.

You can define a `FacetOperation` by using the `facet()` method of the `Aggregation` class. You can customize it with multiple aggregation pipelines by using the `and()` method. Each sub-pipeline has its own field in the output document where its results are stored as an array of documents.

Sub-pipelines can project and filter input documents prior to grouping. Common use cases include extraction of date parts or calculations before categorization. The following listing shows facet operation examples:

.Facet operation examples
====
[source,java]
----
// generates {$facet: {categorizedByPrice: [ { $match: { price: {$exists : true}}}, { $bucketAuto: {groupBy: $price, buckets: 5}}]}}
facet(match(Criteria.where("price").exists(true)), bucketAuto("price", 5)).as("categorizedByPrice")

// generates {$facet: {categorizedByCountry: [ { $match: { country: {$exists : true}}}, { $sortByCount: "$country"}]}}
facet(match(Criteria.where("country").exists(true)), sortByCount("country")).as("categorizedByCountry")

// generates {$facet: {categorizedByYear: [
//     { $project: { title: 1, publicationYear: { $year: "publicationDate"}}},
//     { $bucketAuto: {groupBy: $price, buckets: 5, output: { titles: {$push:"$title"}}}
// ]}}
facet(project("title").and("publicationDate").extractYear().as("publicationYear"),
      bucketAuto("publicationYear", 5).andOutput("title").push().as("titles"))
    .as("categorizedByYear")
----
====

Note that further details regarding facet operation can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/facet/[`$facet` section] of the MongoDB Aggregation Framework reference documentation.

[[mongo.aggregation.sort-by-count]]
==== Sort By Count

Sort by count operations group incoming documents based on the value of a specified expression, compute the count of documents in each distinct group, and sort the results by count. It offers a handy shortcut to apply sorting when using <<mongo.aggregation.facet>>. Sort by count operations require a grouping field or grouping expression. The following listing shows a sort by count example:

.Sort by count example
====
[source,java]
----
// generates { $sortByCount: "$country" }
sortByCount("country");
----
====

A sort by count operation is equivalent to the following BSON (Binary JSON):

----
{ $group: { _id: <expression>, count: { $sum: 1 } } },
{ $sort: { count: -1 } }
----
[[mongo.aggregation.projection.expressions]]
==== Spring Expression Support in Projection Expressions

We support the use of SpEL expressions in projection expressions through the `andExpression` method of the `ProjectionOperation` and `BucketOperation` classes. This feature lets you define the desired expression as a SpEL expression. On running a query, the SpEL expression is translated into a corresponding MongoDB projection expression part. This arrangement makes it much easier to express complex calculations.

===== Complex Calculations with SpEL expressions

Consider the following SpEL expression:

[source,java]
----
1 + (q + 1) / (q - 1)
----

The preceding expression is translated into the following projection expression part:

[source,javascript]
----
{ "$add" : [ 1, {
    "$divide" : [ {
        "$add":["$q", 1]}, {
        "$subtract":[ "$q", 1]}
    ]
}]}
----
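
To use the expression in a pipeline, it can be handed to `andExpression` on a projection stage, as in the following sketch (the `result` alias is illustrative):

[source,java]
----
// translated into the $add/$divide/$subtract tree shown above
project().andExpression("1 + (q + 1) / (q - 1)").as("result");
----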
You can see examples in more context in <<mongo.aggregation.examples.example5>> and <<mongo.aggregation.examples.example6>>. You can find more usage examples for supported SpEL expression constructs in `SpelExpressionTransformerUnitTests`. The following table shows the SpEL transformations supported by Spring Data MongoDB:

.Supported SpEL transformations
[%header,cols="2"]
|===
| SpEL Expression
| Mongo Expression Part
| a == b
| { $eq : [$a, $b] }
| a != b
| { $ne : [$a , $b] }
| a > b
| { $gt : [$a, $b] }
| a >= b
| { $gte : [$a, $b] }
| a < b
| { $lt : [$a, $b] }
| a <= b
| { $lte : [$a, $b] }
| a + b
| { $add : [$a, $b] }
| a - b
| { $subtract : [$a, $b] }
| a * b
| { $multiply : [$a, $b] }
| a / b
| { $divide : [$a, $b] }
| a^b
| { $pow : [$a, $b] }
| a % b
| { $mod : [$a, $b] }
| a && b
| { $and : [$a, $b] }
| a \|\| b
| { $or : [$a, $b] }
| !a
| { $not : [$a] }
|===

In addition to the transformations shown in the preceding table, you can use standard SpEL operations such as `new` to (for example) create arrays and reference expressions through their names (followed by the arguments to use in brackets). The following example shows how to create an array in this fashion:

[source,java]
----
// { $setEquals : [$a, [5, 8, 13] ] }
.andExpression("setEquals(a, new int[]{5, 8, 13})");
----
[[mongo.aggregation.examples]]
==== Aggregation Framework Examples

The examples in this section demonstrate the usage patterns for the MongoDB Aggregation Framework with Spring Data MongoDB.

[[mongo.aggregation.examples.example1]]
===== Aggregation Framework Example 1

In this introductory example, we want to aggregate a list of tags to get the occurrence count of a particular tag from a MongoDB collection (called `tags`) sorted by the occurrence count in descending order. This example demonstrates the usage of grouping, sorting, projections (selection), and unwinding (result splitting).

[source,java]
----
class TagCount {
    String tag;
    int n;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

Aggregation agg = newAggregation(
    project("tags"),
    unwind("tags"),
    group("tags").count().as("n"),
    project("n").and("tag").previousOperation(),
    sort(DESC, "n")
);

AggregationResults<TagCount> results = mongoTemplate.aggregate(agg, "tags", TagCount.class);
List<TagCount> tagCount = results.getMappedResults();
----

The preceding listing uses the following algorithm:

. Create a new aggregation by using the `newAggregation` static factory method, to which we pass a list of aggregation operations. These aggregate operations define the aggregation pipeline of our `Aggregation`.
. Use the `project` operation to select the `tags` field (which is an array of strings) from the input collection.
. Use the `unwind` operation to generate a new document for each tag within the `tags` array.
. Use the `group` operation to define a group for each `tags` value for which we aggregate the occurrence count (by using the `count` aggregation operator and collecting the result in a new field called `n`).
. Select the `n` field and create an alias for the ID field generated from the previous group operation (hence the call to `previousOperation()`) with a name of `tag`.
. Use the `sort` operation to sort the resulting list of tags by their occurrence count in descending order.
. Call the `aggregate` method on `MongoTemplate` to let MongoDB perform the actual aggregation operation, with the created `Aggregation` as an argument.

Note that the input collection is explicitly specified as the `tags` parameter to the `aggregate` method. If the name of the input collection is not specified explicitly, it is derived from the input class passed as the first parameter to the `newAggregation` method.
[[mongo.aggregation.examples.example2]]
===== Aggregation Framework Example 2

This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state[Largest and Smallest Cities by State] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return the smallest and largest cities by population for each state by using the aggregation framework. This example demonstrates grouping, sorting, and projections (selection).

[source,java]
----
class ZipInfo {
    String id;
    String city;
    String state;
    @Field("pop") int population;
    @Field("loc") double[] location;
}

class City {
    String name;
    int population;
}

class ZipInfoStats {
    String id;
    String state;
    City biggestCity;
    City smallestCity;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<ZipInfo> aggregation = newAggregation(ZipInfo.class,
    group("state", "city")
        .sum("population").as("pop"),
    sort(ASC, "pop", "state", "city"),
    group("state")
        .last("city").as("biggestCity")
        .last("pop").as("biggestPop")
        .first("city").as("smallestCity")
        .first("pop").as("smallestPop"),
    project()
        .and("state").previousOperation()
        .and("biggestCity")
            .nested(bind("name", "biggestCity").and("population", "biggestPop"))
        .and("smallestCity")
            .nested(bind("name", "smallestCity").and("population", "smallestPop")),
    sort(ASC, "state")
);

AggregationResults<ZipInfoStats> result = mongoTemplate.aggregate(aggregation, ZipInfoStats.class);
ZipInfoStats firstZipInfoStats = result.getMappedResults().get(0);
----

Note that the `ZipInfo` class maps the structure of the given input collection. The `ZipInfoStats` class defines the structure in the desired output format.

The preceding listings use the following algorithm:

. Use the `group` operation to define a group from the input collection. The grouping criteria is the combination of the `state` and `city` fields, which forms the ID structure of the group. We aggregate the value of the `population` property from the grouped elements by using the `sum` operator and save the result in the `pop` field.
. Use the `sort` operation to sort the intermediate result by the `pop`, `state`, and `city` fields, in ascending order, such that the smallest city is at the top and the biggest city is at the bottom of the result. Note that the sorting on `state` and `city` is implicitly performed against the group ID fields (which Spring Data MongoDB handles).
. Use a `group` operation again to group the intermediate result by `state`. Note that `state` again implicitly references a group ID field. We select the name and the population count of the biggest and smallest city with calls to the `last(…)` and `first(…)` operators, respectively, in the `project` operation.
. Select the `state` field from the previous `group` operation. Note that `state` again implicitly references a group ID field. Because we do not want an implicitly generated ID to appear, we exclude the ID from the previous operation by using `and(previousOperation()).exclude()`. Because we want to populate the nested `City` structures in our output class, we have to emit appropriate sub-documents by using the nested method.
. Sort the resulting list of `ZipInfoStats` by their state name in ascending order in the `sort` operation.

Note that we derive the name of the input collection from the `ZipInfo` class passed as the first parameter to the `newAggregation` method.
[[mongo.aggregation.examples.example3]]
===== Aggregation Framework Example 3

This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#states-with-populations-over-10-million[States with Populations Over 10 Million] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return all states with a population greater than 10 million, using the aggregation framework. This example demonstrates grouping, sorting, and matching (filtering).

[source,java]
----
class StateStats {
    @Id String id;
    String state;
    @Field("totalPop") int totalPopulation;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<ZipInfo> agg = newAggregation(ZipInfo.class,
    group("state").sum("population").as("totalPop"),
    sort(ASC, previousOperation(), "totalPop"),
    match(where("totalPop").gte(10 * 1000 * 1000))
);

AggregationResults<StateStats> result = mongoTemplate.aggregate(agg, StateStats.class);
List<StateStats> stateStatsList = result.getMappedResults();
----

The preceding listings use the following algorithm:

. Group the input collection by the `state` field and calculate the sum of the `population` field and store the result in the new field `"totalPop"`.
. Sort the intermediate result by the id-reference of the previous group operation in addition to the `"totalPop"` field in ascending order.
. Filter the intermediate result by using a `match` operation which accepts a `Criteria` query as an argument.

Note that we derive the name of the input collection from the `ZipInfo` class passed as first parameter to the `newAggregation` method.
[[mongo.aggregation.examples.example4]]
===== Aggregation Framework Example 4

This example demonstrates the use of simple arithmetic operations in the projection operation.

[source,java]
----
class Product {
    String id;
    String name;
    double netPrice;
    int spaceUnits;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<Product> agg = newAggregation(Product.class,
    project("name", "netPrice")
        .and("netPrice").plus(1).as("netPricePlus1")
        .and("netPrice").minus(1).as("netPriceMinus1")
        .and("netPrice").multiply(1.19).as("grossPrice")
        .and("netPrice").divide(2).as("netPriceDiv2")
        .and("spaceUnits").mod(2).as("spaceUnitsMod2")
);

AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class);
List<Document> resultList = result.getMappedResults();
----

Note that we derive the name of the input collection from the `Product` class passed as first parameter to the `newAggregation` method.
[[mongo.aggregation.examples.example5]]
===== Aggregation Framework Example 5

This example demonstrates the use of simple arithmetic operations derived from SpEL Expressions in the projection operation.

[source,java]
----
class Product {
    String id;
    String name;
    double netPrice;
    int spaceUnits;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<Product> agg = newAggregation(Product.class,
    project("name", "netPrice")
        .andExpression("netPrice + 1").as("netPricePlus1")
        .andExpression("netPrice - 1").as("netPriceMinus1")
        .andExpression("netPrice / 2").as("netPriceDiv2")
        .andExpression("netPrice * 1.19").as("grossPrice")
        .andExpression("spaceUnits % 2").as("spaceUnitsMod2")
        .andExpression("(netPrice * 0.8 + 1.2) * 1.19").as("grossPriceIncludingDiscountAndCharge")
);

AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class);
List<Document> resultList = result.getMappedResults();
----
[[mongo.aggregation.examples.example6]]
===== Aggregation Framework Example 6

This example demonstrates the use of complex arithmetic operations derived from SpEL Expressions in the projection operation.

Note: The additional parameters passed to the `andExpression` method can be referenced with indexer expressions according to their position. In this example, we reference the first parameter of the parameters array with `[0]`. When the SpEL expression is transformed into a MongoDB aggregation framework expression, external parameter expressions are replaced with their respective values.

[source,java]
----
class Product {
    String id;
    String name;
    double netPrice;
    int spaceUnits;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

double shippingCosts = 1.2;

TypedAggregation<Product> agg = newAggregation(Product.class,
    project("name", "netPrice")
        .andExpression("(netPrice * (1-discountRate) + [0]) * (1+taxRate)", shippingCosts).as("salesPrice")
);

AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class);
List<Document> resultList = result.getMappedResults();
----

Note that we can also refer to other fields of the document within the SpEL expression.
[[mongo.aggregation.examples.example7]]
===== Aggregation Framework Example 7

This example uses conditional projection. It is derived from the https://docs.mongodb.com/manual/reference/operator/aggregation/cond/[$cond reference documentation].

[source,java]
----
public class InventoryItem {

    @Id int id;
    String item;
    String description;
    int qty;
}

public class InventoryItemProjection {

    @Id int id;
    String item;
    String description;
    int qty;
    int discount;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<InventoryItem> agg = newAggregation(InventoryItem.class,
    project("item").and("discount")
        .applyCondition(ConditionalOperator.newBuilder().when(Criteria.where("qty").gte(250))
            .then(30)
            .otherwise(20))
        .and(ifNull("description", "Unspecified")).as("description")
);

AggregationResults<InventoryItemProjection> result = mongoTemplate.aggregate(agg, "inventory", InventoryItemProjection.class);
List<InventoryItemProjection> stateStatsList = result.getMappedResults();
----

This one-step aggregation uses a projection operation with the `inventory` collection. We project the `discount` field by using a conditional operation for all inventory items that have a `qty` greater than or equal to `250`. A second conditional projection is performed for the `description` field. We apply the `Unspecified` description to all items that either do not have a `description` field or items that have a `null` description.

As of MongoDB 3.6, it is possible to exclude fields from the projection by using a conditional expression.

.Conditional aggregation projection
====
[source,java]
----
TypedAggregation<Book> agg = Aggregation.newAggregation(Book.class,
    project("title")
        .and(ConditionalOperators.when(ComparisonOperators.valueOf("author.middle") <1>
                .equalToValue(""))                                                  <2>
                .then("$$REMOVE")                                                   <3>
                .otherwiseValueOf("author.middle")                                  <4>
        )
        .as("author.middle"));
----
<1> If the value of the field `author.middle`
<2> does not contain a value,
<3> then use https://docs.mongodb.com/manual/reference/aggregation-variables/#variable.REMOVE[``$$REMOVE``] to exclude the field.
<4> Otherwise, add the field value of `author.middle`.
====
115
src/main/asciidoc/reference/gridfs.adoc
Normal file
@@ -0,0 +1,115 @@
[[gridfs]]
== GridFS Support

MongoDB supports storing binary files inside its filesystem, GridFS. Spring Data MongoDB provides a `GridFsOperations` interface as well as the corresponding implementation, `GridFsTemplate`, to let you interact with the filesystem. You can set up a `GridFsTemplate` instance by handing it a `MongoDatabaseFactory` as well as a `MongoConverter`, as the following example shows:

.JavaConfig setup for a GridFsTemplate
====
[source,java]
----
class GridFsConfiguration extends AbstractMongoClientConfiguration {

  // … further configuration omitted

  @Bean
  public GridFsTemplate gridFsTemplate() {
    return new GridFsTemplate(mongoDbFactory(), mappingMongoConverter());
  }
}
----
====

The corresponding XML configuration follows:

.XML configuration for a GridFsTemplate
====
[source,xml]
----
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xmlns:mongo="http://www.springframework.org/schema/data/mongo"
  xsi:schemaLocation="http://www.springframework.org/schema/data/mongo
                      https://www.springframework.org/schema/data/mongo/spring-mongo.xsd
                      http://www.springframework.org/schema/beans
                      https://www.springframework.org/schema/beans/spring-beans.xsd">

  <mongo:db-factory id="mongoDbFactory" dbname="database" />
  <mongo:mapping-converter id="converter" />

  <bean class="org.springframework.data.mongodb.gridfs.GridFsTemplate">
    <constructor-arg ref="mongoDbFactory" />
    <constructor-arg ref="converter" />
  </bean>

</beans>
----
====

The template can now be injected and used to perform storage and retrieval operations, as the following example shows:

.Using GridFsTemplate to store files
====
[source,java]
----
class GridFsClient {

  @Autowired
  GridFsOperations operations;

  @Test
  public void storeFileToGridFs() {

    FileMetadata metadata = new FileMetadata();
    // populate metadata
    Resource file = … // lookup File or Resource

    operations.store(file.getInputStream(), "filename.txt", metadata);
  }
}
----
====

The `store(…)` operations take an `InputStream`, a filename, and (optionally) metadata information about the file to store. The metadata can be an arbitrary object, which will be marshaled by the `MongoConverter` configured with the `GridFsTemplate`. Alternatively, you can also provide a `Document`.
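
For example, an ad-hoc `Document` can stand in for a mapped metadata type (a minimal sketch; the `category` key and its value are illustrative):

[source,java]
----
// stores the file with a plain BSON document as metadata
operations.store(file.getInputStream(), "filename.txt",
    new Document("category", "invoice"));
----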
You can read files from the filesystem through either the `find(…)` or the `getResources(…)` methods. Let's have a look at the `find(…)` methods first. You can either find a single file or multiple files that match a `Query`. You can use the `GridFsCriteria` helper class to define queries. It provides static factory methods to encapsulate default metadata fields (such as `whereFilename()` and `whereContentType()`) or a custom one through `whereMetaData()`. The following example shows how to use `GridFsTemplate` to query for files:

.Using GridFsTemplate to query for files
====
[source,java]
----
class GridFsClient {

  @Autowired
  GridFsOperations operations;

  @Test
  public void findFilesInGridFs() {
    GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt")));
  }
}
----
====

NOTE: Currently, MongoDB does not support defining sort criteria when retrieving files from GridFS. For this reason, any sort criteria defined on the `Query` instance handed into the `find(…)` method are disregarded.

The other option to read files from GridFS is to use the methods introduced by the `ResourcePatternResolver` interface. They allow handing an Ant path into the method and can thus retrieve files matching the given pattern. The following example shows how to use `GridFsTemplate` to read files:

.Using GridFsTemplate to read files
====
[source,java]
----
class GridFsClient {

  @Autowired
  GridFsOperations operations;

  @Test
  public void readFilesFromGridFs() {
    GridFsResource[] txtFiles = operations.getResources("*.txt");
  }
}
----
====

`GridFsOperations` extends `ResourcePatternResolver` and lets the `GridFsTemplate` (for example) be plugged into an `ApplicationContext` to read Spring configuration files from a MongoDB database.
@@ -2419,659 +2419,7 @@ GroupByResults<XObject> results = mongoTemplate.group(where("x").gt(0),
	keyFunction("classpath:keyFunction.js").initialDocument("{ count: 0 }").reduceFunction("classpath:groupReduce.js"), XObject.class);
----
[[mongo.aggregation]]
|
||||
== Aggregation Framework Support
|
||||
|
||||
Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2.
|
||||
|
||||
For further information, see the full https://docs.mongodb.org/manual/aggregation/[reference documentation] of the aggregation framework and other data aggregation tools for MongoDB.
|
||||
|
||||
[[mongo.aggregation.basic-concepts]]
|
||||
=== Basic Concepts
|
||||
|
||||
The Aggregation Framework support in Spring Data MongoDB is based on the following key abstractions: `Aggregation`, `AggregationDefinition`, and `AggregationResults`.
|
||||
|
||||
* `Aggregation`
|
||||
+
|
||||
An `Aggregation` represents a MongoDB `aggregate` operation and holds the description of the aggregation pipeline instructions. Aggregations are created by invoking the appropriate `newAggregation(…)` static factory method of the `Aggregation` class, which takes a list of `AggregateOperation` and an optional input class.
|
||||
+
|
||||
The actual aggregate operation is run by the `aggregate` method of the `MongoTemplate`, which takes the desired output class as a parameter.
|
||||
+
|
||||
* `TypedAggregation`
|
||||
+
|
||||
A `TypedAggregation`, just like an `Aggregation`, holds the instructions of the aggregation pipeline and a reference to the input type, that is used for mapping domain properties to actual document fields.
|
||||
+
|
||||
At runtime, field references get checked against the given input type, considering potential `@Field` annotations.
|
||||
[NOTE]
|
||||
====
|
||||
Changed in 3.2 referencing non-existent properties does no longer raise errors. To restore the previous behaviour use the `strictMapping` option of `AggregationOptions`.
|
||||
====
|
||||
* `AggregationDefinition`
|
||||
+
|
||||
An `AggregationDefinition` represents a MongoDB aggregation pipeline operation and describes the processing that should be performed in this aggregation step. Although you could manually create an `AggregationDefinition`, we recommend using the static factory methods provided by the `Aggregate` class to construct an `AggregateOperation`.
|
||||
+
|
||||
* `AggregationResults`
|
||||
+
|
||||
`AggregationResults` is the container for the result of an aggregate operation. It provides access to the raw aggregation result, in the form of a `Document` to the mapped objects and other information about the aggregation.
|
||||
+
|
||||
The following listing shows the canonical example for using the Spring Data MongoDB support for the MongoDB Aggregation Framework:
|
||||
+
|
||||
[source,java]
|
||||
----
|
||||
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
|
||||
|
||||
Aggregation agg = newAggregation(
|
||||
pipelineOP1(),
|
||||
pipelineOP2(),
|
||||
pipelineOPn()
|
||||
);
|
||||
|
||||
AggregationResults<OutputType> results = mongoTemplate.aggregate(agg, "INPUT_COLLECTION_NAME", OutputType.class);
|
||||
List<OutputType> mappedResult = results.getMappedResults();
|
||||
----
|
||||
|
||||
Note that, if you provide an input class as the first parameter to the `newAggregation` method, the `MongoTemplate` derives the name of the input collection from this class. Otherwise, if you do not not specify an input class, you must provide the name of the input collection explicitly. If both an input class and an input collection are provided, the latter takes precedence.
|
||||
|
||||
[[mongo.aggregation.supported-aggregation-operations]]
|
||||
=== Supported Aggregation Operations
|
||||
|
||||
The MongoDB Aggregation Framework provides the following types of aggregation operations:
|
||||
|
||||
* Pipeline Aggregation Operators
|
||||
* Group Aggregation Operators
|
||||
* Boolean Aggregation Operators
|
||||
* Comparison Aggregation Operators
|
||||
* Arithmetic Aggregation Operators
|
||||
* String Aggregation Operators
|
||||
* Date Aggregation Operators
|
||||
* Array Aggregation Operators
|
||||
* Conditional Aggregation Operators
|
||||
* Lookup Aggregation Operators
|
||||
* Convert Aggregation Operators
|
||||
* Object Aggregation Operators
|
||||
* Script Aggregation Operators
|
||||
|
||||
At the time of this writing, we provide support for the following Aggregation Operations in Spring Data MongoDB:
|
||||
|
||||
.Aggregation Operations currently supported by Spring Data MongoDB
|
||||
[cols="2*"]
|
||||
|===
|
||||
| Pipeline Aggregation Operators
|
||||
| `bucket`, `bucketAuto`, `count`, `facet`, `geoNear`, `graphLookup`, `group`, `limit`, `lookup`, `match`, `project`, `replaceRoot`, `skip`, `sort`, `unwind`
|
||||
|
||||
| Set Aggregation Operators
|
||||
| `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue`
|
||||
|
||||
| Group Aggregation Operators
|
||||
| `addToSet`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp`
|
||||
|
||||
| Arithmetic Aggregation Operators
|
||||
| `abs`, `add` (*via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc`
|
||||
|
||||
| String Aggregation Operators
|
||||
| `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim`
|
||||
|
||||
| Comparison Aggregation Operators
|
||||
| `eq` (*via: `is`), `gt`, `gte`, `lt`, `lte`, `ne`
|
||||
|
||||
| Array Aggregation Operators
|
||||
| `arrayElementAt`, `arrayToObject`, `concatArrays`, `filter`, `in`, `indexOfArray`, `isArray`, `range`, `reverseArray`, `reduce`, `size`, `slice`, `zip`
|
||||
|
||||
| Literal Operators
|
||||
| `literal`
|
||||
|
||||
| Date Aggregation Operators
|
||||
| `dayOfYear`, `dayOfMonth`, `dayOfWeek`, `year`, `month`, `week`, `hour`, `minute`, `second`, `millisecond`, `dateToString`, `dateFromString`, `dateFromParts`, `dateToParts`, `isoDayOfWeek`, `isoWeek`, `isoWeekYear`
|
||||
|
||||
| Variable Operators
|
||||
| `map`
|
||||
|
||||
| Conditional Aggregation Operators
|
||||
| `cond`, `ifNull`, `switch`
|
||||
|
||||
| Type Aggregation Operators
|
||||
| `type`
|
||||
|
||||
| Convert Aggregation Operators
|
||||
| `convert`, `toBool`, `toDate`, `toDecimal`, `toDouble`, `toInt`, `toLong`, `toObjectId`, `toString`
|
||||
|
||||
| Object Aggregation Operators
|
||||
| `objectToArray`, `mergeObjects`
|
||||
|
||||
| Script Aggregation Operators
|
||||
| `function`, `accumulator`
|
||||
|===
|
||||
|
||||
* The operation is mapped or added by Spring Data MongoDB.
|
||||
|
||||
Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions.
|
||||
|
||||
[[mongo.aggregation.projection]]
|
||||
=== Projection Expressions
|
||||
|
||||
Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined through the `project` method of the `Aggregation` class, either by passing a list of `String` objects or an aggregation framework `Fields` object. The projection can be extended with additional fields through a fluent API by using the `and(String)` method and aliased by using the `as(String)` method.
|
||||
Note that you can also define fields with aliases by using the `Fields.field` static factory method of the aggregation framework, which you can then use to construct a new `Fields` instance. References to projected fields in later aggregation stages are valid only for the field names of included fields or their aliases (including newly defined fields and their aliases). Fields not included in the projection cannot be referenced in later aggregation stages. The following listings show examples of projection expression:
|
||||
|
||||
.Projection expression examples
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
// generates {$project: {name: 1, netPrice: 1}}
|
||||
project("name", "netPrice")
|
||||
|
||||
// generates {$project: {thing1: $thing2}}
|
||||
project().and("thing1").as("thing2")
|
||||
|
||||
// generates {$project: {a: 1, b: 1, thing2: $thing1}}
|
||||
project("a","b").and("thing1").as("thing2")
|
||||
----
|
||||
====
|
||||
|
||||
.Multi-Stage Aggregation using Projection and Sorting
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
// generates {$project: {name: 1, netPrice: 1}}, {$sort: {name: 1}}
|
||||
project("name", "netPrice"), sort(ASC, "name")
|
||||
|
||||
// generates {$project: {name: $firstname}}, {$sort: {name: 1}}
|
||||
project().and("firstname").as("name"), sort(ASC, "name")
|
||||
|
||||
// does not work
|
||||
project().and("firstname").as("name"), sort(ASC, "firstname")
|
||||
----
|
||||
====
|
||||
|
||||
More examples for project operations can be found in the `AggregationTests` class. Note that further details regarding the projection expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation.
|
||||
|
||||
[[mongo.aggregation.facet]]
|
||||
=== Faceted Classification
|
||||
|
||||
As of Version 3.4, MongoDB supports faceted classification by using the Aggregation Framework. A faceted classification uses semantic categories (either general or subject-specific) that are combined to create the full classification entry. Documents flowing through the aggregation pipeline are classified into buckets. A multi-faceted classification enables various aggregations on the same set of input documents, without needing to retrieve the input documents multiple times.
|
||||
|
||||
==== Buckets
|
||||
|
||||
Bucket operations categorize incoming documents into groups, called buckets, based on a specified expression and bucket boundaries. Bucket operations require a grouping field or a grouping expression. You can define them by using the `bucket()` and `bucketAuto()` methods of the `Aggregate` class. `BucketOperation` and `BucketAutoOperation` can expose accumulations based on aggregation expressions for input documents. You can extend the bucket operation with additional parameters through a fluent API by using the `with…()` methods and the `andOutput(String)` method. You can alias the operation by using the `as(String)` method. Each bucket is represented as a document in the output.
|
||||
|
||||
`BucketOperation` takes a defined set of boundaries to group incoming documents into these categories. Boundaries are required to be sorted. The following listing shows some examples of bucket operations:
|
||||
|
||||
.Bucket operation examples
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
// generates {$bucket: {groupBy: $price, boundaries: [0, 100, 400]}}
|
||||
bucket("price").withBoundaries(0, 100, 400);
|
||||
|
||||
// generates {$bucket: {groupBy: $price, default: "Other" boundaries: [0, 100]}}
|
||||
bucket("price").withBoundaries(0, 100).withDefault("Other");
|
||||
|
||||
// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { count: { $sum: 1}}}}
|
||||
bucket("price").withBoundaries(0, 100).andOutputCount().as("count");
|
||||
|
||||
// generates {$bucket: {groupBy: $price, boundaries: [0, 100], 5, output: { titles: { $push: "$title"}}}
|
||||
bucket("price").withBoundaries(0, 100).andOutput("title").push().as("titles");
|
||||
----
|
||||
====

`BucketAutoOperation` determines boundaries in an attempt to evenly distribute documents into a specified number of buckets. `BucketAutoOperation` optionally takes a granularity value that specifies the https://en.wikipedia.org/wiki/Preferred_number[preferred number] series to use to ensure that the calculated boundary edges end on preferred round numbers or on powers of 10. The following listing shows some examples:

.Bucket auto operation examples
====
[source,java]
----
// generates {$bucketAuto: {groupBy: $price, buckets: 5}}
bucketAuto("price", 5)

// generates {$bucketAuto: {groupBy: $price, buckets: 5, granularity: "E24"}}
bucketAuto("price", 5).withGranularity(Granularities.E24);

// generates {$bucketAuto: {groupBy: $price, buckets: 5, output: { titles: { $push: "$title"}}}}
bucketAuto("price", 5).andOutput("title").push().as("titles");
----
====

To create output fields in buckets, bucket operations can use `AggregationExpression` through `andOutput()` and <<mongo.aggregation.projection.expressions, SpEL expressions>> through `andOutputExpression()`, as the sketch below shows.
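
The following is a minimal sketch of a SpEL-driven output field next to a count accumulation; `netPrice` and `surCharge` are assumed example properties of the input documents:

[source,java]
----
// computes a "total" output field from a SpEL expression and counts the documents per bucket
bucket("price").withBoundaries(0, 100)
    .andOutputExpression("netPrice + surCharge").as("total")
    .andOutputCount().as("count");
----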

Note that further details regarding bucket expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/bucket/[`$bucket` section] and
https://docs.mongodb.org/manual/reference/operator/aggregation/bucketAuto/[`$bucketAuto` section] of the MongoDB Aggregation Framework reference documentation.

==== Multi-faceted Aggregation

Multiple aggregation pipelines can be used to create multi-faceted aggregations that characterize data across multiple dimensions (or facets) within a single aggregation stage. Multi-faceted aggregations provide multiple filters and categorizations to guide data browsing and analysis. A common implementation of faceting is how many online retailers provide ways to narrow down search results by applying filters on product price, manufacturer, size, and other factors.

You can define a `FacetOperation` by using the `facet()` method of the `Aggregation` class. You can customize it with multiple aggregation pipelines by using the `and()` method. Each sub-pipeline has its own field in the output document, where its results are stored as an array of documents.

Sub-pipelines can project and filter input documents prior to grouping. Common use cases include extraction of date parts or calculations before categorization. The following listing shows facet operation examples:

.Facet operation examples
====
[source,java]
----
// generates {$facet: {categorizedByPrice: [ { $match: { price: {$exists : true}}}, { $bucketAuto: {groupBy: $price, buckets: 5}}]}}
facet(match(Criteria.where("price").exists(true)), bucketAuto("price", 5)).as("categorizedByPrice")

// generates {$facet: {categorizedByCountry: [ { $match: { country: {$exists : true}}}, { $sortByCount: "$country"}]}}
facet(match(Criteria.where("country").exists(true)), sortByCount("country")).as("categorizedByCountry")

// generates {$facet: {categorizedByYear: [
//   { $project: { title: 1, publicationYear: { $year: "$publicationDate"}}},
//   { $bucketAuto: {groupBy: $publicationYear, buckets: 5, output: { titles: {$push: "$title"}}}}
// ]}}
facet(project("title").and("publicationDate").extractYear().as("publicationYear"),
      bucketAuto("publicationYear", 5).andOutput("title").push().as("titles"))
    .as("categorizedByYear")
----
====

Note that further details regarding the facet operation can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/facet/[`$facet` section] of the MongoDB Aggregation Framework reference documentation.

[[mongo.aggregation.sort-by-count]]
==== Sort By Count

Sort by count operations group incoming documents based on the value of a specified expression, compute the count of documents in each distinct group, and sort the results by count. They offer a handy shortcut to apply sorting when using <<mongo.aggregation.facet>>. Sort by count operations require a grouping field or grouping expression. The following listing shows a sort by count example:

.Sort by count example
====
[source,java]
----
// generates { $sortByCount: "$country" }
sortByCount("country");
----
====

A sort by count operation is equivalent to the following BSON (Binary JSON):

----
{ $group: { _id: <expression>, count: { $sum: 1 } } },
{ $sort: { count: -1 } }
----
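
Written with the builder API, the same two stages could be spelled out explicitly. The following is a sketch that reuses the `country` grouping from the example above and assumes the usual static imports for `group`, `sort`, and `DESC`:

[source,java]
----
// explicit equivalent of sortByCount("country"):
// a $group stage with a counter followed by a descending $sort on that counter
group("country").count().as("count"),
sort(DESC, "count")
----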

[[mongo.aggregation.projection.expressions]]
==== Spring Expression Support in Projection Expressions

We support the use of SpEL expressions in projection expressions through the `andExpression` method of the `ProjectionOperation` and `BucketOperation` classes. This feature lets you define the desired expression as a SpEL expression. On running a query, the SpEL expression is translated into a corresponding MongoDB projection expression part. This arrangement makes it much easier to express complex calculations.

===== Complex Calculations with SpEL expressions

Consider the following SpEL expression:

[source,java]
----
1 + (q + 1) / (q - 1)
----

The preceding expression is translated into the following projection expression part:

[source,javascript]
----
{ "$add" : [ 1, {
    "$divide" : [ {
        "$add" : [ "$q", 1 ] }, {
        "$subtract" : [ "$q", 1 ] }
    ]
}]}
----
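
Such an expression is typically attached to a pipeline stage through `andExpression(…)`. The following is a minimal sketch; `q` is an assumed numeric field of the input documents:

[source,java]
----
// projects the computed value into a field named "result"
project("name").andExpression("1 + (q + 1) / (q - 1)").as("result");
----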

You can see examples in more context in <<mongo.aggregation.examples.example5>> and <<mongo.aggregation.examples.example6>>. You can find more usage examples for supported SpEL expression constructs in `SpelExpressionTransformerUnitTests`. The following table shows the SpEL transformations supported by Spring Data MongoDB:

.Supported SpEL transformations
[%header,cols="2"]
|===
| SpEL Expression
| Mongo Expression Part
| a == b
| { $eq : [$a, $b] }
| a != b
| { $ne : [$a, $b] }
| a > b
| { $gt : [$a, $b] }
| a >= b
| { $gte : [$a, $b] }
| a < b
| { $lt : [$a, $b] }
| a <= b
| { $lte : [$a, $b] }
| a + b
| { $add : [$a, $b] }
| a - b
| { $subtract : [$a, $b] }
| a * b
| { $multiply : [$a, $b] }
| a / b
| { $divide : [$a, $b] }
| a^b
| { $pow : [$a, $b] }
| a % b
| { $mod : [$a, $b] }
| a && b
| { $and : [$a, $b] }
| a \|\| b
| { $or : [$a, $b] }
| !a
| { $not : [$a] }
|===

In addition to the transformations shown in the preceding table, you can use standard SpEL operations, such as `new`, to (for example) create arrays and reference expressions through their names (followed by the arguments to use in brackets). The following example shows how to create an array in this fashion:

[source,java]
----
// { $setEquals : [$a, [5, 8, 13] ] }
.andExpression("setEquals(a, new int[]{5, 8, 13})");
----

[[mongo.aggregation.examples]]
==== Aggregation Framework Examples

The examples in this section demonstrate the usage patterns for the MongoDB Aggregation Framework with Spring Data MongoDB.

[[mongo.aggregation.examples.example1]]
===== Aggregation Framework Example 1

In this introductory example, we want to aggregate a list of tags to get the occurrence count of a particular tag from a MongoDB collection (called `tags`), sorted by the occurrence count in descending order. This example demonstrates the usage of grouping, sorting, projections (selection), and unwinding (result splitting).

[source,java]
----
class TagCount {
    String tag;
    int n;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

Aggregation agg = newAggregation(
    project("tags"),
    unwind("tags"),
    group("tags").count().as("n"),
    project("n").and("tag").previousOperation(),
    sort(DESC, "n")
);

AggregationResults<TagCount> results = mongoTemplate.aggregate(agg, "tags", TagCount.class);
List<TagCount> tagCount = results.getMappedResults();
----

The preceding listing uses the following algorithm:

. Create a new aggregation by using the `newAggregation` static factory method, to which we pass a list of aggregation operations. These aggregation operations define the aggregation pipeline of our `Aggregation`.
. Use the `project` operation to select the `tags` field (which is an array of strings) from the input collection.
. Use the `unwind` operation to generate a new document for each tag within the `tags` array.
. Use the `group` operation to define a group for each `tags` value for which we aggregate the occurrence count (by using the `count` aggregation operator and collecting the result in a new field called `n`).
. Select the `n` field and create an alias for the ID field generated from the previous group operation (hence the call to `previousOperation()`) with a name of `tag`.
. Use the `sort` operation to sort the resulting list of tags by their occurrence count in descending order.
. Call the `aggregate` method on `MongoTemplate` to let MongoDB perform the actual aggregation operation, with the created `Aggregation` as an argument.

Note that the input collection is explicitly specified as the `tags` parameter to the `aggregate` method. If the name of the input collection is not specified explicitly, it is derived from the input class passed as the first parameter to the `newAggregation` method.
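
For illustration, if a hypothetical `Tag` class were mapped to the `tags` collection, the collection name could be derived instead of being passed explicitly:

[source,java]
----
// Tag is an assumed mapped class; the collection name is derived from it
TypedAggregation<Tag> agg = newAggregation(Tag.class,
    project("tags"),
    unwind("tags"),
    group("tags").count().as("n"),
    project("n").and("tag").previousOperation(),
    sort(DESC, "n")
);

AggregationResults<TagCount> results = mongoTemplate.aggregate(agg, TagCount.class);
----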

[[mongo.aggregation.examples.example2]]
===== Aggregation Framework Example 2

This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state[Largest and Smallest Cities by State] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results across different MongoDB versions. Here we want to return the smallest and largest cities by population for each state by using the aggregation framework. This example demonstrates grouping, sorting, and projections (selection).

[source,java]
----
class ZipInfo {
    String id;
    String city;
    String state;
    @Field("pop") int population;
    @Field("loc") double[] location;
}

class City {
    String name;
    int population;
}

class ZipInfoStats {
    String id;
    String state;
    City biggestCity;
    City smallestCity;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<ZipInfo> aggregation = newAggregation(ZipInfo.class,
    group("state", "city")
        .sum("population").as("pop"),
    sort(ASC, "pop", "state", "city"),
    group("state")
        .last("city").as("biggestCity")
        .last("pop").as("biggestPop")
        .first("city").as("smallestCity")
        .first("pop").as("smallestPop"),
    project()
        .and("state").previousOperation()
        .and("biggestCity").nested(bind("name", "biggestCity").and("population", "biggestPop"))
        .and("smallestCity").nested(bind("name", "smallestCity").and("population", "smallestPop")),
    sort(ASC, "state")
);

AggregationResults<ZipInfoStats> result = mongoTemplate.aggregate(aggregation, ZipInfoStats.class);
ZipInfoStats firstZipInfoStats = result.getMappedResults().get(0);
----

Note that the `ZipInfo` class maps the structure of the given input collection. The `ZipInfoStats` class defines the structure in the desired output format.

The preceding listings use the following algorithm:

. Use the `group` operation to define a group from the input collection. The grouping criteria is the combination of the `state` and `city` fields, which forms the ID structure of the group. We aggregate the value of the `population` property from the grouped elements by using the `sum` operator and save the result in the `pop` field.
. Use the `sort` operation to sort the intermediate result by the `pop`, `state`, and `city` fields, in ascending order, such that the smallest city is at the top and the biggest city is at the bottom of the result. Note that the sorting on `state` and `city` is implicitly performed against the group ID fields (which Spring Data MongoDB handles).
. Use a `group` operation again to group the intermediate result by `state`. Note that `state` again implicitly references a group ID field. We select the name and the population count of the biggest and smallest city with calls to the `last(…)` and `first(…)` operators, respectively.
. Select the `state` field from the previous `group` operation. Note that `state` again implicitly references a group ID field. Because we do not want an implicitly generated ID to appear, we exclude the ID from the previous operation by using `and(previousOperation()).exclude()`. Because we want to populate the nested `City` structures in our output class, we have to emit appropriate sub-documents by using the `nested` method.
. Sort the resulting list of `ZipInfoStats` by their state name in ascending order in the `sort` operation.

Note that we derive the name of the input collection from the `ZipInfo` class passed as the first parameter to the `newAggregation` method.

[[mongo.aggregation.examples.example3]]
===== Aggregation Framework Example 3

This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#states-with-populations-over-10-million[States with Populations Over 10 Million] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results across different MongoDB versions. Here we want to return all states with a population greater than 10 million, using the aggregation framework. This example demonstrates grouping, sorting, and matching (filtering).

[source,java]
----
class StateStats {
    @Id String id;
    String state;
    @Field("totalPop") int totalPopulation;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<ZipInfo> agg = newAggregation(ZipInfo.class,
    group("state").sum("population").as("totalPop"),
    sort(ASC, previousOperation(), "totalPop"),
    match(where("totalPop").gte(10 * 1000 * 1000))
);

AggregationResults<StateStats> result = mongoTemplate.aggregate(agg, StateStats.class);
List<StateStats> stateStatsList = result.getMappedResults();
----

The preceding listings use the following algorithm:

. Group the input collection by the `state` field, calculate the sum of the `population` field, and store the result in the new field `"totalPop"`.
. Sort the intermediate result by the ID reference of the previous group operation, in addition to the `"totalPop"` field, in ascending order.
. Filter the intermediate result by using a `match` operation, which accepts a `Criteria` query as an argument.

Note that we derive the name of the input collection from the `ZipInfo` class passed as the first parameter to the `newAggregation` method.

[[mongo.aggregation.examples.example4]]
===== Aggregation Framework Example 4

This example demonstrates the use of simple arithmetic operations in the projection operation.

[source,java]
----
class Product {
    String id;
    String name;
    double netPrice;
    int spaceUnits;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<Product> agg = newAggregation(Product.class,
    project("name", "netPrice")
        .and("netPrice").plus(1).as("netPricePlus1")
        .and("netPrice").minus(1).as("netPriceMinus1")
        .and("netPrice").multiply(1.19).as("grossPrice")
        .and("netPrice").divide(2).as("netPriceDiv2")
        .and("spaceUnits").mod(2).as("spaceUnitsMod2")
);

AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class);
List<Document> resultList = result.getMappedResults();
----

Note that we derive the name of the input collection from the `Product` class passed as the first parameter to the `newAggregation` method.

[[mongo.aggregation.examples.example5]]
===== Aggregation Framework Example 5

This example demonstrates the use of simple arithmetic operations derived from SpEL expressions in the projection operation.

[source,java]
----
class Product {
    String id;
    String name;
    double netPrice;
    int spaceUnits;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<Product> agg = newAggregation(Product.class,
    project("name", "netPrice")
        .andExpression("netPrice + 1").as("netPricePlus1")
        .andExpression("netPrice - 1").as("netPriceMinus1")
        .andExpression("netPrice / 2").as("netPriceDiv2")
        .andExpression("netPrice * 1.19").as("grossPrice")
        .andExpression("spaceUnits % 2").as("spaceUnitsMod2")
        .andExpression("(netPrice * 0.8 + 1.2) * 1.19").as("grossPriceIncludingDiscountAndCharge")
);

AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class);
List<Document> resultList = result.getMappedResults();
----

[[mongo.aggregation.examples.example6]]
===== Aggregation Framework Example 6

This example demonstrates the use of complex arithmetic operations derived from SpEL expressions in the projection operation.

Note: The additional parameters passed to the `andExpression` method can be referenced with indexer expressions according to their position. In this example, we reference the first parameter of the parameters array with `[0]`. When the SpEL expression is transformed into a MongoDB aggregation framework expression, external parameter expressions are replaced with their respective values.

[source,java]
----
class Product {
    String id;
    String name;
    double netPrice;
    int spaceUnits;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

double shippingCosts = 1.2;

TypedAggregation<Product> agg = newAggregation(Product.class,
    project("name", "netPrice")
        .andExpression("(netPrice * (1-discountRate) + [0]) * (1+taxRate)", shippingCosts).as("salesPrice")
);

AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class);
List<Document> resultList = result.getMappedResults();
----

Note that we can also refer to other fields of the document within the SpEL expression, as the following sketch shows.
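
Here, both operands come from the processed document itself; the sketch multiplies the `netPrice` and `spaceUnits` fields of the `Product` class shown above (the output field name is an assumed example):

[source,java]
----
// both operands are fields of the same document
project("name").andExpression("netPrice * spaceUnits").as("totalSpaceCost");
----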

[[mongo.aggregation.examples.example7]]
===== Aggregation Framework Example 7

This example uses conditional projection. It is derived from the https://docs.mongodb.com/manual/reference/operator/aggregation/cond/[$cond reference documentation].

[source,java]
----
public class InventoryItem {

    @Id int id;
    String item;
    String description;
    int qty;
}

public class InventoryItemProjection {

    @Id int id;
    String item;
    String description;
    int qty;
    int discount;
}
----

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

TypedAggregation<InventoryItem> agg = newAggregation(InventoryItem.class,
    project("item").and("discount")
        .applyCondition(ConditionalOperators.Cond.newBuilder().when(Criteria.where("qty").gte(250))
            .then(30)
            .otherwise(20))
        .and(ifNull("description", "Unspecified")).as("description")
);

AggregationResults<InventoryItemProjection> result = mongoTemplate.aggregate(agg, "inventory", InventoryItemProjection.class);
List<InventoryItemProjection> inventoryList = result.getMappedResults();
----

This one-step aggregation uses a projection operation with the `inventory` collection. We project the `discount` field by using a conditional operation for all inventory items that have a `qty` greater than or equal to `250`. A second conditional projection is performed for the `description` field. We apply the `Unspecified` description to all items that either do not have a `description` field or have a `null` description.

As of MongoDB 3.6, it is possible to exclude fields from the projection by using a conditional expression, as the following example shows:

.Conditional aggregation projection
====
[source,java]
----
TypedAggregation<Book> agg = Aggregation.newAggregation(Book.class,
    project("title")
        .and(ConditionalOperators.when(ComparisonOperators.valueOf("author.middle") <1>
                .equalToValue(""))                                                  <2>
                .then("$$REMOVE")                                                   <3>
                .otherwiseValueOf("author.middle")                                  <4>
        )
        .as("author.middle"));
----
<1> If the value of the field `author.middle`
<2> does not contain a value,
<3> then use https://docs.mongodb.com/manual/reference/aggregation-variables/#variable.REMOVE[``$$REMOVE``] to exclude the field.
<4> Otherwise, add the field value of `author.middle`.
====
include::aggregation-framework.adoc[]

[[mongo-template.index-and-collections]]
== Index and Collection Management
@@ -3264,121 +2612,6 @@ boolean hasIndex = template.execute("geolocation", new CollectionCallback<Boolean>() {
});
----

[[gridfs]]
== GridFS Support

MongoDB supports storing binary files inside its filesystem, GridFS. Spring Data MongoDB provides a `GridFsOperations` interface as well as the corresponding implementation, `GridFsTemplate`, to let you interact with the filesystem. You can set up a `GridFsTemplate` instance by handing it a `MongoDatabaseFactory` as well as a `MongoConverter`, as the following example shows:

.JavaConfig setup for a GridFsTemplate
====
[source,java]
----
class GridFsConfiguration extends AbstractMongoClientConfiguration {

    // … further configuration omitted

    @Bean
    public GridFsTemplate gridFsTemplate() {
        return new GridFsTemplate(mongoDbFactory(), mappingMongoConverter());
    }
}
----
====

The corresponding XML configuration follows:

.XML configuration for a GridFsTemplate
====
[source,xml]
----
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:mongo="http://www.springframework.org/schema/data/mongo"
       xsi:schemaLocation="http://www.springframework.org/schema/data/mongo
                           https://www.springframework.org/schema/data/mongo/spring-mongo.xsd
                           http://www.springframework.org/schema/beans
                           https://www.springframework.org/schema/beans/spring-beans.xsd">

  <mongo:db-factory id="mongoDbFactory" dbname="database" />
  <mongo:mapping-converter id="converter" />

  <bean class="org.springframework.data.mongodb.gridfs.GridFsTemplate">
    <constructor-arg ref="mongoDbFactory" />
    <constructor-arg ref="converter" />
  </bean>

</beans>
----
====

The template can now be injected and used to perform storage and retrieval operations, as the following example shows:

.Using GridFsTemplate to store files
====
[source,java]
----
class GridFsClient {

    @Autowired
    GridFsOperations operations;

    @Test
    public void storeFileToGridFs() {

        FileMetadata metadata = new FileMetadata();
        // populate metadata
        Resource file = … // lookup File or Resource

        operations.store(file.getInputStream(), "filename.txt", metadata);
    }
}
----
====

The `store(…)` operations take an `InputStream`, a filename, and (optionally) metadata information about the file to store. The metadata can be an arbitrary object, which is marshaled by the `MongoConverter` configured with the `GridFsTemplate`. Alternatively, you can also provide a `Document`, as the sketch below shows.
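
The following is a minimal sketch of the `Document` variant, passing a content type as well; the `content` stream, the filename, and the metadata values are assumed examples. The `store(…)` method returns the `ObjectId` under which the file was stored:

[source,java]
----
InputStream content = … // obtain the file content

// stores the file with a content type and a plain org.bson.Document as metadata
ObjectId id = operations.store(content, "invoice.txt", "text/plain",
        new Document("category", "invoices"));
----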

You can read files from the filesystem through either the `find(…)` or the `getResources(…)` methods. Let's have a look at the `find(…)` methods first. You can either find a single file or multiple files that match a `Query`. You can use the `GridFsCriteria` helper class to define queries. It provides static factory methods to encapsulate default metadata fields (such as `whereFilename()` and `whereContentType()`) or a custom one through `whereMetaData()`. The following example shows how to use `GridFsTemplate` to query for files:

.Using GridFsTemplate to query for files
====
[source,java]
----
class GridFsClient {

    @Autowired
    GridFsOperations operations;

    @Test
    public void findFilesInGridFs() {
        GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt")));
    }
}
----
====

NOTE: Currently, MongoDB does not support defining sort criteria when retrieving files from GridFS. For this reason, any sort criteria defined on the `Query` instance handed into the `find(…)` method are disregarded.

The other option to read files from GridFS is to use the methods introduced by the `ResourcePatternResolver` interface. They allow handing an Ant path into the method and can thus retrieve files matching the given pattern. The following example shows how to use `GridFsTemplate` to read files:

.Using GridFsTemplate to read files
====
[source,java]
----
class GridFsClient {

    @Autowired
    GridFsOperations operations;

    @Test
    public void readFilesFromGridFs() {
        GridFsResource[] txtFiles = operations.getResources("*.txt");
    }
}
----
====

`GridFsOperations` extends `ResourcePatternResolver` and lets the `GridFsTemplate` (for example) be plugged into an `ApplicationContext` to read Spring configuration files from a MongoDB database.
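
A minimal sketch of that arrangement, resolving a single file through the `ResourceLoader` contract (the filename is an assumed example):

[source,java]
----
// GridFsTemplate can stand in wherever a ResourceLoader / ResourcePatternResolver is expected
ResourcePatternResolver resolver = operations;
Resource configFile = resolver.getResource("application-config.xml");
----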

include::gridfs.adoc[]
include::tailable-cursors.adoc[]
include::change-streams.adoc[]

@@ -1,4 +1,4 @@
Spring Data MongoDB 3.2.3 (2021.0.3)
Spring Data MongoDB 3.2.8 (2021.0.8)
Copyright (c) [2010-2019] Pivotal Software, Inc.

This product is licensed to you under the Apache License, Version 2.0 (the "License").

@@ -24,6 +24,11 @@ conditions of the subcomponent's license, as noted in the LICENSE file.