Compare commits

42 Commits

| SHA1 |
|---|
| b39b2591b6 |
| 65c8317e38 |
| 9d0f7bac6a |
| 6f50747d21 |
| 5cf1578ad3 |
| 78a59c45ca |
| dccdfc8b4d |
| e48239eb8f |
| c3b4f61d29 |
| 22ed860b4a |
| bf642ad3f7 |
| fcd48539ea |
| bf10f72a57 |
| 1c652cce1c |
| dc2de878bc |
| 00cacc02ac |
| 811c2e5d7b |
| 200f3006bd |
| 1d6bea51ec |
| 7779ded45c |
| 918bf7c138 |
| abe3b9f6d7 |
| 41c453cc83 |
| 77784d88c7 |
| 263c62c880 |
| 24ab8f67bb |
| 572ceb867e |
| b7caea8602 |
| 3696f2144f |
| b25c8acca6 |
| 00d6271468 |
| bb603ba7b9 |
| 02eaa4cbd2 |
| 7429503c63 |
| 82f4e2276b |
| e1bce7d942 |
| 8bf3d395be |
| d3c00a93c0 |
| 0aa805e1a2 |
| 9dc1df3deb |
| 92a73a5cc0 |
| 910d66afb0 |
pom.xml (8 changed lines)
@@ -5,7 +5,7 @@
 <groupId>org.springframework.data</groupId>
 <artifactId>spring-data-mongodb-parent</artifactId>
-<version>3.1.3</version>
+<version>3.1.6</version>
 <packaging>pom</packaging>

 <name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
 <parent>
 <groupId>org.springframework.data.build</groupId>
 <artifactId>spring-data-parent</artifactId>
-<version>2.4.3</version>
+<version>2.4.6</version>
 </parent>

 <modules>
@@ -26,8 +26,8 @@
 <properties>
 <project.type>multi</project.type>
 <dist.id>spring-data-mongodb</dist.id>
-<springdata.commons>2.4.3</springdata.commons>
-<mongo>4.1.1</mongo>
+<springdata.commons>2.4.6</springdata.commons>
+<mongo>4.1.2</mongo>
 <mongo.reactivestreams>${mongo}</mongo.reactivestreams>
 <jmh.version>1.19</jmh.version>
 </properties>
@@ -7,7 +7,7 @@
 <parent>
 <groupId>org.springframework.data</groupId>
 <artifactId>spring-data-mongodb-parent</artifactId>
-<version>3.1.3</version>
+<version>3.1.6</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -14,7 +14,7 @@
 <parent>
 <groupId>org.springframework.data</groupId>
 <artifactId>spring-data-mongodb-parent</artifactId>
-<version>3.1.3</version>
+<version>3.1.6</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -11,7 +11,7 @@
 <parent>
 <groupId>org.springframework.data</groupId>
 <artifactId>spring-data-mongodb-parent</artifactId>
-<version>3.1.3</version>
+<version>3.1.6</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -26,7 +26,6 @@ import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
 import org.springframework.core.convert.converter.Converter;
 import org.springframework.core.type.filter.AnnotationTypeFilter;
-import org.springframework.data.annotation.Persistent;
 import org.springframework.data.convert.CustomConversions;
 import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
 import org.springframework.data.mapping.model.FieldNamingStrategy;
@@ -140,8 +139,7 @@ public abstract class MongoConfigurationSupport {
 }

 /**
- * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document} and
- * {@link Persistent}.
+ * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document}.
  *
  * @param basePackage must not be {@literal null}.
  * @return
@@ -161,7 +159,6 @@ public abstract class MongoConfigurationSupport {
 ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider(
 false);
 componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class));
-componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class));

 for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) {
@@ -125,6 +125,11 @@ public interface ExecutableFindOperation {

 /**
  * Get the number of matching elements.
+ * <p />
+ * This method uses an {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) aggregation
+ * execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees shard,
+ * session and transaction compliance. In case an inaccurate count satisfies the applications needs use
+ * {@link MongoOperations#estimatedCount(String)} for empty queries instead.
  *
  * @return total number of matching elements.
  */
@@ -21,6 +21,7 @@ import java.util.HashSet;
 import java.util.Set;

 import org.bson.BsonInvalidOperationException;

 import org.springframework.dao.DataAccessException;
 import org.springframework.dao.DataAccessResourceFailureException;
 import org.springframework.dao.DataIntegrityViolationException;
@@ -39,6 +40,7 @@ import org.springframework.util.ClassUtils;
 import com.mongodb.MongoBulkWriteException;
 import com.mongodb.MongoException;
 import com.mongodb.MongoServerException;
+import com.mongodb.MongoSocketException;
 import com.mongodb.bulk.BulkWriteError;

 /**
@@ -49,6 +51,7 @@ import com.mongodb.bulk.BulkWriteError;
  * @author Oliver Gierke
  * @author Michal Vich
  * @author Christoph Strobl
+ * @author Brice Vandeputte
  */
 public class MongoExceptionTranslator implements PersistenceExceptionTranslator {

@@ -78,6 +81,10 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
 throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex);
 }

+if (ex instanceof MongoSocketException) {
+return new DataAccessResourceFailureException(ex.getMessage(), ex);
+}
+
 String exception = ClassUtils.getShortName(ClassUtils.getUserClass(ex.getClass()));

 if (DUPLICATE_KEY_EXCEPTIONS.contains(exception)) {
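
With the hunk above, MongoSocketException and every one of its subclasses (read timeouts, write failures) now translate to Spring's DataAccessResourceFailureException instead of falling through to the name-based lookup. A minimal sketch of the translation, assuming the spring-data-mongodb and MongoDB driver artifacts from the pom.xml above are on the classpath:

```java
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;

import com.mongodb.MongoSocketReadTimeoutException;
import com.mongodb.ServerAddress;

class SocketExceptionTranslationSketch {

	public static void main(String[] args) {

		MongoExceptionTranslator translator = new MongoExceptionTranslator();

		// A subclass of MongoSocketException is now caught by the instanceof check added above.
		DataAccessException translated = translator.translateExceptionIfPossible(
				new MongoSocketReadTimeoutException("read timed out", new ServerAddress(), new Exception("IOException")));

		System.out.println(translated.getClass().getSimpleName()); // DataAccessResourceFailureException
	}
}
```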
@@ -1160,6 +1160,12 @@ public interface MongoOperations extends FluentMongoOperations {
  * influence on the resulting number of documents found as those values are passed on to the server and potentially
  * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
  * count all matches.
+ * <p />
+ * This method uses an
+ * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+ * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
+ * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
+ * {@link #estimatedCount(Class)} for empty queries instead.
  *
  * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
  *          {@literal null}.
@@ -1176,6 +1182,12 @@ public interface MongoOperations extends FluentMongoOperations {
  * influence on the resulting number of documents found as those values are passed on to the server and potentially
  * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
  * count all matches.
+ * <p />
+ * This method uses an
+ * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+ * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
+ * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
+ * {@link #estimatedCount(String)} for empty queries instead.
  *
  * @param query the {@link Query} class that specifies the criteria used to find documents.
  * @param collectionName must not be {@literal null} or empty.
@@ -1187,6 +1199,9 @@ public interface MongoOperations extends FluentMongoOperations {
 /**
  * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type},
  * based on collection statistics.
+ * <p />
+ * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
+ * transactions.
  *
  * @param entityClass must not be {@literal null}.
  * @return the estimated number of documents.
@@ -1200,6 +1215,9 @@ public interface MongoOperations extends FluentMongoOperations {

 /**
  * Estimate the number of documents in the given collection based on collection statistics.
+ * <p />
+ * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
+ * transactions.
  *
  * @param collectionName must not be {@literal null}.
  * @return the estimated number of documents.
@@ -1214,6 +1232,12 @@ public interface MongoOperations extends FluentMongoOperations {
  * influence on the resulting number of documents found as those values are passed on to the server and potentially
  * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
  * count all matches.
+ * <p />
+ * This method uses an
+ * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+ * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
+ * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
+ * {@link #estimatedCount(String)} for empty queries instead.
  *
  * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
  *          {@literal null}.
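
The new Javadoc above draws a line between the exact, countDocuments-backed count(...) and the statistics-based estimatedCount(...). A short sketch of the trade-off, using a hypothetical Person document class:

```java
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class CountVersusEstimatedCountSketch {

	// Exact count: runs an aggregation-backed countDocuments, so it also works inside
	// transactions and on sharded clusters, but it has to evaluate the query.
	long activePeople(MongoOperations operations) {
		return operations.count(Query.query(Criteria.where("active").is(true)), Person.class);
	}

	// Fast but possibly inaccurate count taken from collection statistics; the Javadoc above
	// recommends it over count(new Query(), ...) whenever an empty query would be used.
	long roughlyAllPeople(MongoOperations operations) {
		return operations.estimatedCount(Person.class);
	}

	static class Person {
		String id;
		boolean active;
	}
}
```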
@@ -28,7 +28,6 @@ import org.bson.Document;
 import org.bson.conversions.Bson;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import org.springframework.beans.BeansException;
 import org.springframework.context.ApplicationContext;
 import org.springframework.context.ApplicationContextAware;
@@ -3454,7 +3453,20 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
 }
 }

 /**
  * @deprecated since 3.1.4. Use {@link #getMongoDatabaseFactory()} instead.
  * @return the {@link MongoDatabaseFactory} in use.
  */
 @Deprecated
 public MongoDatabaseFactory getMongoDbFactory() {
 return getMongoDatabaseFactory();
 }

 /**
  * @return the {@link MongoDatabaseFactory} in use.
  * @since 3.1.4
  */
 public MongoDatabaseFactory getMongoDatabaseFactory() {
 return mongoDbFactory;
 }

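
The hunk above deprecates getMongoDbFactory() in favour of the newly added getMongoDatabaseFactory() accessor (since 3.1.4); both return the same MongoDatabaseFactory instance. A minimal migration sketch:

```java
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.MongoTemplate;

class FactoryAccessorSketch {

	MongoDatabaseFactory factoryInUse(MongoTemplate template) {
		// template.getMongoDbFactory() still works but is deprecated since 3.1.4;
		// the replacement accessor returns the very same factory.
		return template.getMongoDatabaseFactory();
	}
}
```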
@@ -658,7 +658,8 @@ class QueryOperations {
 : mappedDocument != null ? mappedDocument.getDocument() : getMappedUpdate(domainType);

 Document filterWithShardKey = new Document(filter);
-getMappedShardKeyFields(domainType).forEach(key -> filterWithShardKey.putIfAbsent(key, shardKeySource.get(key)));
+getMappedShardKeyFields(domainType)
+.forEach(key -> filterWithShardKey.putIfAbsent(key, BsonUtils.resolveValue(shardKeySource, key)));

 return filterWithShardKey;
 }
@@ -106,6 +106,12 @@ public interface ReactiveFindOperation {

 /**
  * Get the number of matching elements.
+ * <p />
+ * This method uses an
+ * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+ * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but
+ * guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the applications
+ * needs use {@link ReactiveMongoOperations#estimatedCount(String)} for empty queries instead.
  *
  * @return {@link Mono} emitting total number of matching elements. Never {@literal null}.
  */
@@ -940,6 +940,12 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
  * influence on the resulting number of documents found as those values are passed on to the server and potentially
  * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
  * count all matches.
+ * <p />
+ * This method uses an
+ * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+ * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
+ * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
+ * {@link #estimatedCount(Class)} for empty queries instead.
  *
  * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
  *          {@literal null}.
@@ -956,6 +962,12 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
  * influence on the resulting number of documents found as those values are passed on to the server and potentially
  * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
  * count all matches.
+ * <p />
+ * This method uses an
+ * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+ * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
+ * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
+ * {@link #estimatedCount(String)} for empty queries instead.
  *
  * @param query the {@link Query} class that specifies the criteria used to find documents.
  * @param collectionName must not be {@literal null} or empty.
@@ -971,6 +983,12 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
  * influence on the resulting number of documents found as those values are passed on to the server and potentially
  * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
  * count all matches.
+ * <p />
+ * This method uses an
+ * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
+ * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
+ * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
+ * {@link #estimatedCount(String)} for empty queries instead.
  *
  * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
  *          {@literal null}.
@@ -983,6 +1001,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
 /**
  * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type},
  * based on collection statistics.
+ * <p />
+ * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
+ * transactions.
  *
  * @param entityClass must not be {@literal null}.
  * @return a {@link Mono} emitting the estimated number of documents.
@@ -996,6 +1017,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {

 /**
  * Estimate the number of documents in the given collection based on collection statistics.
+ * <p />
+ * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
+ * transactions.
  *
  * @param collectionName must not be {@literal null}.
  * @return a {@link Mono} emitting the estimated number of documents.
@@ -2730,6 +2730,14 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
 return potentiallyForceAcknowledgedWrite(wc);
 }

+/**
+ * @return the {@link MongoDatabaseFactory} in use.
+ * @since 3.1.4
+ */
+public ReactiveMongoDatabaseFactory getMongoDatabaseFactory() {
+return mongoDatabaseFactory;
+}
+
 @Nullable
 private WriteConcern potentiallyForceAcknowledgedWrite(@Nullable WriteConcern wc) {
@@ -17,17 +17,16 @@ package org.springframework.data.mongodb.core.convert;

 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.Map;

 import org.bson.Document;
 import org.bson.conversions.Bson;

 import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
 import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
 import org.springframework.data.mongodb.util.BsonUtils;
 import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;

 import com.mongodb.BasicDBObject;
 import com.mongodb.DBObject;

 /**
@@ -110,28 +109,7 @@ class DocumentAccessor {
  */
 @Nullable
 public Object get(MongoPersistentProperty property) {
-
-String fieldName = property.getFieldName();
-Map<String, Object> map = BsonUtils.asMap(document);
-
-if (!fieldName.contains(".")) {
-return map.get(fieldName);
-}
-
-Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
-Map<String, Object> source = map;
-Object result = null;
-
-while (source != null && parts.hasNext()) {
-
-result = source.get(parts.next());
-
-if (parts.hasNext()) {
-source = getAsMap(result);
-}
-}
-
-return result;
+return BsonUtils.resolveValue(document, property.getFieldName());
 }

 /**
@@ -157,71 +135,7 @@ class DocumentAccessor {

 Assert.notNull(property, "Property must not be null!");

-String fieldName = property.getFieldName();
-
-if (this.document instanceof Document) {
-
-if (((Document) this.document).containsKey(fieldName)) {
-return true;
-}
-} else if (this.document instanceof DBObject) {
-if (((DBObject) this.document).containsField(fieldName)) {
-return true;
-}
-}
-
-if (!fieldName.contains(".")) {
-return false;
-}
-
-String[] parts = fieldName.split("\\.");
-Map<String, Object> source;
-
-if (this.document instanceof Document) {
-source = ((Document) this.document);
-} else {
-source = ((DBObject) this.document).toMap();
-}
-
-Object result = null;
-
-for (int i = 1; i < parts.length; i++) {
-
-result = source.get(parts[i - 1]);
-source = getAsMap(result);
-
-if (source == null) {
-return false;
-}
-}
-
-return source.containsKey(parts[parts.length - 1]);
-}
-
-/**
- * Returns the given source object as map, i.e. {@link Document}s and maps as is or {@literal null} otherwise.
- *
- * @param source can be {@literal null}.
- * @return can be {@literal null}.
- */
-@Nullable
-@SuppressWarnings("unchecked")
-private static Map<String, Object> getAsMap(Object source) {
-
-if (source instanceof Document) {
-return (Document) source;
-}
-
-if (source instanceof BasicDBObject) {
-return (BasicDBObject) source;
-}
-
-if (source instanceof Map) {
-return (Map<String, Object>) source;
-}
-
-return null;
+return BsonUtils.hasValue(document, property.getFieldName());
 }

 /**
@@ -32,6 +32,9 @@ import java.util.concurrent.atomic.AtomicLong;

 import org.bson.BsonTimestamp;
 import org.bson.Document;
+import org.bson.UuidRepresentation;
+import org.bson.codecs.Codec;
+import org.bson.internal.CodecRegistryHelper;
 import org.bson.types.Binary;
 import org.bson.types.Code;
 import org.bson.types.Decimal128;
@@ -45,11 +48,12 @@ import org.springframework.data.convert.ReadingConverter;
 import org.springframework.data.convert.WritingConverter;
 import org.springframework.data.mongodb.core.query.Term;
 import org.springframework.data.mongodb.core.script.NamedMongoScript;
 import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;
 import org.springframework.util.NumberUtils;
 import org.springframework.util.StringUtils;

 import com.mongodb.MongoClientSettings;

 /**
  * Wrapper class to contain useful converters for the usage with Mongo.
  *
@@ -236,9 +240,13 @@ abstract class MongoConverters {

 INSTANCE;

+private final Codec<Document> codec = CodecRegistryHelper
+.createRegistry(MongoClientSettings.getDefaultCodecRegistry(), UuidRepresentation.JAVA_LEGACY)
+.get(Document.class);
+
 @Override
 public String convert(Document source) {
-return source.toJson();
+return source.toJson(codec);
 }
 }
@@ -1152,7 +1152,8 @@ public class QueryMapper {

 private boolean isPathToJavaLangClassProperty(PropertyPath path) {

-if (path.getType().equals(Class.class) && path.getLeafProperty().getOwningType().getType().equals(Class.class)) {
+if ((path.getType() == Class.class || path.getType().equals(Object.class))
+&& path.getLeafProperty().getType() == Class.class) {
 return true;
 }
 return false;
@@ -1261,9 +1262,9 @@ public class QueryMapper {

 String partial = iterator.next();

-boolean isPositional = (isPositionalParameter(partial) && (property.isMap() || property.isCollectionLike()));
+boolean isPositional = isPositionalParameter(partial) && property.isCollectionLike();

-if (isPositional) {
+if (isPositional || property.isMap()) {
 mappedName.append(".").append(partial);
 }
@@ -124,7 +124,12 @@ public class Criteria implements CriteriaDefinition {
 }

 /**
- * Static factory method to create a {@link Criteria} matching an example object.
+ * Static factory method to create a {@link Criteria} matching an example object. <br />
+ * By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when
+ * sticking with the default type key ({@code _class}), the query has restrictions such as
+ * <code>_class : { $in : [com.acme.Person] } </code>. <br />
+ * To avoid the above mentioned type restriction use an {@link UntypedExampleMatcher} with
+ * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}.
  *
  * @param example must not be {@literal null}.
  * @return new instance of {@link Criteria}.
@@ -615,8 +620,15 @@ public class Criteria implements CriteriaDefinition {
  */
 public Criteria alike(Example<?> sample) {

-criteria.put("$example", sample);
-return this;
+if (StringUtils.hasText(this.getKey())) {
+
+criteria.put("$example", sample);
+return this;
+}
+
+Criteria exampleCriteria = new Criteria();
+exampleCriteria.criteria.put("$example", sample);
+return registerCriteriaChainElement(exampleCriteria);
 }

 /**
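
The reworked alike(...) registers the $example part as its own chain element when no key is set, so it no longer collides with criteria chained onto the same Criteria instance (GH-3544). A sketch mirroring the integration test further down, with a hypothetical Person probe type:

```java
import org.springframework.data.domain.Example;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class AlikeWithAdditionalCriteriaSketch {

	Query byExampleAndRegex() {

		Person probe = new Person();
		probe.lastname = "stark";

		// The $example part and the chained firstname regex end up as separate criteria
		// instead of the example overwriting the rest of the chain.
		return new Query(new Criteria().alike(Example.of(probe)).and("firstname").regex(".*n.*"));
	}

	static class Person {
		String firstname;
		String lastname;
	}
}
```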
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.query;

 import java.util.regex.Pattern;

+import org.bson.BsonRegularExpression;
 import org.springframework.lang.Nullable;

 /**
@@ -102,6 +103,15 @@ public enum MongoRegexCreator {
 }
 }

+/**
+ * @param source
+ * @return
+ * @since 2.2.14
+ */
+public Object toCaseInsensitiveMatch(Object source) {
+return source instanceof String ? new BsonRegularExpression(Pattern.quote((String) source), "i") : source;
+}
+
 private String prepareAndEscapeStringBeforeApplyingLikeRegex(String source, MatchMode matcherType) {

 if (MatchMode.REGEX == matcherType) {
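
The new toCaseInsensitiveMatch(...) quotes String input, so regex metacharacters are treated literally, and wraps it in a case-insensitive BsonRegularExpression; non-String values pass through untouched. A small sketch of the expected behaviour:

```java
import org.bson.BsonRegularExpression;
import org.springframework.data.mongodb.core.query.MongoRegexCreator;

class CaseInsensitiveMatchSketch {

	public static void main(String[] args) {

		Object match = MongoRegexCreator.INSTANCE.toCaseInsensitiveMatch("bEAuFoRd");

		// Pattern.quote wraps the source in \Q...\E, so ".*" style input cannot act as a wildcard.
		System.out.println(((BsonRegularExpression) match).getPattern()); // \QbEAuFoRd\E
		System.out.println(((BsonRegularExpression) match).getOptions()); // i

		// Non-String values are returned as-is.
		System.out.println(MongoRegexCreator.INSTANCE.toCaseInsensitiveMatch(42)); // 42
	}
}
```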
@@ -78,16 +78,31 @@ public interface MongoRepository<T, ID> extends PagingAndSortingRepository<T, ID
  */
 <S extends T> List<S> insert(Iterable<S> entities);

-/*
- * (non-Javadoc)
+/**
+ * Returns all entities matching the given {@link Example}. In case no match could be found an empty {@link List} is
+ * returned. <br />
+ * By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when
+ * sticking with the default type key ({@code _class}), the query has restrictions such as
+ * <code>_class : { $in : [com.acme.Person] }</code>. <br />
+ * To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with
+ * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}.
+ *
  * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example)
  */
 @Override
 <S extends T> List<S> findAll(Example<S> example);

-/*
- * (non-Javadoc)
- * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example, org.springframework.data.domain.Sort)
+/**
+ * Returns all entities matching the given {@link Example} applying the given {@link Sort}. In case no match could be
+ * found an empty {@link List} is returned. <br />
+ * By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when
+ * sticking with the default type key ({@code _class}), the query has restrictions such as
+ * <code>_class : { $in : [com.acme.Person] }</code>. <br />
+ * To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with
+ * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}.
+ *
+ * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example,
+ *      org.springframework.data.domain.Sort)
  */
 @Override
 <S extends T> List<S> findAll(Example<S> example, Sort sort);
@@ -64,16 +64,33 @@ public interface ReactiveMongoRepository<T, ID> extends ReactiveSortingRepositor
  */
 <S extends T> Flux<S> insert(Publisher<S> entities);

-/*
- * (non-Javadoc)
- * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example)
+/**
+ * Returns all entities matching the given {@link Example}. In case no match could be found an empty {@link Flux} is
+ * returned. <br />
+ * By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when
+ * sticking with the default type key ({@code _class}), the query has restrictions such as
+ * <code>_class : { $in : [com.acme.Person] }</code>. <br />
+ * To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with
+ * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}.
+ *
+ * @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#findAll(org.springframework.data.domain.Example)
  */
 @Override
 <S extends T> Flux<S> findAll(Example<S> example);

-/*
- * (non-Javadoc)
- * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example, org.springframework.data.domain.Sort)
+/**
+ * Returns all entities matching the given {@link Example} applying the given {@link Sort}. In case no match could be
+ * found an empty {@link Flux} is returned. <br />
+ * By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when
+ * sticking with the default type key ({@code _class}), the query has restrictions such as
+ * <code>_class : { $in : [com.acme.Person] }</code>. <br />
+ * To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with
+ * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}.
+ *
+ * @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#findAll(org.springframework.data.domain.Example,
+ *      org.springframework.data.domain.Sort)
  */
 @Override
 <S extends T> Flux<S> findAll(Example<S> example, Sort sort);

 }
@@ -25,7 +25,6 @@ import java.util.regex.Pattern;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import org.springframework.data.domain.Range;
 import org.springframework.data.domain.Range.Bound;
 import org.springframework.data.domain.Sort;
@@ -51,8 +50,10 @@ import org.springframework.data.repository.query.parser.Part;
 import org.springframework.data.repository.query.parser.Part.IgnoreCaseType;
 import org.springframework.data.repository.query.parser.Part.Type;
 import org.springframework.data.repository.query.parser.PartTree;
+import org.springframework.data.util.Streamable;
 import org.springframework.util.Assert;
 import org.springframework.util.ClassUtils;
+import org.springframework.util.ObjectUtils;

 /**
  * Custom query creator to create Mongo criterias.
@@ -196,9 +197,9 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
 case IS_NULL:
 return criteria.is(null);
 case NOT_IN:
-return criteria.nin(nextAsArray(parameters));
+return criteria.nin(nextAsList(parameters, part));
 case IN:
-return criteria.in(nextAsArray(parameters));
+return criteria.in(nextAsList(parameters, part));
 case LIKE:
 case STARTING_WITH:
 case ENDING_WITH:
@@ -337,7 +338,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
 Iterator<Object> parameters) {

 if (property.isCollectionLike()) {
-return criteria.in(nextAsArray(parameters));
+return criteria.in(nextAsList(parameters, part));
 }

 return addAppropriateLikeRegexTo(criteria, part, parameters.next());
@@ -400,17 +401,24 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
 String.format("Expected parameter type of %s but got %s!", type, parameter.getClass()));
 }

-private Object[] nextAsArray(Iterator<Object> iterator) {
+private java.util.List<?> nextAsList(Iterator<Object> iterator, Part part) {

-Object next = iterator.next();
-
-if (next instanceof Collection) {
-return ((Collection<?>) next).toArray();
-} else if (next != null && next.getClass().isArray()) {
-return (Object[]) next;
+Streamable<?> streamable = asStreamable(iterator.next());
+if (!isSimpleComparisionPossible(part)) {
+streamable = streamable.map(MongoRegexCreator.INSTANCE::toCaseInsensitiveMatch);
 }

-return new Object[] { next };
+return streamable.toList();
 }

+private Streamable<?> asStreamable(Object value) {
+
+if (value instanceof Collection) {
+return Streamable.of((Collection<?>) value);
+} else if (ObjectUtils.isArray(value)) {
+return Streamable.of((Object[]) value);
+}
+return Streamable.of(value);
+}
+
 private String toLikeRegex(String source, Part part) {
@@ -282,6 +282,109 @@ public class BsonUtils {
 .orElseGet(() -> new DocumentCodec(codecRegistryProvider.getCodecRegistry())));
 }

+/**
+ * Resolve a the value for a given key. If the given {@link Bson} value contains the key the value is immediately
+ * returned. If not and the key contains a path using the dot ({@code .}) notation it will try to resolve the path by
+ * inspecting the individual parts. If one of the intermediate ones is {@literal null} or cannot be inspected further
+ * (wrong) type, {@literal null} is returned.
+ *
+ * @param bson the source to inspect. Must not be {@literal null}.
+ * @param key the key to lookup. Must not be {@literal null}.
+ * @return can be {@literal null}.
+ * @since 3.0.8
+ */
+@Nullable
+public static Object resolveValue(Bson bson, String key) {
+
+Map<String, Object> source = asMap(bson);
+
+if (source.containsKey(key) || !key.contains(".")) {
+return source.get(key);
+}
+
+String[] parts = key.split("\\.");
+
+for (int i = 1; i < parts.length; i++) {
+
+Object result = source.get(parts[i - 1]);
+
+if (!(result instanceof Bson)) {
+return null;
+}
+
+source = asMap((Bson) result);
+}
+
+return source.get(parts[parts.length - 1]);
+}
+
+/**
+ * Returns whether the underlying {@link Bson bson} has a value ({@literal null} or non-{@literal null}) for the given
+ * {@code key}.
+ *
+ * @param bson the source to inspect. Must not be {@literal null}.
+ * @param key the key to lookup. Must not be {@literal null}.
+ * @return {@literal true} if no non {@literal null} value present.
+ * @since 3.0.8
+ */
+public static boolean hasValue(Bson bson, String key) {
+
+Map<String, Object> source = asMap(bson);
+
+if (source.get(key) != null) {
+return true;
+}
+
+if (!key.contains(".")) {
+return false;
+}
+
+String[] parts = key.split("\\.");
+
+Object result;
+
+for (int i = 1; i < parts.length; i++) {
+
+result = source.get(parts[i - 1]);
+source = getAsMap(result);
+
+if (source == null) {
+return false;
+}
+}
+
+return source.containsKey(parts[parts.length - 1]);
+}
+
+/**
+ * Returns the given source object as map, i.e. {@link Document}s and maps as is or {@literal null} otherwise.
+ *
+ * @param source can be {@literal null}.
+ * @return can be {@literal null}.
+ */
+@Nullable
+@SuppressWarnings("unchecked")
+private static Map<String, Object> getAsMap(Object source) {
+
+if (source instanceof Document) {
+return (Document) source;
+}
+
+if (source instanceof BasicDBObject) {
+return (BasicDBObject) source;
+}
+
+if (source instanceof DBObject) {
+return ((DBObject) source).toMap();
+}
+
+if (source instanceof Map) {
+return (Map<String, Object>) source;
+}
+
+return null;
+}
+
 @Nullable
 private static String toJson(@Nullable Object value) {
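
resolveValue(...) and hasValue(...) centralise the dot-path traversal that DocumentAccessor and the shard-key filter above now delegate to. A small sketch of the lookup semantics:

```java
import org.bson.Document;
import org.springframework.data.mongodb.util.BsonUtils;

class DotPathResolutionSketch {

	public static void main(String[] args) {

		Document source = new Document("nested", new Document("custom-named-field", "cname"));

		// Walks the dot path through nested documents part by part.
		System.out.println(BsonUtils.resolveValue(source, "nested.custom-named-field")); // cname

		// Returns null / false when an intermediate segment is missing or not a document.
		System.out.println(BsonUtils.resolveValue(source, "nested.other.deep")); // null
		System.out.println(BsonUtils.hasValue(source, "nested.missing"));        // false
	}
}
```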
@@ -20,6 +20,8 @@ import com.mongodb.client.result.UpdateResult
 import com.mongodb.reactivestreams.client.MongoCollection
 import org.bson.Document
 import org.springframework.data.geo.GeoResult
+import org.springframework.data.mongodb.core.aggregation.Aggregation
+import org.springframework.data.mongodb.core.aggregation.TypedAggregation
 import org.springframework.data.mongodb.core.index.ReactiveIndexOperations
 import org.springframework.data.mongodb.core.query.NearQuery
 import org.springframework.data.mongodb.core.query.Query
@@ -210,6 +212,52 @@ inline fun <reified T : Any, reified E : Any> ReactiveMongoOperations.findDistin
 if (collectionName != null) findDistinct(query, field, collectionName, E::class.java, T::class.java)
 else findDistinct(query, field, E::class.java, T::class.java)

+/**
+ * Extension for [ReactiveMongoOperations.aggregate] leveraging reified type parameters.
+ *
+ * @author Wonwoo Lee
+ * @since 3.1.4
+ */
+inline fun <reified O : Any> ReactiveMongoOperations.aggregate(
+aggregation: TypedAggregation<*>,
+collectionName: String
+): Flux<O> =
+this.aggregate(aggregation, collectionName, O::class.java)
+
+/**
+ * Extension for [ReactiveMongoOperations.aggregate] leveraging reified type parameters.
+ *
+ * @author Wonwoo Lee
+ * @since 3.1.4
+ */
+inline fun <reified O : Any> ReactiveMongoOperations.aggregate(aggregation: TypedAggregation<*>): Flux<O> =
+this.aggregate(aggregation, O::class.java)
+
+/**
+ * Extension for [ReactiveMongoOperations.aggregate] leveraging reified type parameters.
+ *
+ * @author Wonwoo Lee
+ * @author Mark Paluch
+ * @since 3.1.4
+ */
+inline fun <reified I : Any, reified O : Any> ReactiveMongoOperations.aggregate(
+aggregation: Aggregation
+): Flux<O> =
+this.aggregate(aggregation, I::class.java, O::class.java)
+
+/**
+ * Extension for [ReactiveMongoOperations.aggregate] leveraging reified type parameters.
+ *
+ * @author Wonwoo Lee
+ * @since 3.1.4
+ */
+inline fun <reified O : Any> ReactiveMongoOperations.aggregate(
+aggregation: Aggregation,
+collectionName: String
+): Flux<O> =
+this.aggregate(aggregation, collectionName, O::class.java)
+
 /**
  * Extension for [ReactiveMongoOperations.geoNear] leveraging reified type parameters.
  *
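
The reified aggregate(...) extensions above are thin wrappers; each one delegates to an existing ReactiveMongoOperations.aggregate overload with the reified classes filled in. A Java sketch of the underlying call the extensions resolve to, using a hypothetical OrderSummary output type and pipeline:

```java
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.aggregation.Aggregation;

import reactor.core.publisher.Flux;

class AggregateDelegationSketch {

	Flux<OrderSummary> summarize(ReactiveMongoOperations operations) {

		Aggregation aggregation = Aggregation.newAggregation(
				Aggregation.group("customerId").count().as("orders"));

		// aggregate<OrderSummary>(aggregation, "orders") in Kotlin boils down to this overload.
		return operations.aggregate(aggregation, "orders", OrderSummary.class);
	}

	static class OrderSummary {
		String customerId;
		long orders;
	}
}
```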
@@ -17,8 +17,6 @@ package org.springframework.data.mongodb.core;

 import static org.assertj.core.api.Assertions.*;

-import java.net.UnknownHostException;
-
 import org.bson.BsonDocument;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -32,11 +30,14 @@ import org.springframework.dao.InvalidDataAccessResourceUsageException;
 import org.springframework.data.mongodb.ClientSessionException;
 import org.springframework.data.mongodb.MongoTransactionException;
 import org.springframework.data.mongodb.UncategorizedMongoDbException;
+import org.springframework.lang.Nullable;

 import com.mongodb.MongoCursorNotFoundException;
 import com.mongodb.MongoException;
 import com.mongodb.MongoInternalException;
 import com.mongodb.MongoSocketException;
+import com.mongodb.MongoSocketReadTimeoutException;
+import com.mongodb.MongoSocketWriteException;
 import com.mongodb.ServerAddress;

 /**
@@ -45,18 +46,20 @@ import com.mongodb.ServerAddress;
  * @author Michal Vich
  * @author Oliver Gierke
  * @author Christoph Strobl
+ * @author Brice Vandeputte
  */
-public class MongoExceptionTranslatorUnitTests {
+class MongoExceptionTranslatorUnitTests {

-MongoExceptionTranslator translator;
+private static final String EXCEPTION_MESSAGE = "IOException";
+private MongoExceptionTranslator translator;

 @BeforeEach
-public void setUp() {
+void setUp() {
 translator = new MongoExceptionTranslator();
 }

 @Test
-public void translateDuplicateKey() {
+void translateDuplicateKey() {

 expectExceptionWithCauseMessage(
 translator.translateExceptionIfPossible(
@@ -64,17 +67,33 @@ public class MongoExceptionTranslatorUnitTests {
 DuplicateKeyException.class, null);
 }

-@Test
-public void translateSocketException() {
+@Test // GH-3568
+void translateSocketException() {

 expectExceptionWithCauseMessage(
-translator.translateExceptionIfPossible(new MongoSocketException("IOException", new ServerAddress())),
-DataAccessResourceFailureException.class, "IOException");
+translator.translateExceptionIfPossible(new MongoSocketException(EXCEPTION_MESSAGE, new ServerAddress())),
+DataAccessResourceFailureException.class, EXCEPTION_MESSAGE);
 }

+@Test // GH-3568
+void translateSocketExceptionSubclasses() {
+
+expectExceptionWithCauseMessage(
+translator.translateExceptionIfPossible(
+new MongoSocketWriteException("intermediate message", new ServerAddress(), new Exception(EXCEPTION_MESSAGE))
+),
+DataAccessResourceFailureException.class, EXCEPTION_MESSAGE);
+
+expectExceptionWithCauseMessage(
+translator.translateExceptionIfPossible(
+new MongoSocketReadTimeoutException("intermediate message", new ServerAddress(), new Exception(EXCEPTION_MESSAGE))
+),
+DataAccessResourceFailureException.class, EXCEPTION_MESSAGE);
+
+}
+
 @Test
-public void translateCursorNotFound() throws UnknownHostException {
+void translateCursorNotFound() {

 expectExceptionWithCauseMessage(
 translator.translateExceptionIfPossible(new MongoCursorNotFoundException(1L, new ServerAddress())),
@@ -82,21 +101,21 @@ public class MongoExceptionTranslatorUnitTests {
 }

 @Test
-public void translateToDuplicateKeyException() {
+void translateToDuplicateKeyException() {

 checkTranslatedMongoException(DuplicateKeyException.class, 11000);
 checkTranslatedMongoException(DuplicateKeyException.class, 11001);
 }

 @Test
-public void translateToDataAccessResourceFailureException() {
+void translateToDataAccessResourceFailureException() {

 checkTranslatedMongoException(DataAccessResourceFailureException.class, 12000);
 checkTranslatedMongoException(DataAccessResourceFailureException.class, 13440);
 }

 @Test
-public void translateToInvalidDataAccessApiUsageException() {
+void translateToInvalidDataAccessApiUsageException() {

 checkTranslatedMongoException(InvalidDataAccessApiUsageException.class, 10003);
 checkTranslatedMongoException(InvalidDataAccessApiUsageException.class, 12001);
@@ -106,7 +125,7 @@ public class MongoExceptionTranslatorUnitTests {
 }

 @Test
-public void translateToUncategorizedMongoDbException() {
+void translateToUncategorizedMongoDbException() {

 MongoException exception = new MongoException(0, "");
 DataAccessException translatedException = translator.translateExceptionIfPossible(exception);
@@ -115,7 +134,7 @@ public class MongoExceptionTranslatorUnitTests {
 }

 @Test
-public void translateMongoInternalException() {
+void translateMongoInternalException() {

 MongoInternalException exception = new MongoInternalException("Internal exception");
 DataAccessException translatedException = translator.translateExceptionIfPossible(exception);
@@ -124,14 +143,14 @@ public class MongoExceptionTranslatorUnitTests {
 }

 @Test
-public void translateUnsupportedException() {
+void translateUnsupportedException() {

 RuntimeException exception = new RuntimeException();
 assertThat(translator.translateExceptionIfPossible(exception)).isNull();
 }

 @Test // DATAMONGO-2045
-public void translateSessionExceptions() {
+void translateSessionExceptions() {

 checkTranslatedMongoException(ClientSessionException.class, 206);
 checkTranslatedMongoException(ClientSessionException.class, 213);
@@ -140,7 +159,7 @@ public class MongoExceptionTranslatorUnitTests {
 }

 @Test // DATAMONGO-2045
-public void translateTransactionExceptions() {
+void translateTransactionExceptions() {

 checkTranslatedMongoException(MongoTransactionException.class, 217);
 checkTranslatedMongoException(MongoTransactionException.class, 225);
@@ -163,13 +182,13 @@ public class MongoExceptionTranslatorUnitTests {
 assertThat(((MongoException) cause).getCode()).isEqualTo(code);
 }

-private static void expectExceptionWithCauseMessage(NestedRuntimeException e,
+private static void expectExceptionWithCauseMessage(@Nullable NestedRuntimeException e,
 Class<? extends NestedRuntimeException> type) {
 expectExceptionWithCauseMessage(e, type, null);
 }

-private static void expectExceptionWithCauseMessage(NestedRuntimeException e,
-Class<? extends NestedRuntimeException> type, String message) {
+private static void expectExceptionWithCauseMessage(@Nullable NestedRuntimeException e,
+Class<? extends NestedRuntimeException> type, @Nullable String message) {

 assertThat(e).isInstanceOf(type);
@@ -84,6 +84,7 @@ import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
 import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator;
 import org.springframework.data.mongodb.core.mapping.Field;
 import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
 import org.springframework.data.mongodb.core.mapping.Sharded;
 import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener;
 import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;
 import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
@@ -1922,6 +1923,24 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
 verify(findIterable, never()).first();
 }

+@Test // GH-3590
+void shouldIncludeValueFromNestedShardKeyPath() {
+
+WithShardKeyPointingToNested source = new WithShardKeyPointingToNested();
+source.id = "id-1";
+source.value = "v1";
+source.nested = new WithNamedFields();
+source.nested.customName = "cname";
+source.nested.name = "name";
+
+template.save(source);
+
+ArgumentCaptor<Bson> filter = ArgumentCaptor.forClass(Bson.class);
+verify(collection).replaceOne(filter.capture(), any(), any());
+
+assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("value", "v1").append("nested.custom-named-field", "cname"));
+}
+
 @Test // DATAMONGO-2341
 void saveShouldProjectOnShardKeyWhenLoadingExistingDocument() {
@@ -2267,6 +2286,13 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
 @Field("firstname") String name;
 }

+@Sharded(shardKey = {"value", "nested.customName"})
+static class WithShardKeyPointingToNested {
+String id;
+String value;
+WithNamedFields nested;
+}
+
 /**
  * Mocks out the {@link MongoTemplate#getDb()} method to return the {@link DB} mock instead of executing the actual
  * behaviour.
@@ -139,16 +139,14 @@ public class QueryByExampleTests {
 assertThat(result).containsExactlyInAnyOrder(p1, p2, p3);
 }

-@Test // DATAMONGO-1245
+@Test // DATAMONGO-1245, GH-3544
 public void findByExampleWithCriteria() {

 Person sample = new Person();
 sample.lastname = "stark";

-Query query = new Query(new Criteria().alike(Example.of(sample)).and("firstname").regex("^ary*"));
-
-List<Person> result = operations.find(query, Person.class);
-assertThat(result).hasSize(1);
+Query query = new Query(new Criteria().alike(Example.of(sample)).and("firstname").regex(".*n.*"));
+assertThat(operations.find(query, Person.class)).containsExactly(p1);
 }

 @Test // DATAMONGO-1459
@@ -30,6 +30,8 @@ import java.time.LocalDateTime;
 import java.time.temporal.ChronoUnit;
 import java.util.*;

 import javax.persistence.metamodel.EmbeddableType;

 import org.assertj.core.api.Assertions;
 import org.bson.types.Code;
 import org.bson.types.Decimal128;
@@ -2179,6 +2181,15 @@ public class MappingMongoConverterUnitTests {
 assertThat(((LinkedHashMap) result.get("cluster")).get("_id")).isEqualTo(100L);
 }

+@Test // GH-3546
+void readFlattensNestedDocumentToStringIfNecessary() {
+
+org.bson.Document source = new org.bson.Document("street", new org.bson.Document("json", "string").append("_id", UUID.randomUUID()));
+
+Address target = converter.read(Address.class, source);
+assertThat(target.street).isNotNull();
+}
+
 static class GenericType<T> {
 T content;
 }
@@ -771,6 +771,19 @@ public class QueryMapperUnitTests {
 assertThat(document).containsEntry("legacyPoint.y", 20D);
 }

+@Test // GH-3544
+void exampleWithCombinedCriteriaShouldBeMappedCorrectly() {
+
+Foo probe = new Foo();
+probe.embedded = new EmbeddedClass();
+probe.embedded.id = "conflux";
+
+Query query = query(byExample(probe).and("listOfItems").exists(true));
+org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class));
+
+assertThat(document).containsEntry("embedded\\._id", "conflux").containsEntry("my_items", new org.bson.Document("$exists", true));
+}
+
 @Test // DATAMONGO-1988
 void mapsStringObjectIdRepresentationToObjectIdWhenReferencingIdProperty() {
@@ -1089,6 +1089,38 @@ class UpdateMapperUnitTests {
 assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("aliased.$[element].value", 10)));
 }

+@Test // GH-3552
+void numericKeyForMap() {
+
+Update update = new Update().set("map.601218778970110001827396", "testing");
+Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+context.getPersistentEntity(EntityWithObjectMap.class));
+
+assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"map.601218778970110001827396\": \"testing\"}}");
+}
+
+@Test // GH-3552
+void numericKeyInMapOfNestedPath() {
+
+Update update = new Update().set("map.601218778970110001827396.value", "testing");
+Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+context.getPersistentEntity(EntityWithObjectMap.class));
+
+assertThat(mappedUpdate)
+.isEqualTo("{\"$set\": {\"map.601218778970110001827396.value\": \"testing\"}}");
+}
+
+@Test // GH-3566
+void mapsObjectClassPropertyFieldInMapValueTypeAsKey() {
+
+Update update = new Update().set("map.class", "value");
+Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+context.getPersistentEntity(EntityWithObjectMap.class));
+
+assertThat(mappedUpdate)
+.isEqualTo("{\"$set\": {\"map.class\": \"value\"}}");
+}
+
 static class DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes {
 ListModelWrapper concreteTypeWithListAttributeOfInterfaceType;
 }
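
The GH-3552 tests pin down that purely numeric path segments are kept as map keys rather than being mistaken for positional array operators. A usage sketch mirroring the EntityWithObjectMap fixture used above:

```java
import java.util.Map;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

class NumericMapKeyUpdateSketch {

	void setNumericKey(MongoOperations operations) {

		// Maps to {"$set": {"map.601218778970110001827396": "testing"}} as asserted above.
		Update update = new Update().set("map.601218778970110001827396", "testing");

		operations.updateFirst(Query.query(Criteria.where("_id").is("id-1")), update, EntityWithObjectMap.class);
	}

	static class EntityWithObjectMap {
		String id;
		Map<Object, Object> map;
	}
}
```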
@@ -1363,4 +1363,19 @@ public abstract class AbstractPersonRepositoryIntegrationTests {
 assertThat(repository.findWithSpelByFirstnameForSpELExpressionWithParameterIndexOnly("Dave")).containsExactly(dave);
 assertThat(repository.findWithSpelByFirstnameForSpELExpressionWithParameterIndexOnly("Carter")).containsExactly(carter);
 }

+@Test // GH-3395
+void caseInSensitiveInClause() {
+assertThat(repository.findByLastnameIgnoreCaseIn("bEAuFoRd", "maTTheWs")).hasSize(3);
+}
+
+@Test // GH-3395
+void caseInSensitiveInClauseQuotesExpressions() {
+assertThat(repository.findByLastnameIgnoreCaseIn(".*")).isEmpty();
+}
+
+@Test // GH-3395
+void caseSensitiveInClauseIgnoresExpressions() {
+assertThat(repository.findByFirstnameIn(".*")).isEmpty();
+}
 }
@@ -125,6 +125,8 @@ public interface PersonRepository extends MongoRepository<Person, String>, Query
 @Query("{ 'lastname' : { '$regex' : '?0', '$options' : 'i'}}")
 Page<Person> findByLastnameLikeWithPageable(String lastname, Pageable pageable);

+List<Person> findByLastnameIgnoreCaseIn(String... lastname);
+
 /**
  * Returns all {@link Person}s with a firstname contained in the given varargs.
  *
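
Declaring the derived query is all that is needed; as the GH-3395 tests above show, IgnoreCase combined with In quotes every value and applies the i option, so probe values match literally and case-insensitively instead of being interpreted as regular expressions. A repository sketch with a hypothetical Person document:

```java
import java.util.List;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.repository.MongoRepository;

@Document
class Person {
	@Id String id;
	String lastname;
}

interface PersonLastnameRepository extends MongoRepository<Person, String> {

	// "bEAuFoRd" matches "Beauford"; passing ".*" matches nothing because the value is quoted.
	List<Person> findByLastnameIgnoreCaseIn(String... lastnames);
}
```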
@@ -19,6 +19,8 @@ import example.first.First
 import io.mockk.mockk
 import io.mockk.verify
 import org.junit.Test
+import org.springframework.data.mongodb.core.aggregation.Aggregation
+import org.springframework.data.mongodb.core.aggregation.TypedAggregation
 import org.springframework.data.mongodb.core.query.NearQuery
 import org.springframework.data.mongodb.core.query.Query
 import org.springframework.data.mongodb.core.query.Update
@@ -28,6 +30,7 @@ import reactor.core.publisher.Mono
  * @author Sebastien Deleuze
  * @author Christoph Strobl
  * @author Mark Paluch
+ * @author Wonwoo Lee
  */
 class ReactiveMongoOperationsExtensionsTests {

@@ -598,7 +601,6 @@ class ReactiveMongoOperationsExtensionsTests {
 verify { operations.findDistinct(query, "field", "collection", First::class.java, String::class.java) }
 }

 @Test // DATAMONGO-1761
 @Suppress("DEPRECATION")
 fun `findDistinct(Query, String, KClass) should call java counterpart`() {
@@ -606,6 +608,55 @@ class ReactiveMongoOperationsExtensionsTests {
 val query = mockk<Query>()

 operations.findDistinct<String>(query, "field", First::class)
-verify { operations.findDistinct(query, "field", First::class.java, String::class.java) }
+verify {
+operations.findDistinct(
+query,
+"field",
+First::class.java,
+String::class.java
+)
+}
 }

+@Test // #893
+fun `aggregate(TypedAggregation, String, KClass) should call java counterpart`() {
+
+val aggregation = mockk<TypedAggregation<String>>()
+
+operations.aggregate<First>(aggregation, "foo")
+verify { operations.aggregate(aggregation, "foo", First::class.java) }
+}
+
+@Test // #893
+fun `aggregate(TypedAggregation, KClass) should call java counterpart`() {
+
+val aggregation = mockk<TypedAggregation<String>>()
+
+operations.aggregate<First>(aggregation)
+verify { operations.aggregate(aggregation, First::class.java) }
+}
+
+@Test // #893
+fun `aggregate(Aggregation, KClass) should call java counterpart`() {
+
+val aggregation = mockk<Aggregation>()
+
+operations.aggregate<String, First>(aggregation)
+verify {
+operations.aggregate(
+aggregation,
+String::class.java,
+First::class.java
+)
+}
+}
+
+@Test // #893
+fun `aggregate(Aggregation, String) should call java counterpart`() {
+
+val aggregation = mockk<Aggregation>()
+
+operations.aggregate<First>(aggregation, "foo")
+verify { operations.aggregate(aggregation, "foo", First::class.java) }
+}
 }
@@ -1446,6 +1446,7 @@ The geo-near operations return a `GeoResults` wrapper object that encapsulates `

MongoDB supports https://geojson.org/[GeoJSON] and simple (legacy) coordinate pairs for geospatial data. Those formats can both be used for storing as well as querying data. See the https://docs.mongodb.org/manual/core/2dsphere/#geospatial-indexes-store-geojson/[MongoDB manual on GeoJSON support] to learn about requirements and restrictions.

[[mongo.geo-json.domain.classes]]
==== GeoJSON Types in Domain Classes

Usage of https://geojson.org/[GeoJSON] types in domain classes is straightforward. The `org.springframework.data.mongodb.core.geo` package contains types such as `GeoJsonPoint`, `GeoJsonPolygon`, and others. These types extend the existing `org.springframework.data.geo` types. The following example uses a `GeoJsonPoint`:
@@ -1469,6 +1470,7 @@ public class Store {
----
====
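
For orientation, the elided `Store` example above boils down to a domain type roughly like the following sketch (field names and the comment are assumptions, not the verbatim listing):

[source,java]
----
public class Store {

	String id;

	/**
	 * location is stored in GeoJSON format:
	 * { "type" : "Point", "coordinates" : [ x, y ] }
	 */
	GeoJsonPoint location;
}
----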

[[mongo.geo-json.query-methods]]
==== GeoJSON Types in Repository Query Methods

Using GeoJSON types as repository query parameters forces usage of the `$geometry` operator when creating the query, as the following example shows:
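
A repository declaration along these lines would trigger the `$geometry` form; this is a minimal sketch, and the repository and method names are illustrative:

[source,java]
----
public interface StoreRepository extends CrudRepository<Store, String> {

	// a GeoJSON argument is rendered as { location : { $geoWithin : { $geometry : { ... } } } }
	List<Store> findByLocationWithin(GeoJsonPolygon polygon);
}
----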
@@ -1529,6 +1531,7 @@ repo.findByLocationWithin( <4>
<4> Use the legacy format `$polygon` operator.
====

[[mongo.geo-json.metrics]]
==== Metrics and Distance calculation

The MongoDB `$geoNear` operator allows usage of a GeoJSON Point or legacy coordinate pairs.
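
To illustrate, a small sketch of both variants with `NearQuery` (the coordinates and distances are made up):

[source,java]
----
// GeoJSON point, distances are calculated in meters
NearQuery.near(new GeoJsonPoint(-73.99171, 40.738868)).maxDistance(new Distance(1, Metrics.KILOMETERS));

// legacy coordinate pair with an explicit Metric
NearQuery.near(new Point(-73.99171, 40.738868), Metrics.KILOMETERS).maxDistance(1);
----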
@@ -1700,6 +1703,29 @@ Returning the 3 Documents just like the GeoJSON variant:
<4> Distance from the center point in _Kilometers_ - multiply by 1000 to match the _Meters_ of the GeoJSON variant.
====

[[mongo.geo-json.jackson-modules]]
==== GeoJSON Jackson Modules

By using the <<core.web>>, Spring Data registers additional Jackson ``Module``s with the `ObjectMapper` for deserializing common Spring Data domain types.
Please refer to the <<core.web.basic.jackson-mappers>> section to learn more about the infrastructure setup of this feature.

The MongoDB module additionally registers ``JsonDeserializer``s for the following GeoJSON types via its `GeoJsonConfiguration`, exposing the `GeoJsonModule`.
----
org.springframework.data.mongodb.core.geo.GeoJsonPoint
org.springframework.data.mongodb.core.geo.GeoJsonMultiPoint
org.springframework.data.mongodb.core.geo.GeoJsonLineString
org.springframework.data.mongodb.core.geo.GeoJsonMultiLineString
org.springframework.data.mongodb.core.geo.GeoJsonPolygon
org.springframework.data.mongodb.core.geo.GeoJsonMultiPolygon
----
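
Outside the web infrastructure, the module can presumably also be registered by hand; a sketch against a plain Jackson `ObjectMapper`:

[source,java]
----
ObjectMapper mapper = new ObjectMapper();
mapper.registerModule(new GeoJsonModule());

// the registered deserializer turns a GeoJSON document back into a GeoJsonPoint instance
GeoJsonPoint point = mapper.readValue("{ \"type\": \"Point\", \"coordinates\": [10.0, 20.0] }", GeoJsonPoint.class);
----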

[NOTE]
====
The `GeoJsonModule` only registers ``JsonDeserializer``s!

The next major version (`4.0`) will register both ``JsonDeserializer``s and ``JsonSerializer``s for GeoJSON types by default.
====

[[mongo.textsearch]]
=== Full-text Queries

@@ -1731,7 +1757,7 @@ A query searching for `coffee cake` can be defined and run as follows:
[source,java]
----
Query query = TextQuery
  .searching(new TextCriteria().matchingAny("coffee", "cake"));
  .queryText(new TextCriteria().matchingAny("coffee", "cake"));

List<Document> page = template.find(query, Document.class);
----
@@ -1744,7 +1770,7 @@ To sort results by relevance according to the `weights` use `TextQuery.sortBySco
[source,java]
----
Query query = TextQuery
  .searching(new TextCriteria().matchingAny("coffee", "cake"))
  .queryText(new TextCriteria().matchingAny("coffee", "cake"))
  .sortByScore() <1>
  .includeScore(); <2>

@@ -1759,8 +1785,8 @@ You can exclude search terms by prefixing the term with `-` or by using `notMatc
[source,java]
----
// search for 'coffee' and not 'cake'
TextQuery.searching(new TextCriteria().matching("coffee").matching("-cake"));
TextQuery.searching(new TextCriteria().matching("coffee").notMatching("cake"));
TextQuery.queryText(new TextCriteria().matching("coffee").matching("-cake"));
TextQuery.queryText(new TextCriteria().matching("coffee").notMatching("cake"));
----

`TextCriteria.matching` takes the provided term as is. Therefore, you can define phrases by putting them between double quotation marks (for example, `\"coffee cake\"`) or by using `TextCriteria.phrase`. The following example shows both ways of defining a phrase:
@@ -1768,8 +1794,8 @@ TextQuery.searching(new TextCriteria().matching("coffee").notMatching("cake"));
[source,java]
----
// search for phrase 'coffee cake'
TextQuery.searching(new TextCriteria().matching("\"coffee cake\""));
TextQuery.searching(new TextCriteria().phrase("coffee cake"));
TextQuery.queryText(new TextCriteria().matching("\"coffee cake\""));
TextQuery.queryText(new TextCriteria().phrase("coffee cake"));
----

You can set flags for `$caseSensitive` and `$diacriticSensitive` by using the corresponding methods on `TextCriteria`. Note that these two optional flags were introduced in MongoDB 3.2 and are not included in the query unless explicitly set.
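
For example, both flags can be set through the fluent API; a sketch, keeping in mind that they require MongoDB 3.2 or newer:

[source,java]
----
TextCriteria criteria = TextCriteria.forDefaultLanguage()
  .matching("coffee")
  .caseSensitive(true)
  .diacriticSensitive(true);
----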
@@ -1860,8 +1886,6 @@ AggregationResults<TagCount> results = template.aggregate(aggregation, "tags", T

WARNING: Indexes are only used if the collation used for the operation matches the index collation.

include::./mongo-json-schema.adoc[leveloffset=+1]

<<mongo.repositories>> support `Collations` via the `collation` attribute of the `@Query` annotation.

.Collation support for Repositories
@@ -1902,186 +1926,7 @@ as shown in (1) and (2), will be included when creating the index.
TIP: The most specific `Collation` overrules any others that may be defined. That means: method argument over query method annotation over domain type annotation.
====
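
As a sketch of the annotation-based variant mentioned above (the repository and method are illustrative; the `collation` attribute also accepts a full collation document such as `{'locale' : 'en_US'}`):

[source,java]
----
public interface PersonRepository extends MongoRepository<Person, String> {

	@Query(collation = "en_US") // fixed collation for this query method
	List<Person> findByFirstname(String firstname);
}
----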

[[mongo.jsonSchema]]
=== JSON Schema

As of version 3.6, MongoDB supports collections that validate documents against a provided https://docs.mongodb.com/manual/core/schema-validation/#json-schema[JSON Schema].
The schema itself and both validation action and level can be defined when creating the collection, as the following example shows:

.Sample JSON schema
====
[source,json]
----
{
  "type": "object", <1>

  "required": [ "firstname", "lastname" ], <2>

  "properties": { <3>

    "firstname": { <4>
      "type": "string",
      "enum": [ "luke", "han" ]
    },
    "address": { <5>
      "type": "object",
      "properties": {
        "postCode": { "type": "string", "minLength": 4, "maxLength": 5 }
      }
    }
  }
}
----
<1> JSON schema documents always describe a whole document from its root. A schema is a schema object itself that can contain
embedded schema objects that describe properties and subdocuments.
<2> `required` is a property that describes which properties are required in a document. It can be specified optionally, along with other
schema constraints. See MongoDB's documentation on https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#available-keywords[available keywords].
<3> `properties` is related to a schema object that describes an `object` type. It contains property-specific schema constraints.
<4> `firstname` specifies constraints for the `firstname` field inside the document. Here, it is a string-based `properties` element declaring
possible field values.
<5> `address` is a subdocument defining a schema for values in its `postCode` field.
====

You can provide a schema either by specifying a schema document (that is, by using the `Document` API to parse or build a document object) or by building it with Spring Data's JSON schema utilities in `org.springframework.data.mongodb.core.schema`. `MongoJsonSchema` is the entry point for all JSON schema-related operations. The following example shows how to use `MongoJsonSchema.builder()` to create a JSON schema:

.Creating a JSON schema
====
[source,java]
----
MongoJsonSchema.builder() <1>
    .required("firstname", "lastname") <2>

    .properties(
        string("firstname").possibleValues("luke", "han"), <3>

        object("address")
            .properties(string("postCode").minLength(4).maxLength(5)))

    .build(); <4>
----
<1> Obtain a schema builder to configure the schema with a fluent API.
<2> Configure required properties.
<3> Configure the String-typed `firstname` field, allowing only `luke` and `han` values. Properties can be typed or untyped. Use a static import of `JsonSchemaProperty` to make the syntax slightly more compact and to get entry points such as `string(…)`.
<4> Build the schema object. Use the schema to create either a collection or <<mongodb-template-query.criteria,query documents>>.
====

There are already some predefined and strongly typed schema objects (`JsonSchemaObject` and `JsonSchemaProperty`) available
through static methods on the gateway interfaces.
However, you may need to build custom property validation rules, which can be created through the builder API, as the following example shows:

[source,java]
----
// "birthdate" : { "bsonType": "date" }
JsonSchemaProperty.named("birthdate").ofType(Type.dateType());

// "birthdate" : { "bsonType": "date", "description": "Must be a date" }
JsonSchemaProperty.named("birthdate").with(JsonSchemaObject.of(Type.dateType()).description("Must be a date"));
----

The schema builder also provides support for https://docs.mongodb.com/manual/core/security-client-side-encryption/[Client-Side Field Level Encryption]. Please refer to <<mongo.jsonSchema.encrypted-fields>> for more information.

`CollectionOptions` provides the entry point to schema support for collections, as the following example shows:

.Create collection with `$jsonSchema`
====
[source,java]
----
MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build();

template.createCollection(Person.class, CollectionOptions.empty().schema(schema));
----
====

You can use a schema to query any collection for documents that match a given structure defined by a JSON schema, as the following example shows:

.Query for Documents matching a `$jsonSchema`
====
[source,java]
----
MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build();

template.find(query(matchingDocumentStructure(schema)), Person.class);
----
====

The following table shows the supported JSON schema types:

[cols="3,1,6", options="header"]
.Supported JSON schema types
|===
| Schema Type
| Java Type
| Schema Properties

| `untyped`
| -
| `description`, generated `description`, `enum`, `allOf`, `anyOf`, `oneOf`, `not`

| `object`
| `Object`
| `required`, `additionalProperties`, `properties`, `minProperties`, `maxProperties`, `patternProperties`

| `array`
| any array except `byte[]`
| `uniqueItems`, `additionalItems`, `items`, `minItems`, `maxItems`

| `string`
| `String`
| `minLength`, `maxLength`, `pattern`

| `int`
| `int`, `Integer`
| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`

| `long`
| `long`, `Long`
| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`

| `double`
| `float`, `Float`, `double`, `Double`
| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`

| `decimal`
| `BigDecimal`
| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`

| `number`
| `Number`
| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`

| `binData`
| `byte[]`
| (none)

| `boolean`
| `boolean`, `Boolean`
| (none)

| `null`
| `null`
| (none)

| `objectId`
| `ObjectId`
| (none)

| `date`
| `java.util.Date`
| (none)

| `timestamp`
| `BsonTimestamp`
| (none)

| `regex`
| `java.util.regex.Pattern`
| (none)

|===

NOTE: `untyped` is a generic type that is inherited by all typed schema types. It provides all `untyped` schema properties to typed schema types.

For more information, see https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#op._S_jsonSchema[$jsonSchema].
include::./mongo-json-schema.adoc[leveloffset=+1]

[[mongo.query.fluent-template-api]]
=== Fluent Template API
@@ -2221,6 +2066,7 @@ With the introduction of <<mongo.transactions>> this was no longer possible beca
So in version 2.x `MongoOperations.count()` would use the collection statistics if no transaction was in progress, and the aggregation variant if so.

As of Spring Data MongoDB 3.x, any `count` operation uses the aggregation-based count approach via MongoDB's `countDocuments`, regardless of whether filter criteria are present.
If the application can tolerate the limitations of working on collection statistics, `MongoOperations.estimatedCount()` offers an alternative.
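
A short sketch of the two options, assuming a `MongoOperations template` and a mapped `Person` type:

[source,java]
----
// exact count, executed as an aggregation via countDocuments; honors filter criteria and transactions
long people = template.count(new Query(), Person.class);

// estimated count, based on collection statistics; fast, but ignores filter criteria and transactions
long estimate = template.estimatedCount(Person.class);
----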

[NOTE]
====

@@ -97,3 +97,10 @@ Query query = new Query(new Criteria().alike(example));
List<Person> result = template.find(query, Person.class);
----
====

[NOTE]
====
`UntypedExampleMatcher` is likely the right choice for you if you are storing different entities within a single collection or opted out of writing <<mongo-template.type-mapping,type hints>>.

Also, keep in mind that using `@TypeAlias` requires eager initialization of the `MappingContext`. To do so, configure `initialEntitySet` to ensure proper alias resolution for read operations.
====
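
One way to configure the initial entity set, sketched against `AbstractMongoClientConfiguration` (class and entity names are illustrative):

[source,java]
----
@Configuration
class MongoConfig extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "database";
	}

	@Override
	protected Set<Class<?>> getInitialEntitySet() {
		// pre-register @TypeAlias-ed types so aliases can be resolved on reads
		return Collections.singleton(Person.class);
	}
}
----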

@@ -1,6 +1,95 @@
Spring Data MongoDB Changelog
=============================

Changes in version 3.1.6 (2021-03-17)
-------------------------------------
* #3592 - Remove @Persistent from entity-scan include filters.
* #3590 - Embedded sharding keys are not correctly picked up from the shardKeySource Document.
* #3589 - Upgrade to MongoDB Driver 4.1.2.
* #3573 - Json Schema section appears twice in reference documentation.
* #3568 - MongoSocketWriteException may be translated into DataAccessResourceFailureException.
* #3566 - Couldn't find PersistentEntity for type java.lang.Object when updating a field with suffix "class".
* #3552 - UpdateMapper drops numeric keys in Maps.
* #3395 - Derived findBy…IgnoreCaseIn query doesn't return expected results [DATAMONGO-2540].


Changes in version 3.0.8.RELEASE (2021-03-17)
---------------------------------------------
* #3590 - Embedded sharding keys are not correctly picked up from the shardKeySource Document.
* #3588 - Upgrade to MongoDB Driver 4.0.6.
* #3573 - Json Schema section appears twice in reference documentation.
* #3568 - MongoSocketWriteException may be translated into DataAccessResourceFailureException.
* #3566 - Couldn't find PersistentEntity for type java.lang.Object when updating a field with suffix "class".
* #3552 - UpdateMapper drops numeric keys in Maps.
* #3395 - Derived findBy…IgnoreCaseIn query doesn't return expected results [DATAMONGO-2540].


Changes in version 3.2.0-M4 (2021-02-18)
----------------------------------------


Changes in version 3.1.5 (2021-02-18)
-------------------------------------


Changes in version 3.2.0-M3 (2021-02-17)
----------------------------------------
* #3553 - Upgrade to MongoDB driver 4.2.0.
* #3546 - org.bson.codecs.configuration.CodecConfigurationException: The uuidRepresentation has not been specified, so the UUID cannot be encoded.
* #3544 - alike Criteria can't add andOperator.
* #3542 - Relax field name checks for TypedAggregations.
* #3540 - Allow access to mongoDatabaseFactory used in ReactiveMongoTemplate.
* #3529 - Update repository after GitHub issues migration.
* #3525 - Bug in full text query documentation [DATAMONGO-2673].
* #3517 - GeoJson: Improper Deserialization of Document with a GeoJsonPolygon [DATAMONGO-2664].
* #3508 - Add ReactiveMongoOperations.aggregate(…) Kotlin extension [DATAMONGO-2655].
* #3474 - Search by alike() criteria is broken when type alias information is not available [DATAMONGO-2620].
* #3055 - Improve count() and countDocuments() mapping documentation and/or method availability [DATAMONGO-2192].
* #2803 - Support flattening embedded/nested objects [DATAMONGO-1902].


Changes in version 3.1.4 (2021-02-17)
-------------------------------------
* #3546 - org.bson.codecs.configuration.CodecConfigurationException: The uuidRepresentation has not been specified, so the UUID cannot be encoded.
* #3544 - alike Criteria can't add andOperator.
* #3540 - Allow access to mongoDatabaseFactory used in ReactiveMongoTemplate.
* #3525 - Bug in full text query documentation [DATAMONGO-2673].
* #3517 - GeoJson: Improper Deserialization of Document with a GeoJsonPolygon [DATAMONGO-2664].
* #3508 - Add ReactiveMongoOperations.aggregate(…) Kotlin extension [DATAMONGO-2655].
* #3474 - Search by alike() criteria is broken when type alias information is not available [DATAMONGO-2620].
* #3055 - Improve count() and countDocuments() mapping documentation and/or method availability [DATAMONGO-2192].


Changes in version 3.0.7.RELEASE (2021-02-17)
---------------------------------------------
* DATAMONGO-2671 - DateFromParts millisecondsOf returns "milliseconds" as $dateFromParts function but it should be millisecond.
* DATAMONGO-2665 - Update CI jobs with Docker Login.
* #3544 - alike Criteria can't add andOperator.
* #3534 - Update copyright year to 2021.
* #3529 - Update repository after GitHub issues migration.
* #3525 - Bug in full text query documentation [DATAMONGO-2673].
* #3517 - GeoJson: Improper Deserialization of Document with a GeoJsonPolygon [DATAMONGO-2664].
* #3474 - Search by alike() criteria is broken when type alias information is not available [DATAMONGO-2620].


Changes in version 2.2.13.RELEASE (2021-02-17)
----------------------------------------------
* #3544 - alike Criteria can't add andOperator.
* #3534 - Update copyright year to 2021.
* #3529 - Update repository after GitHub issues migration.
* #3525 - Bug in full text query documentation [DATAMONGO-2673].


Changes in version 3.2.0-M2 (2021-01-13)
----------------------------------------
* DATAMONGO-2671 - DateFromParts millisecondsOf returns "milliseconds" as $dateFromParts function but it should be millisecond.
* DATAMONGO-2665 - Update CI jobs with Docker Login.
* DATAMONGO-2651 - Allow AggregationExpression as part of group operation.
* #3534 - Update copyright year to 2021.
* #3529 - Update repository after GitHub issues migration.
* #3515 - Deprecate KPropertyPath in favor of Spring Data Common's KPropertyPath [DATAMONGO-2662].


Changes in version 3.1.3 (2021-01-13)
-------------------------------------
* DATAMONGO-2671 - DateFromParts millisecondsOf returns "milliseconds" as $dateFromParts function but it should be millisecond.

@@ -3261,6 +3350,15 @@ Repository

@@ -1,4 +1,4 @@
Spring Data MongoDB 3.1.3 (2020.0.3)
Spring Data MongoDB 3.1.6 (2020.0.6)
Copyright (c) [2010-2019] Pivotal Software, Inc.

This product is licensed to you under the Apache License, Version 2.0 (the "License").
@@ -23,3 +23,6 @@ conditions of the subcomponent's license, as noted in the LICENSE file.