Compare commits
25 Commits
| Author | SHA1 | Date |
|---|---|---|
| | b39b2591b6 | |
| | 65c8317e38 | |
| | 9d0f7bac6a | |
| | 6f50747d21 | |
| | 5cf1578ad3 | |
| | 78a59c45ca | |
| | dccdfc8b4d | |
| | e48239eb8f | |
| | c3b4f61d29 | |
| | 22ed860b4a | |
| | bf642ad3f7 | |
| | fcd48539ea | |
| | bf10f72a57 | |
| | 1c652cce1c | |
| | dc2de878bc | |
| | 00cacc02ac | |
| | 811c2e5d7b | |
| | 200f3006bd | |
| | 1d6bea51ec | |
| | 7779ded45c | |
| | 918bf7c138 | |
| | abe3b9f6d7 | |
| | 41c453cc83 | |
| | 77784d88c7 | |
| | 263c62c880 | |
pom.xml
@@ -5,7 +5,7 @@

 	<groupId>org.springframework.data</groupId>
 	<artifactId>spring-data-mongodb-parent</artifactId>
-	<version>3.1.4</version>
+	<version>3.1.6</version>
 	<packaging>pom</packaging>

 	<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
 	<parent>
 		<groupId>org.springframework.data.build</groupId>
 		<artifactId>spring-data-parent</artifactId>
-		<version>2.4.4</version>
+		<version>2.4.6</version>
 	</parent>

 	<modules>
@@ -26,8 +26,8 @@
 	<properties>
 		<project.type>multi</project.type>
 		<dist.id>spring-data-mongodb</dist.id>
-		<springdata.commons>2.4.4</springdata.commons>
-		<mongo>4.1.1</mongo>
+		<springdata.commons>2.4.6</springdata.commons>
+		<mongo>4.1.2</mongo>
 		<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
 		<jmh.version>1.19</jmh.version>
 	</properties>
@@ -7,7 +7,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.1.4</version>
+		<version>3.1.6</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -14,7 +14,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.1.4</version>
+		<version>3.1.6</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -11,7 +11,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.1.4</version>
+		<version>3.1.6</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -26,7 +26,6 @@ import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
 import org.springframework.core.convert.converter.Converter;
 import org.springframework.core.type.filter.AnnotationTypeFilter;
-import org.springframework.data.annotation.Persistent;
 import org.springframework.data.convert.CustomConversions;
 import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
 import org.springframework.data.mapping.model.FieldNamingStrategy;
@@ -140,8 +139,7 @@ public abstract class MongoConfigurationSupport {
 	}

 	/**
-	 * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document} and
-	 * {@link Persistent}.
+	 * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document}.
 	 *
 	 * @param basePackage must not be {@literal null}.
 	 * @return
@@ -161,7 +159,6 @@ public abstract class MongoConfigurationSupport {
 		ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider(
 				false);
 		componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class));
-		componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class));

 		for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) {
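A minimal illustration (not part of the diff) of what the narrowed entity scan means: only types carrying @Document are picked up by scanForEntities(basePackage) for the initial entity set, while types annotated solely with @Persistent are skipped.

	// assumes org.springframework.data.mongodb.core.mapping.Document
	// and org.springframework.data.annotation.Persistent
	@Document              // still discovered during entity scanning
	class Order {
		String id;
	}

	@Persistent            // no longer discovered after this change
	class LegacyType {
		String id;
	}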
@@ -21,6 +21,7 @@ import java.util.HashSet;
 import java.util.Set;

 import org.bson.BsonInvalidOperationException;
+
 import org.springframework.dao.DataAccessException;
 import org.springframework.dao.DataAccessResourceFailureException;
 import org.springframework.dao.DataIntegrityViolationException;
@@ -39,6 +40,7 @@ import org.springframework.util.ClassUtils;
 import com.mongodb.MongoBulkWriteException;
 import com.mongodb.MongoException;
 import com.mongodb.MongoServerException;
+import com.mongodb.MongoSocketException;
 import com.mongodb.bulk.BulkWriteError;

 /**
@@ -49,6 +51,7 @@ import com.mongodb.bulk.BulkWriteError;
  * @author Oliver Gierke
  * @author Michal Vich
  * @author Christoph Strobl
+ * @author Brice Vandeputte
  */
 public class MongoExceptionTranslator implements PersistenceExceptionTranslator {

@@ -78,6 +81,10 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
 			throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex);
 		}

+		if (ex instanceof MongoSocketException) {
+			return new DataAccessResourceFailureException(ex.getMessage(), ex);
+		}
+
 		String exception = ClassUtils.getShortName(ClassUtils.getUserClass(ex.getClass()));

 		if (DUPLICATE_KEY_EXCEPTIONS.contains(exception)) {
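A short usage sketch (assuming the usual imports, not code taken from the commit): any MongoSocketException, including subclasses such as MongoSocketWriteException, now translates to Spring's DataAccessResourceFailureException.

	MongoExceptionTranslator translator = new MongoExceptionTranslator();

	DataAccessException translated = translator.translateExceptionIfPossible(
			new MongoSocketWriteException("write failed", new ServerAddress(), new IOException("broken pipe")));
	// translated is a DataAccessResourceFailureException carrying the original socket exception as its cause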
@@ -658,7 +658,8 @@ class QueryOperations {
 					: mappedDocument != null ? mappedDocument.getDocument() : getMappedUpdate(domainType);

 			Document filterWithShardKey = new Document(filter);
-			getMappedShardKeyFields(domainType).forEach(key -> filterWithShardKey.putIfAbsent(key, shardKeySource.get(key)));
+			getMappedShardKeyFields(domainType)
+					.forEach(key -> filterWithShardKey.putIfAbsent(key, BsonUtils.resolveValue(shardKeySource, key)));

 			return filterWithShardKey;
 		}
@@ -17,17 +17,16 @@ package org.springframework.data.mongodb.core.convert;

-import java.util.Arrays;
-import java.util.Iterator;
 import java.util.Map;

 import org.bson.Document;
 import org.bson.conversions.Bson;
+
 import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
 import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
 import org.springframework.data.mongodb.util.BsonUtils;
 import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;

 import com.mongodb.BasicDBObject;
 import com.mongodb.DBObject;

 /**
@@ -110,28 +109,7 @@ class DocumentAccessor {
 	 */
 	@Nullable
 	public Object get(MongoPersistentProperty property) {
-
-		String fieldName = property.getFieldName();
-		Map<String, Object> map = BsonUtils.asMap(document);
-
-		if (!fieldName.contains(".")) {
-			return map.get(fieldName);
-		}
-
-		Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
-		Map<String, Object> source = map;
-		Object result = null;
-
-		while (source != null && parts.hasNext()) {
-
-			result = source.get(parts.next());
-
-			if (parts.hasNext()) {
-				source = getAsMap(result);
-			}
-		}
-
-		return result;
+		return BsonUtils.resolveValue(document, property.getFieldName());
 	}

 	/**
@@ -157,71 +135,7 @@

 		Assert.notNull(property, "Property must not be null!");

-		String fieldName = property.getFieldName();
-
-		if (this.document instanceof Document) {
-
-			if (((Document) this.document).containsKey(fieldName)) {
-				return true;
-			}
-		} else if (this.document instanceof DBObject) {
-			if (((DBObject) this.document).containsField(fieldName)) {
-				return true;
-			}
-		}
-
-		if (!fieldName.contains(".")) {
-			return false;
-		}
-
-		String[] parts = fieldName.split("\\.");
-		Map<String, Object> source;
-
-		if (this.document instanceof Document) {
-			source = ((Document) this.document);
-		} else {
-			source = ((DBObject) this.document).toMap();
-		}
-
-		Object result = null;
-
-		for (int i = 1; i < parts.length; i++) {
-
-			result = source.get(parts[i - 1]);
-			source = getAsMap(result);
-
-			if (source == null) {
-				return false;
-			}
-		}
-
-		return source.containsKey(parts[parts.length - 1]);
-	}
-
-	/**
-	 * Returns the given source object as map, i.e. {@link Document}s and maps as is or {@literal null} otherwise.
-	 *
-	 * @param source can be {@literal null}.
-	 * @return can be {@literal null}.
-	 */
-	@Nullable
-	@SuppressWarnings("unchecked")
-	private static Map<String, Object> getAsMap(Object source) {
-
-		if (source instanceof Document) {
-			return (Document) source;
-		}
-
-		if (source instanceof BasicDBObject) {
-			return (BasicDBObject) source;
-		}
-
-		if (source instanceof Map) {
-			return (Map<String, Object>) source;
-		}
-
-		return null;
+		return BsonUtils.hasValue(document, property.getFieldName());
 	}

 	/**
@@ -1152,7 +1152,8 @@ public class QueryMapper {

 	private boolean isPathToJavaLangClassProperty(PropertyPath path) {

-		if (path.getType().equals(Class.class) && path.getLeafProperty().getOwningType().getType().equals(Class.class)) {
+		if ((path.getType() == Class.class || path.getType().equals(Object.class))
+				&& path.getLeafProperty().getType() == Class.class) {
 			return true;
 		}
 		return false;
@@ -1261,9 +1262,9 @@

 			String partial = iterator.next();

-			boolean isPositional = (isPositionalParameter(partial) && (property.isMap() || property.isCollectionLike()));
+			boolean isPositional = isPositionalParameter(partial) && property.isCollectionLike();

-			if (isPositional) {
+			if (isPositional || property.isMap()) {
 				mappedName.append(".").append(partial);
 			}
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.query;

 import java.util.regex.Pattern;

+import org.bson.BsonRegularExpression;
 import org.springframework.lang.Nullable;

 /**
@@ -102,6 +103,15 @@ public enum MongoRegexCreator {
 		}
 	}

+	/**
+	 * @param source
+	 * @return
+	 * @since 2.2.14
+	 */
+	public Object toCaseInsensitiveMatch(Object source) {
+		return source instanceof String ? new BsonRegularExpression(Pattern.quote((String) source), "i") : source;
+	}
+
 	private String prepareAndEscapeStringBeforeApplyingLikeRegex(String source, MatchMode matcherType) {

 		if (MatchMode.REGEX == matcherType) {
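A brief sketch of what the new helper yields (illustration only): Pattern.quote keeps regex metacharacters in the source value from being interpreted, and the "i" option makes the match case-insensitive.

	Object match = MongoRegexCreator.INSTANCE.toCaseInsensitiveMatch("bEAuFoRd");
	// -> BsonRegularExpression with pattern "\QbEAuFoRd\E" and options "i"

	Object untouched = MongoRegexCreator.INSTANCE.toCaseInsensitiveMatch(42);
	// non-String sources are returned unchanged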
@@ -25,7 +25,6 @@ import java.util.regex.Pattern;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
 import org.springframework.data.domain.Range;
 import org.springframework.data.domain.Range.Bound;
 import org.springframework.data.domain.Sort;
@@ -51,8 +50,10 @@ import org.springframework.data.repository.query.parser.Part;
 import org.springframework.data.repository.query.parser.Part.IgnoreCaseType;
 import org.springframework.data.repository.query.parser.Part.Type;
 import org.springframework.data.repository.query.parser.PartTree;
+import org.springframework.data.util.Streamable;
 import org.springframework.util.Assert;
 import org.springframework.util.ClassUtils;
+import org.springframework.util.ObjectUtils;

 /**
  * Custom query creator to create Mongo criterias.
@@ -196,9 +197,9 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
 			case IS_NULL:
 				return criteria.is(null);
 			case NOT_IN:
-				return criteria.nin(nextAsArray(parameters));
+				return criteria.nin(nextAsList(parameters, part));
 			case IN:
-				return criteria.in(nextAsArray(parameters));
+				return criteria.in(nextAsList(parameters, part));
 			case LIKE:
 			case STARTING_WITH:
 			case ENDING_WITH:
@@ -337,7 +338,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
 			Iterator<Object> parameters) {

 		if (property.isCollectionLike()) {
-			return criteria.in(nextAsArray(parameters));
+			return criteria.in(nextAsList(parameters, part));
 		}

 		return addAppropriateLikeRegexTo(criteria, part, parameters.next());
@@ -400,17 +401,24 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
 				String.format("Expected parameter type of %s but got %s!", type, parameter.getClass()));
 	}

-	private Object[] nextAsArray(Iterator<Object> iterator) {
+	private java.util.List<?> nextAsList(Iterator<Object> iterator, Part part) {

-		Object next = iterator.next();
-
-		if (next instanceof Collection) {
-			return ((Collection<?>) next).toArray();
-		} else if (next != null && next.getClass().isArray()) {
-			return (Object[]) next;
+		Streamable<?> streamable = asStreamable(iterator.next());
+		if (!isSimpleComparisionPossible(part)) {
+			streamable = streamable.map(MongoRegexCreator.INSTANCE::toCaseInsensitiveMatch);
 		}

-		return new Object[] { next };
+		return streamable.toList();
 	}

+	private Streamable<?> asStreamable(Object value) {
+
+		if (value instanceof Collection) {
+			return Streamable.of((Collection<?>) value);
+		} else if (ObjectUtils.isArray(value)) {
+			return Streamable.of((Object[]) value);
+		}
+		return Streamable.of(value);
+	}
+
 	private String toLikeRegex(String source, Part part) {
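For orientation, a hedged sketch (not from the commit) of how the normalized values end up in a criteria: single values, arrays and collections are all turned into a list, and each element is additionally quoted case-insensitively when the query part ignores case.

	// assumes the usual Streamable / MongoRegexCreator / Criteria imports
	List<?> values = Streamable.of(new Object[] { "bEAuFoRd", "maTTheWs" })
			.map(MongoRegexCreator.INSTANCE::toCaseInsensitiveMatch)
			.toList();

	Criteria criteria = Criteria.where("lastname").in(values);
	// {"lastname": {"$in": [/\QbEAuFoRd\E/i, /\QmaTTheWs\E/i]}}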
@@ -282,6 +282,109 @@ public class BsonUtils {
 				.orElseGet(() -> new DocumentCodec(codecRegistryProvider.getCodecRegistry())));
 	}

+	/**
+	 * Resolve a the value for a given key. If the given {@link Bson} value contains the key the value is immediately
+	 * returned. If not and the key contains a path using the dot ({@code .}) notation it will try to resolve the path by
+	 * inspecting the individual parts. If one of the intermediate ones is {@literal null} or cannot be inspected further
+	 * (wrong) type, {@literal null} is returned.
+	 *
+	 * @param bson the source to inspect. Must not be {@literal null}.
+	 * @param key the key to lookup. Must not be {@literal null}.
+	 * @return can be {@literal null}.
+	 * @since 3.0.8
+	 */
+	@Nullable
+	public static Object resolveValue(Bson bson, String key) {
+
+		Map<String, Object> source = asMap(bson);
+
+		if (source.containsKey(key) || !key.contains(".")) {
+			return source.get(key);
+		}
+
+		String[] parts = key.split("\\.");
+
+		for (int i = 1; i < parts.length; i++) {
+
+			Object result = source.get(parts[i - 1]);
+
+			if (!(result instanceof Bson)) {
+				return null;
+			}
+
+			source = asMap((Bson) result);
+		}
+
+		return source.get(parts[parts.length - 1]);
+	}
+
+	/**
+	 * Returns whether the underlying {@link Bson bson} has a value ({@literal null} or non-{@literal null}) for the given
+	 * {@code key}.
+	 *
+	 * @param bson the source to inspect. Must not be {@literal null}.
+	 * @param key the key to lookup. Must not be {@literal null}.
+	 * @return {@literal true} if no non {@literal null} value present.
+	 * @since 3.0.8
+	 */
+	public static boolean hasValue(Bson bson, String key) {
+
+		Map<String, Object> source = asMap(bson);
+
+		if (source.get(key) != null) {
+			return true;
+		}
+
+		if (!key.contains(".")) {
+			return false;
+		}
+
+		String[] parts = key.split("\\.");
+
+		Object result;
+
+		for (int i = 1; i < parts.length; i++) {
+
+			result = source.get(parts[i - 1]);
+			source = getAsMap(result);
+
+			if (source == null) {
+				return false;
+			}
+		}
+
+		return source.containsKey(parts[parts.length - 1]);
+	}
+
+	/**
+	 * Returns the given source object as map, i.e. {@link Document}s and maps as is or {@literal null} otherwise.
+	 *
+	 * @param source can be {@literal null}.
+	 * @return can be {@literal null}.
+	 */
+	@Nullable
+	@SuppressWarnings("unchecked")
+	private static Map<String, Object> getAsMap(Object source) {
+
+		if (source instanceof Document) {
+			return (Document) source;
+		}
+
+		if (source instanceof BasicDBObject) {
+			return (BasicDBObject) source;
+		}
+
+		if (source instanceof DBObject) {
+			return ((DBObject) source).toMap();
+		}
+
+		if (source instanceof Map) {
+			return (Map<String, Object>) source;
+		}
+
+		return null;
+	}
+
 	@Nullable
 	private static String toJson(@Nullable Object value) {
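A quick illustration of the two new helpers on a nested document (a sketch, not part of the diff):

	Document source = new Document("_id", "id-1")
			.append("nested", new Document("customName", "cname"));

	BsonUtils.resolveValue(source, "nested.customName");    // "cname"
	BsonUtils.resolveValue(source, "nested.customName.x");  // null - "cname" cannot be inspected further
	BsonUtils.hasValue(source, "nested.customName");        // true
	BsonUtils.hasValue(source, "nested.missing");           // false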
@@ -17,8 +17,6 @@ package org.springframework.data.mongodb.core;

 import static org.assertj.core.api.Assertions.*;

-import java.net.UnknownHostException;
-
 import org.bson.BsonDocument;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -32,11 +30,14 @@ import org.springframework.dao.InvalidDataAccessResourceUsageException;
 import org.springframework.data.mongodb.ClientSessionException;
 import org.springframework.data.mongodb.MongoTransactionException;
 import org.springframework.data.mongodb.UncategorizedMongoDbException;
+import org.springframework.lang.Nullable;

 import com.mongodb.MongoCursorNotFoundException;
 import com.mongodb.MongoException;
 import com.mongodb.MongoInternalException;
 import com.mongodb.MongoSocketException;
+import com.mongodb.MongoSocketReadTimeoutException;
+import com.mongodb.MongoSocketWriteException;
 import com.mongodb.ServerAddress;

 /**
@@ -45,18 +46,20 @@ import com.mongodb.ServerAddress;
  * @author Michal Vich
  * @author Oliver Gierke
  * @author Christoph Strobl
+ * @author Brice Vandeputte
  */
-public class MongoExceptionTranslatorUnitTests {
+class MongoExceptionTranslatorUnitTests {

-	MongoExceptionTranslator translator;
+	private static final String EXCEPTION_MESSAGE = "IOException";
+	private MongoExceptionTranslator translator;

 	@BeforeEach
-	public void setUp() {
+	void setUp() {
 		translator = new MongoExceptionTranslator();
 	}

 	@Test
-	public void translateDuplicateKey() {
+	void translateDuplicateKey() {

 		expectExceptionWithCauseMessage(
 				translator.translateExceptionIfPossible(
@@ -64,17 +67,33 @@ public class MongoExceptionTranslatorUnitTests {
 				DuplicateKeyException.class, null);
 	}

-	@Test
-	public void translateSocketException() {
+	@Test // GH-3568
+	void translateSocketException() {

 		expectExceptionWithCauseMessage(
-				translator.translateExceptionIfPossible(new MongoSocketException("IOException", new ServerAddress())),
-				DataAccessResourceFailureException.class, "IOException");
+				translator.translateExceptionIfPossible(new MongoSocketException(EXCEPTION_MESSAGE, new ServerAddress())),
+				DataAccessResourceFailureException.class, EXCEPTION_MESSAGE);
 	}

+	@Test // GH-3568
+	void translateSocketExceptionSubclasses() {
+
+		expectExceptionWithCauseMessage(
+				translator.translateExceptionIfPossible(
+						new MongoSocketWriteException("intermediate message", new ServerAddress(), new Exception(EXCEPTION_MESSAGE))
+				),
+				DataAccessResourceFailureException.class, EXCEPTION_MESSAGE);
+
+		expectExceptionWithCauseMessage(
+				translator.translateExceptionIfPossible(
+						new MongoSocketReadTimeoutException("intermediate message", new ServerAddress(), new Exception(EXCEPTION_MESSAGE))
+				),
+				DataAccessResourceFailureException.class, EXCEPTION_MESSAGE);
+
+	}
+
 	@Test
-	public void translateCursorNotFound() throws UnknownHostException {
+	void translateCursorNotFound() {

 		expectExceptionWithCauseMessage(
 				translator.translateExceptionIfPossible(new MongoCursorNotFoundException(1L, new ServerAddress())),
@@ -82,21 +101,21 @@ public class MongoExceptionTranslatorUnitTests {
 	}

 	@Test
-	public void translateToDuplicateKeyException() {
+	void translateToDuplicateKeyException() {

 		checkTranslatedMongoException(DuplicateKeyException.class, 11000);
 		checkTranslatedMongoException(DuplicateKeyException.class, 11001);
 	}

 	@Test
-	public void translateToDataAccessResourceFailureException() {
+	void translateToDataAccessResourceFailureException() {

 		checkTranslatedMongoException(DataAccessResourceFailureException.class, 12000);
 		checkTranslatedMongoException(DataAccessResourceFailureException.class, 13440);
 	}

 	@Test
-	public void translateToInvalidDataAccessApiUsageException() {
+	void translateToInvalidDataAccessApiUsageException() {

 		checkTranslatedMongoException(InvalidDataAccessApiUsageException.class, 10003);
 		checkTranslatedMongoException(InvalidDataAccessApiUsageException.class, 12001);
@@ -106,7 +125,7 @@ public class MongoExceptionTranslatorUnitTests {
 	}

 	@Test
-	public void translateToUncategorizedMongoDbException() {
+	void translateToUncategorizedMongoDbException() {

 		MongoException exception = new MongoException(0, "");
 		DataAccessException translatedException = translator.translateExceptionIfPossible(exception);
@@ -115,7 +134,7 @@ public class MongoExceptionTranslatorUnitTests {
 	}

 	@Test
-	public void translateMongoInternalException() {
+	void translateMongoInternalException() {

 		MongoInternalException exception = new MongoInternalException("Internal exception");
 		DataAccessException translatedException = translator.translateExceptionIfPossible(exception);
@@ -124,14 +143,14 @@ public class MongoExceptionTranslatorUnitTests {
 	}

 	@Test
-	public void translateUnsupportedException() {
+	void translateUnsupportedException() {

 		RuntimeException exception = new RuntimeException();
 		assertThat(translator.translateExceptionIfPossible(exception)).isNull();
 	}

 	@Test // DATAMONGO-2045
-	public void translateSessionExceptions() {
+	void translateSessionExceptions() {

 		checkTranslatedMongoException(ClientSessionException.class, 206);
 		checkTranslatedMongoException(ClientSessionException.class, 213);
@@ -140,7 +159,7 @@ public class MongoExceptionTranslatorUnitTests {
 	}

 	@Test // DATAMONGO-2045
-	public void translateTransactionExceptions() {
+	void translateTransactionExceptions() {

 		checkTranslatedMongoException(MongoTransactionException.class, 217);
 		checkTranslatedMongoException(MongoTransactionException.class, 225);
@@ -163,13 +182,13 @@ public class MongoExceptionTranslatorUnitTests {
 		assertThat(((MongoException) cause).getCode()).isEqualTo(code);
 	}

-	private static void expectExceptionWithCauseMessage(NestedRuntimeException e,
+	private static void expectExceptionWithCauseMessage(@Nullable NestedRuntimeException e,
 			Class<? extends NestedRuntimeException> type) {
 		expectExceptionWithCauseMessage(e, type, null);
 	}

-	private static void expectExceptionWithCauseMessage(NestedRuntimeException e,
-			Class<? extends NestedRuntimeException> type, String message) {
+	private static void expectExceptionWithCauseMessage(@Nullable NestedRuntimeException e,
+			Class<? extends NestedRuntimeException> type, @Nullable String message) {

 		assertThat(e).isInstanceOf(type);
@@ -84,6 +84,7 @@ import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
 import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator;
 import org.springframework.data.mongodb.core.mapping.Field;
 import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
+import org.springframework.data.mongodb.core.mapping.Sharded;
 import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener;
 import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;
 import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
@@ -1922,6 +1923,24 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
 		verify(findIterable, never()).first();
 	}

+	@Test // GH-3590
+	void shouldIncludeValueFromNestedShardKeyPath() {
+
+		WithShardKeyPointingToNested source = new WithShardKeyPointingToNested();
+		source.id = "id-1";
+		source.value = "v1";
+		source.nested = new WithNamedFields();
+		source.nested.customName = "cname";
+		source.nested.name = "name";
+
+		template.save(source);
+
+		ArgumentCaptor<Bson> filter = ArgumentCaptor.forClass(Bson.class);
+		verify(collection).replaceOne(filter.capture(), any(), any());
+
+		assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("value", "v1").append("nested.custom-named-field", "cname"));
+	}
+
 	@Test // DATAMONGO-2341
 	void saveShouldProjectOnShardKeyWhenLoadingExistingDocument() {

@@ -2267,6 +2286,13 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
 		@Field("firstname") String name;
 	}

+	@Sharded(shardKey = {"value", "nested.customName"})
+	static class WithShardKeyPointingToNested {
+		String id;
+		String value;
+		WithNamedFields nested;
+	}
+
 	/**
 	 * Mocks out the {@link MongoTemplate#getDb()} method to return the {@link DB} mock instead of executing the actual
 	 * behaviour.
@@ -1089,6 +1089,38 @@ class UpdateMapperUnitTests {
 		assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("aliased.$[element].value", 10)));
 	}

+	@Test // GH-3552
+	void numericKeyForMap() {
+
+		Update update = new Update().set("map.601218778970110001827396", "testing");
+		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+				context.getPersistentEntity(EntityWithObjectMap.class));
+
+		assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"map.601218778970110001827396\": \"testing\"}}");
+	}
+
+	@Test // GH-3552
+	void numericKeyInMapOfNestedPath() {
+
+		Update update = new Update().set("map.601218778970110001827396.value", "testing");
+		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+				context.getPersistentEntity(EntityWithObjectMap.class));
+
+		assertThat(mappedUpdate)
+				.isEqualTo("{\"$set\": {\"map.601218778970110001827396.value\": \"testing\"}}");
+	}
+
+	@Test // GH-3566
+	void mapsObjectClassPropertyFieldInMapValueTypeAsKey() {
+
+		Update update = new Update().set("map.class", "value");
+		Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
+				context.getPersistentEntity(EntityWithObjectMap.class));
+
+		assertThat(mappedUpdate)
+				.isEqualTo("{\"$set\": {\"map.class\": \"value\"}}");
+	}
+
 	static class DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes {
 		ListModelWrapper concreteTypeWithListAttributeOfInterfaceType;
 	}
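The same behaviour outside the test fixture, as a hedged sketch; EntityWithObjectMap stands in for any type exposing a Map-typed property named "map", and the usual org.springframework.data.mongodb.core imports are assumed.

	MongoMappingContext context = new MongoMappingContext();
	UpdateMapper mapper = new UpdateMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context));

	Document mapped = mapper.getMappedObject(
			new Update().set("map.601218778970110001827396", "testing").getUpdateObject(),
			context.getPersistentEntity(EntityWithObjectMap.class));
	// {"$set": {"map.601218778970110001827396": "testing"}} - the numeric key is kept as a map key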
@@ -1363,4 +1363,19 @@ public abstract class AbstractPersonRepositoryIntegrationTests {
 		assertThat(repository.findWithSpelByFirstnameForSpELExpressionWithParameterIndexOnly("Dave")).containsExactly(dave);
 		assertThat(repository.findWithSpelByFirstnameForSpELExpressionWithParameterIndexOnly("Carter")).containsExactly(carter);
 	}
+
+	@Test // GH-3395
+	void caseInSensitiveInClause() {
+		assertThat(repository.findByLastnameIgnoreCaseIn("bEAuFoRd", "maTTheWs")).hasSize(3);
+	}
+
+	@Test // GH-3395
+	void caseInSensitiveInClauseQuotesExpressions() {
+		assertThat(repository.findByLastnameIgnoreCaseIn(".*")).isEmpty();
+	}
+
+	@Test // GH-3395
+	void caseSensitiveInClauseIgnoresExpressions() {
+		assertThat(repository.findByFirstnameIn(".*")).isEmpty();
+	}
 }
@@ -125,6 +125,8 @@ public interface PersonRepository extends MongoRepository<Person, String>, Query
 	@Query("{ 'lastname' : { '$regex' : '?0', '$options' : 'i'}}")
 	Page<Person> findByLastnameLikeWithPageable(String lastname, Pageable pageable);

+	List<Person> findByLastnameIgnoreCaseIn(String... lastname);
+
 	/**
 	 * Returns all {@link Person}s with a firstname contained in the given varargs.
 	 *
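For orientation, a hedged sketch of what the derived IgnoreCaseIn method now boils down to at the criteria level (the case-sensitive findByFirstnameIn keeps comparing values literally):

	// assumes org.bson.BsonRegularExpression, java.util.regex.Pattern and Criteria imports
	Criteria.where("lastname").in(
			new BsonRegularExpression(Pattern.quote("bEAuFoRd"), "i"),
			new BsonRegularExpression(Pattern.quote("maTTheWs"), "i"));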
@@ -1886,8 +1886,6 @@ AggregationResults<TagCount> results = template.aggregate(aggregation, "tags", T

 WARNING: Indexes are only used if the collation used for the operation matches the index collation.

-include::./mongo-json-schema.adoc[leveloffset=+1]
-
 <<mongo.repositories>> support `Collations` via the `collation` attribute of the `@Query` annotation.

 .Collation support for Repositories
@@ -1928,186 +1926,7 @@ as shown in (1) and (2), will be included when creating the index.
 TIP: The most specifc `Collation` outroules potentially defined others. Which means Method argument over query method annotation over doamin type annotation.
 ====

-[[mongo.jsonSchema]]
-=== JSON Schema
-
-As of version 3.6, MongoDB supports collections that validate documents against a provided https://docs.mongodb.com/manual/core/schema-validation/#json-schema[JSON Schema].
-The schema itself and both validation action and level can be defined when creating the collection, as the following example shows:
-
-.Sample JSON schema
-====
-[source,json]
-----
-{
-  "type": "object", <1>
-
-  "required": [ "firstname", "lastname" ], <2>
-
-  "properties": { <3>
-
-    "firstname": { <4>
-      "type": "string",
-      "enum": [ "luke", "han" ]
-    },
-    "address": { <5>
-      "type": "object",
-      "properties": {
-        "postCode": { "type": "string", "minLength": 4, "maxLength": 5 }
-      }
-    }
-  }
-}
-----
-<1> JSON schema documents always describe a whole document from its root. A schema is a schema object itself that can contain
-embedded schema objects that describe properties and subdocuments.
-<2> `required` is a property that describes which properties are required in a document. It can be specified optionally, along with other
-schema constraints. See MongoDB's documentation on https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#available-keywords[available keywords].
-<3> `properties` is related to a schema object that describes an `object` type. It contains property-specific schema constraints.
-<4> `firstname` specifies constraints for the `firsname` field inside the document. Here, it is a string-based `properties` element declaring
-possible field values.
-<5> `address` is a subdocument defining a schema for values in its `postCode` field.
-====
-
-You can provide a schema either by specifying a schema document (that is, by using the `Document` API to parse or build a document object) or by building it with Spring Data's JSON schema utilities in `org.springframework.data.mongodb.core.schema`. `MongoJsonSchema` is the entry point for all JSON schema-related operations. The following example shows how use `MongoJsonSchema.builder()` to create a JSON schema:
-
-.Creating a JSON schema
-====
-[source,java]
-----
-MongoJsonSchema.builder() <1>
-    .required("firstname", "lastname") <2>
-
-    .properties(
-        string("firstname").possibleValues("luke", "han"), <3>
-
-        object("address")
-            .properties(string("postCode").minLength(4).maxLength(5)))
-
-    .build(); <4>
-----
-<1> Obtain a schema builder to configure the schema with a fluent API.
-<2> Configure required properties.
-<3> Configure the String-typed `firstname` field, allowing only `luke` and `han` values. Properties can be typed or untyped. Use a static import of `JsonSchemaProperty` to make the syntax slightly more compact and to get entry points such as `string(…)`.
-<4> Build the schema object. Use the schema to create either a collection or <<mongodb-template-query.criteria,query documents>>.
-====
-
-There are already some predefined and strongly typed schema objects (`JsonSchemaObject` and `JsonSchemaProperty`) available
-through static methods on the gateway interfaces.
-However, you may need to build custom property validation rules, which can be created through the builder API, as the following example shows:
-
-[source,java]
-----
-// "birthdate" : { "bsonType": "date" }
-JsonSchemaProperty.named("birthdate").ofType(Type.dateType());
-
-// "birthdate" : { "bsonType": "date", "description", "Must be a date" }
-JsonSchemaProperty.named("birthdate").with(JsonSchemaObject.of(Type.dateType()).description("Must be a date"));
-----
-
-The Schema builder also provides support for https://docs.mongodb.com/manual/core/security-client-side-encryption/[Client-Side Field Level Encryption]. Please refer to <<mongo.jsonSchema.encrypted-fields>> for more information,
-
-`CollectionOptions` provides the entry point to schema support for collections, as the following example shows:
-
-.Create collection with `$jsonSchema`
-====
-[source,java]
-----
-MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build();
-
-template.createCollection(Person.class, CollectionOptions.empty().schema(schema));
-----
-====
-
-You can use a schema to query any collection for documents that match a given structure defined by a JSON schema, as the following example shows:
-
-.Query for Documents matching a `$jsonSchema`
-====
-[source,java]
-----
-MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build();
-
-template.find(query(matchingDocumentStructure(schema)), Person.class);
-----
-====
-
-The following table shows the supported JSON schema types:
-
-[cols="3,1,6", options="header"]
-.Supported JSON schema types
-|===
-| Schema Type
-| Java Type
-| Schema Properties
-
-| `untyped`
-| -
-| `description`, generated `description`, `enum`, `allOf`, `anyOf`, `oneOf`, `not`
-
-| `object`
-| `Object`
-| `required`, `additionalProperties`, `properties`, `minProperties`, `maxProperties`, `patternProperties`
-
-| `array`
-| any array except `byte[]`
-| `uniqueItems`, `additionalItems`, `items`, `minItems`, `maxItems`
-
-| `string`
-| `String`
-| `minLength`, `maxLentgth`, `pattern`
-
-| `int`
-| `int`, `Integer`
-| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`
-
-| `long`
-| `long`, `Long`
-| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`
-
-| `double`
-| `float`, `Float`, `double`, `Double`
-| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`
-
-| `decimal`
-| `BigDecimal`
-| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`
-
-| `number`
-| `Number`
-| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum`
-
-| `binData`
-| `byte[]`
-| (none)
-
-| `boolean`
-| `boolean`, `Boolean`
-| (none)
-
-| `null`
-| `null`
-| (none)
-
-| `objectId`
-| `ObjectId`
-| (none)
-
-| `date`
-| `java.util.Date`
-| (none)
-
-| `timestamp`
-| `BsonTimestamp`
-| (none)
-
-| `regex`
-| `java.util.regex.Pattern`
-| (none)
-
-|===
-
-NOTE: `untyped` is a generic type that is inherited by all typed schema types. It provides all `untyped` schema properties to typed schema types.
-
-For more information, see https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#op._S_jsonSchema[$jsonSchema].
+include::./mongo-json-schema.adoc[leveloffset=+1]

 [[mongo.query.fluent-template-api]]
 === Fluent Template API
@@ -1,6 +1,53 @@
 Spring Data MongoDB Changelog
 =============================

+Changes in version 3.1.6 (2021-03-17)
+-------------------------------------
+* #3592 - Remove @Persistent from entity-scan include filters.
+* #3590 - Embedded sharding keys are not correctly picked up from the shardKeySource Document.
+* #3589 - Upgrade to MongoDB Driver 4.1.2.
+* #3573 - Json Schema section appears twice in reference documentation.
+* #3568 - MongoSocketWriteException may be translated into DataAccessResourceFailureException.
+* #3566 - Couldn't find PersistentEntity for type java.lang.Object when updating a field with suffix "class".
+* #3552 - UpdateMapper drops numeric keys in Maps.
+* #3395 - Derived findBy…IgnoreCaseIn query doesn't return expected results [DATAMONGO-2540].
+
+
+Changes in version 3.0.8.RELEASE (2021-03-17)
+---------------------------------------------
+* #3590 - Embedded sharding keys are not correctly picked up from the shardKeySource Document.
+* #3588 - Upgrade to MongoDB Driver 4.0.6.
+* #3573 - Json Schema section appears twice in reference documentation.
+* #3568 - MongoSocketWriteException may be translated into DataAccessResourceFailureException.
+* #3566 - Couldn't find PersistentEntity for type java.lang.Object when updating a field with suffix "class".
+* #3552 - UpdateMapper drops numeric keys in Maps.
+* #3395 - Derived findBy…IgnoreCaseIn query doesn't return expected results [DATAMONGO-2540].
+
+
+Changes in version 3.2.0-M4 (2021-02-18)
+----------------------------------------
+
+
+Changes in version 3.1.5 (2021-02-18)
+-------------------------------------
+
+
+Changes in version 3.2.0-M3 (2021-02-17)
+----------------------------------------
+* #3553 - Upgrade to MongoDB driver 4.2.0.
+* #3546 - org.bson.codecs.configuration.CodecConfigurationException: The uuidRepresentation has not been specified, so the UUID cannot be encoded.
+* #3544 - alike Criteria can't add andOperator.
+* #3542 - Relax field name checks for TypedAggregations.
+* #3540 - Allow access to mongoDatabaseFactory used in ReactiveMongoTemplate.
+* #3529 - Update repository after GitHub issues migration.
+* #3525 - Bug in full text query documentation [DATAMONGO-2673].
+* #3517 - GeoJson: Improper Deserialization of Document with a GeoJsonPolygon [DATAMONGO-2664].
+* #3508 - Add ReactiveMongoOperations.aggregate(…) Kotlin extension [DATAMONGO-2655].
+* #3474 - Search by alike() criteria is broken when type alias information is not available [DATAMONGO-2620].
+* #3055 - Improve count() and countDocuments() mapping documentation and/or method availability [DATAMONGO-2192].
+* #2803 - Support flattening embedded/nested objects [DATAMONGO-1902].
+
+
 Changes in version 3.1.4 (2021-02-17)
 -------------------------------------
 * #3546 - org.bson.codecs.configuration.CodecConfigurationException: The uuidRepresentation has not been specified, so the UUID cannot be encoded.
@@ -3311,6 +3358,11 @@ Repository
@@ -1,4 +1,4 @@
-Spring Data MongoDB 3.1.4 (2020.0.4)
+Spring Data MongoDB 3.1.6 (2020.0.6)
 Copyright (c) [2010-2019] Pivotal Software, Inc.

 This product is licensed to you under the Apache License, Version 2.0 (the "License").
@@ -24,3 +24,5 @@ conditions of the subcomponent's license, as noted in the LICENSE file.



+
+