Avoid capturing lambdas, update javadoc and add tests.

Also allow direct usage of (at)Reference from data commons to define associations.

Original pull request: #3647.
Closes #3602.
Christoph Strobl
2021-05-19 11:30:05 +02:00
committed by Mark Paluch
parent 82af678cab
commit e96ef8e18f
16 changed files with 748 additions and 190 deletions

View File

@@ -19,40 +19,45 @@ import static org.springframework.data.mongodb.core.convert.ReferenceLookupDeleg
import java.util.Collections;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.DocumentReference;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
/**
* {@link ReferenceResolver} implementation that uses a given {@link ReferenceLookupDelegate} to load and convert entity
* associations expressed via a {@link MongoPersistentProperty persistent property}. Creates {@link LazyLoadingProxy
* proxies} for associations that should be lazily loaded.
*
* @author Christoph Strobl
*/
public class DefaultReferenceResolver implements ReferenceResolver {
private final ReferenceLoader referenceLoader;
private final LookupFunction collectionLookupFunction = (filter, ctx) -> getReferenceLoader().fetchMany(filter, ctx);
private final LookupFunction singleValueLookupFunction = (filter, ctx) -> {
Object target = getReferenceLoader().fetchOne(filter, ctx);
return target == null ? Collections.emptyList() : Collections.singleton(target);
};
/**
* Create a new instance of {@link DefaultReferenceResolver}.
*
* @param referenceLoader must not be {@literal null}.
*/
public DefaultReferenceResolver(ReferenceLoader referenceLoader) {
Assert.notNull(referenceLoader, "ReferenceLoader must not be null!");
this.referenceLoader = referenceLoader;
}
@Override
public ReferenceLoader getReferenceLoader() {
return referenceLoader;
}
@Nullable
@Override
public Object resolveReference(MongoPersistentProperty property, Object source,
ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) {
LookupFunction lookupFunction = (filter, ctx) -> {
if (property.isCollectionLike() || property.isMap()) {
return getReferenceLoader().fetchMany(filter, ctx);
}
Object target = getReferenceLoader().fetchOne(filter, ctx);
return target == null ? Collections.emptyList()
: Collections.singleton(getReferenceLoader().fetchOne(filter, ctx));
};
LookupFunction lookupFunction = (property.isCollectionLike() || property.isMap()) ? collectionLookupFunction
: singleValueLookupFunction;
if (isLazyReference(property)) {
return createLazyLoadingProxy(property, source, referenceLookupDelegate, lookupFunction, entityReader);
@@ -61,13 +66,14 @@ public class DefaultReferenceResolver implements ReferenceResolver {
return referenceLookupDelegate.readReference(property, source, lookupFunction, entityReader);
}
private Object createLazyLoadingProxy(MongoPersistentProperty property, Object source,
ReferenceLookupDelegate referenceLookupDelegate, LookupFunction lookupFunction,
MongoEntityReader entityReader) {
return new LazyLoadingProxyFactory(referenceLookupDelegate).createLazyLoadingProxy(property, source, lookupFunction,
entityReader);
}
/**
* Check if the association expressed by the given {@link MongoPersistentProperty property} should be resolved lazily.
*
* @param property must not be {@literal null}.
* @return {@literal true} if the defined association is lazy.
* @see DBRef#lazy()
* @see DocumentReference#lazy()
*/
protected boolean isLazyReference(MongoPersistentProperty property) {
if (property.isDocumentReference()) {
@@ -76,4 +82,19 @@ public class DefaultReferenceResolver implements ReferenceResolver {
return property.getDBRef() != null && property.getDBRef().lazy();
}
/**
* The {@link ReferenceLoader} executing the lookup.
*
* @return never {@literal null}.
*/
protected ReferenceLoader getReferenceLoader() {
return referenceLoader;
}
private Object createLazyLoadingProxy(MongoPersistentProperty property, Object source,
ReferenceLookupDelegate referenceLookupDelegate, LookupFunction lookupFunction, MongoEntityReader entityReader) {
return new LazyLoadingProxyFactory(referenceLookupDelegate).createLazyLoadingProxy(property, source, lookupFunction,
entityReader);
}
}
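
The refactoring above replaces a lambda that captured the resolved property on every resolveReference call with two pre-computed LookupFunction fields that are merely selected per call. A minimal, self-contained sketch of the underlying pattern, using java.util.function.Supplier instead of the actual LookupFunction type; the class and method names are illustrative and not part of the commit:

import java.util.function.Supplier;

class CapturingVsNonCapturingSketch {

	// Stateless lambdas: created once, reused for every resolution.
	private static final Supplier<String> MANY = () -> "fetch many";
	private static final Supplier<String> ONE = () -> "fetch one";

	// Captures the `property` argument, so a fresh lambda instance is typically allocated per call.
	static Supplier<String> capturing(String property) {
		return () -> "lookup for " + property;
	}

	// Merely selects one of the pre-built instances; no per-call allocation.
	static Supplier<String> nonCapturing(boolean collectionLike) {
		return collectionLike ? MANY : ONE;
	}

	public static void main(String[] args) {
		System.out.println(capturing("a") == capturing("a"));         // typically false
		System.out.println(nonCapturing(true) == nonCapturing(true)); // true
	}
}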

View File

@@ -15,18 +15,20 @@
*/
package org.springframework.data.mongodb.core.convert;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.WeakHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.bson.Document;
import org.springframework.core.convert.ConversionService;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.data.annotation.Reference;
import org.springframework.data.mapping.PersistentPropertyAccessor;
import org.springframework.data.mapping.PersistentPropertyPath;
import org.springframework.data.mapping.PropertyPath;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.model.BeanWrapperPropertyAccessorFactory;
import org.springframework.data.mongodb.core.mapping.DocumentPointer;
@@ -34,6 +36,10 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
/**
* Internal API to construct {@link DocumentPointer} for a given property. Considers {@link LazyLoadingProxy},
* registered {@link Object} to {@link DocumentPointer} {@link org.springframework.core.convert.converter.Converter},
* simple {@literal _id} lookups and cases where the {@link DocumentPointer} needs to be computed via a lookup query.
*
* @author Christoph Strobl
* @since 3.3
*/
@@ -41,17 +47,29 @@ class DocumentPointerFactory {
private final ConversionService conversionService;
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
private final Map<String, LinkageDocument> linkageMap;
private final Map<String, LinkageDocument> cache;
public DocumentPointerFactory(ConversionService conversionService,
/**
* A {@link Pattern} matching quoted and unquoted variants (with or without whitespace) of
* <code>{'_id' : ?#{#target} }</code>.
*/
private static final Pattern DEFAULT_LOOKUP_PATTERN = Pattern.compile("\\{\\s?" + // document start (whitespace opt)
"['\"]?_id['\"]?" + // followed by an optionally quoted _id. Like: _id, '_id' or "_id"
"?\\s?:\\s?" + // then a colon optionally wrapped inside whitespaces
"['\"]?\\?#\\{#target\\}['\"]?" + // leading to the potentially quoted ?#{#target} expression
"\\s*}"); // some optional whitespaces and document close
DocumentPointerFactory(ConversionService conversionService,
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
this.conversionService = conversionService;
this.mappingContext = mappingContext;
this.linkageMap = new HashMap<>();
this.cache = new WeakHashMap<>();
}
public DocumentPointer<?> computePointer(MongoPersistentProperty property, Object value, Class<?> typeHint) {
DocumentPointer<?> computePointer(
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
MongoPersistentProperty property, Object value, Class<?> typeHint) {
if (value instanceof LazyLoadingProxy) {
return () -> ((LazyLoadingProxy) value).getSource();
@@ -59,92 +77,161 @@ class DocumentPointerFactory {
if (conversionService.canConvert(typeHint, DocumentPointer.class)) {
return conversionService.convert(value, DocumentPointer.class);
} else {
}
MongoPersistentEntity<?> persistentEntity = mappingContext
.getRequiredPersistentEntity(property.getAssociationTargetType());
MongoPersistentEntity<?> persistentEntity = mappingContext
.getRequiredPersistentEntity(property.getAssociationTargetType());
// TODO: Extract method
if (!property.getDocumentReference().lookup().toLowerCase(Locale.ROOT).replaceAll("\\s", "").replaceAll("'", "")
.equals("{_id:?#{#target}}")) {
MongoPersistentEntity<?> valueEntity = mappingContext.getPersistentEntity(value.getClass());
PersistentPropertyAccessor<Object> propertyAccessor;
if (valueEntity == null) {
propertyAccessor = BeanWrapperPropertyAccessorFactory.INSTANCE.getPropertyAccessor(property.getOwner(),
value);
} else {
propertyAccessor = valueEntity.getPropertyAccessor(value);
}
return () -> linkageMap.computeIfAbsent(property.getDocumentReference().lookup(), LinkageDocument::new)
.get(persistentEntity, propertyAccessor);
}
// just take the id as a reference
if (usesDefaultLookup(property)) {
return () -> persistentEntity.getIdentifierAccessor(value).getIdentifier();
}
MongoPersistentEntity<?> valueEntity = mappingContext.getPersistentEntity(value.getClass());
PersistentPropertyAccessor<Object> propertyAccessor;
if (valueEntity == null) {
propertyAccessor = BeanWrapperPropertyAccessorFactory.INSTANCE.getPropertyAccessor(property.getOwner(), value);
} else {
propertyAccessor = valueEntity.getPropertyPathAccessor(value);
}
return cache.computeIfAbsent(property.getDocumentReference().lookup(), LinkageDocument::from)
.getDocumentPointer(mappingContext, persistentEntity, propertyAccessor);
}
private boolean usesDefaultLookup(MongoPersistentProperty property) {
if (property.isDocumentReference()) {
return DEFAULT_LOOKUP_PATTERN.matcher(property.getDocumentReference().lookup()).matches();
}
Reference atReference = property.findAnnotation(Reference.class);
if (atReference != null) {
return true;
}
throw new IllegalStateException(String.format("%s does not seem to define a reference.", property));
}
/**
* Value object that computes a document pointer from a given lookup query by identifying SpEL expressions and
* inverting it.
*
* <pre class="code">
* // source
* { 'firstname' : ?#{fn}, 'lastname' : ?#{ln} }
*
* // target
* { 'fn' : ..., 'ln' : ... }
* </pre>
*
* The actual pointer is then computed via
* {@link #getDocumentPointer(MappingContext, MongoPersistentEntity, PersistentPropertyAccessor)} applying values from
* the provided {@link PersistentPropertyAccessor} to the target document by looking at the keys of the expressions
* from the source.
*/
static class LinkageDocument {
static final Pattern pattern = Pattern.compile("\\?#\\{#?[\\w\\d]*\\}");
static final Pattern EXPRESSION_PATTERN = Pattern.compile("\\?#\\{#?(?<fieldName>[\\w\\d\\.\\-)]*)\\}");
static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("###_(?<index>\\d*)_###");
String lookup;
org.bson.Document fetchDocument;
Map<Integer, String> mapMap;
private final String lookup;
private final org.bson.Document documentPointer;
private final Map<String, String> placeholderMap;
public LinkageDocument(String lookup) {
static LinkageDocument from(String lookup) {
return new LinkageDocument(lookup);
}
private LinkageDocument(String lookup) {
this.lookup = lookup;
this.placeholderMap = new LinkedHashMap<>();
int index = 0;
Matcher matcher = EXPRESSION_PATTERN.matcher(lookup);
String targetLookup = lookup;
Matcher matcher = pattern.matcher(lookup);
int index = 0;
mapMap = new LinkedHashMap<>();
// TODO: Make explicit what's happening here
while (matcher.find()) {
String expr = matcher.group();
String sanitized = expr.substring(0, expr.length() - 1).replace("?#{#", "").replace("?#{", "")
.replace("target.", "").replaceAll("'", "");
mapMap.put(index, sanitized);
targetLookup = targetLookup.replace(expr, index + "");
String expression = matcher.group();
String fieldName = matcher.group("fieldName").replace("target.", "");
String placeholder = placeholder(index);
placeholderMap.put(placeholder, fieldName);
targetLookup = targetLookup.replace(expression, "'" + placeholder + "'");
index++;
}
fetchDocument = org.bson.Document.parse(targetLookup);
this.documentPointer = org.bson.Document.parse(targetLookup);
}
org.bson.Document get(MongoPersistentEntity<?> persistentEntity, PersistentPropertyAccessor<?> propertyAccessor) {
private String placeholder(int index) {
return "###_" + index + "_###";
}
org.bson.Document targetDocument = new Document();
private boolean isPlaceholder(String key) {
return PLACEHOLDER_PATTERN.matcher(key).matches();
}
// TODO: recursive matching over nested Documents or would the parameter binding json parser be a thing?
// like we have it ordered by index values and could provide the parameter array from it.
DocumentPointer<Object> getDocumentPointer(
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
MongoPersistentEntity<?> persistentEntity, PersistentPropertyAccessor<?> propertyAccessor) {
return () -> updatePlaceholders(documentPointer, new Document(), mappingContext, persistentEntity,
propertyAccessor);
}
for (Entry<String, Object> entry : fetchDocument.entrySet()) {
Document updatePlaceholders(org.bson.Document source, org.bson.Document target,
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
MongoPersistentEntity<?> persistentEntity, PersistentPropertyAccessor<?> propertyAccessor) {
if (entry.getKey().equals("target")) {
for (Entry<String, Object> entry : source.entrySet()) {
String refKey = mapMap.get(entry.getValue());
if (entry.getKey().startsWith("$")) {
throw new InvalidDataAccessApiUsageException(String.format(
"Cannot derive document pointer from lookup '%s' using query operator (%s). Please consider registering a custom converter.",
lookup, entry.getKey()));
}
if (persistentEntity.hasIdProperty()) {
targetDocument.put(refKey, propertyAccessor.getProperty(persistentEntity.getIdProperty()));
if (entry.getValue() instanceof Document) {
MongoPersistentProperty persistentProperty = persistentEntity.getPersistentProperty(entry.getKey());
if (persistentProperty != null && persistentProperty.isEntity()) {
MongoPersistentEntity<?> nestedEntity = mappingContext.getPersistentEntity(persistentProperty.getType());
target.put(entry.getKey(), updatePlaceholders((Document) entry.getValue(), new Document(), mappingContext,
nestedEntity, nestedEntity.getPropertyAccessor(propertyAccessor.getProperty(persistentProperty))));
} else {
targetDocument.put(refKey, propertyAccessor.getBean());
target.put(entry.getKey(), updatePlaceholders((Document) entry.getValue(), new Document(), mappingContext,
persistentEntity, propertyAccessor));
}
continue;
}
Object target = propertyAccessor.getProperty(persistentEntity.getPersistentProperty(entry.getKey()));
String refKey = mapMap.get(entry.getValue());
targetDocument.put(refKey, target);
if (placeholderMap.containsKey(entry.getValue())) {
String attribute = placeholderMap.get(entry.getValue());
if (attribute.contains(".")) {
attribute = attribute.substring(attribute.lastIndexOf('.') + 1);
}
String fieldName = entry.getKey().equals("_id") ? "id" : entry.getKey();
if (!fieldName.contains(".")) {
Object targetValue = propertyAccessor.getProperty(persistentEntity.getPersistentProperty(fieldName));
target.put(attribute, targetValue);
continue;
}
PersistentPropertyPath<?> path = mappingContext
.getPersistentPropertyPath(PropertyPath.from(fieldName, persistentEntity.getTypeInformation()));
Object targetValue = propertyAccessor.getProperty(path);
target.put(attribute, targetValue);
continue;
}
target.put(entry.getKey(), entry.getValue());
}
return targetDocument;
return target;
}
}
}
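
To make the inversion performed by LinkageDocument concrete: a custom lookup such as { 'firstname' : ?#{fn}, 'lastname' : ?#{ln} } is first made parseable by swapping each SpEL expression for a placeholder, remembering which placeholder maps to which field so the pointer document can later be populated from the entity. A simplified, self-contained sketch; the regex, placeholder format and class name are illustrative, not the factory's actual code:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.bson.Document;

class LookupInversionSketch {

	// Matches expressions like ?#{fn} or ?#{#target} and captures the referenced field name.
	static final Pattern EXPRESSION = Pattern.compile("\\?#\\{#?(?<fieldName>[\\w.\\-]*)}");

	public static void main(String[] args) {

		String lookup = "{ 'firstname' : ?#{fn}, 'lastname' : ?#{ln} }";

		Map<String, String> placeholders = new LinkedHashMap<>();
		Matcher matcher = EXPRESSION.matcher(lookup);
		String parseable = lookup;
		int index = 0;

		while (matcher.find()) {
			String marker = "###_" + index++ + "_###";
			placeholders.put(marker, matcher.group("fieldName"));
			parseable = parseable.replace(matcher.group(), "'" + marker + "'");
		}

		// { "firstname" : "###_0_###", "lastname" : "###_1_###" } plus the marker -> fn / ln mapping
		Document template = Document.parse(parseable);
		System.out.println(template.toJson() + " " + placeholders);
	}
}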

View File

@@ -45,6 +45,7 @@ import org.springframework.context.ApplicationContextAware;
import org.springframework.core.CollectionFactory;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.data.annotation.Reference;
import org.springframework.data.convert.TypeMapper;
import org.springframework.data.mapping.Association;
import org.springframework.data.mapping.MappingException;
@@ -526,7 +527,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return;
}
if (property.isDocumentReference()) {
if (property.isDocumentReference() || (!property.isDbReference() && property.findAnnotation(Reference.class) != null)) {
// quite unusual, but sounds like it is worth having?
@@ -587,43 +588,46 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return createDBRef(object, referringProperty);
}
public Object toDocumentReference(Object source, @Nullable MongoPersistentProperty referringProperty) {
@Override
public DocumentPointer toDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) {
if (source instanceof LazyLoadingProxy) {
return ((LazyLoadingProxy) source).getSource();
return () -> ((LazyLoadingProxy) source).getSource();
}
if (referringProperty != null) {
Assert.notNull(referringProperty, "Cannot create DocumentReference. The referringProperty must not be null!");
if (referringProperty.isDbReference()) {
return toDBRef(source, referringProperty);
return () -> toDBRef(source, referringProperty);
}
if (referringProperty.isDocumentReference()) {
if (referringProperty.isDocumentReference() || referringProperty.findAnnotation(Reference.class) != null) {
return createDocumentPointer(source, referringProperty);
}
}
throw new RuntimeException("oops - what's that " + source);
throw new IllegalArgumentException("The referringProperty is neither a DBRef nor a document reference");
}
Object createDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) {
DocumentPointer<?> createDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) {
if (referringProperty == null) {
return source;
return () -> source;
}
if(source instanceof DocumentPointer) {
return (DocumentPointer<?>) source;
}
if (ClassUtils.isAssignableValue(referringProperty.getType(), source)
&& conversionService.canConvert(referringProperty.getType(), DocumentPointer.class)) {
return conversionService.convert(source, DocumentPointer.class).getPointer();
return conversionService.convert(source, DocumentPointer.class);
}
if (ClassUtils.isAssignableValue(referringProperty.getAssociationTargetType(), source)) {
return documentPointerFactory.computePointer(referringProperty, source, referringProperty.getActualType())
.getPointer();
return documentPointerFactory.computePointer(mappingContext, referringProperty, source, referringProperty.getActualType());
}
return source;
return () -> source;
}
/**
@@ -813,7 +817,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
if (prop.isAssociation()) {
accessor.put(prop, new DocumentPointerFactory(conversionService, mappingContext)
.computePointer(prop, obj, valueType.getType()).getPointer());
.computePointer(mappingContext, prop, obj, valueType.getType()).getPointer());
return;
}
@@ -864,13 +868,14 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return mappingContext.getPersistentEntity(property.getAssociationTargetType()).getIdentifierAccessor(it)
.getIdentifier();
}
}).collect(Collectors.toList()), ClassTypeInformation.from(DocumentPointer.class), new BasicDBList());
}).collect(Collectors.toList()), ClassTypeInformation.from(DocumentPointer.class), new ArrayList<>());
}
if (property.hasExplicitWriteTarget()) {
return writeCollectionInternal(collection, new FieldTypeInformation<>(property), new ArrayList<>());
}
return writeCollectionInternal(collection, property.getTypeInformation(), new BasicDBList());
return writeCollectionInternal(collection, property.getTypeInformation(), new ArrayList<>());
}
List<Object> dbList = new ArrayList<>(collection.size());
@@ -960,7 +965,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
collection.add(getPotentiallyConvertedSimpleWrite(element,
componentType != null ? componentType.getType() : Object.class));
} else if (element instanceof Collection || elementType.isArray()) {
collection.add(writeCollectionInternal(BsonUtils.asCollection(element), componentType, new BasicDBList()));
collection.add(writeCollectionInternal(BsonUtils.asCollection(element), componentType, new ArrayList<>()));
} else {
Document document = new Document();
writeInternal(element, document, componentType);
@@ -992,7 +997,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
writeSimpleInternal(val, bson, simpleKey);
} else if (val instanceof Collection || val.getClass().isArray()) {
BsonUtils.addToMap(bson, simpleKey,
writeCollectionInternal(BsonUtils.asCollection(val), propertyType.getMapValueType(), new BasicDBList()));
writeCollectionInternal(BsonUtils.asCollection(val), propertyType.getMapValueType(), new ArrayList<>()));
} else {
Document document = new Document();
TypeInformation<?> valueTypeInfo = propertyType.isMap() ? propertyType.getMapValueType()

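With the MappingMongoConverter changes above, a plain Spring Data Commons @Reference property is treated like a default @DocumentReference, so only the identifier of the linked entity is written to the owning document. A hypothetical domain model illustrating the stored shape (it mirrors the UsingAtReference test added further below):

import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Reference;

class Book {

	@Id String id;
	String title;

	// Persisted as { ..., "publisher" : "p-1" } - just the id of the linked Publisher.
	@Reference Publisher publisher;
}

class Publisher {

	@Id String id; // e.g. "p-1"
	String name;
}
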
View File

@@ -27,6 +27,9 @@ import org.springframework.util.StringUtils;
import com.mongodb.client.MongoCollection;
/**
* {@link ReferenceLoader} implementation using a {@link MongoDatabaseFactory} to obtain raw {@link Document documents}
* for linked entities via a {@link ReferenceLoader.DocumentReferenceQuery}.
*
* @author Christoph Strobl
*/
public class MongoDatabaseFactoryReferenceLoader implements ReferenceLoader {
@@ -35,6 +38,9 @@ public class MongoDatabaseFactoryReferenceLoader implements ReferenceLoader {
private final MongoDatabaseFactory mongoDbFactory;
/**
* @param mongoDbFactory must not be {@literal null}.
*/
public MongoDatabaseFactoryReferenceLoader(MongoDatabaseFactory mongoDbFactory) {
Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!");
@@ -43,20 +49,27 @@ public class MongoDatabaseFactoryReferenceLoader implements ReferenceLoader {
}
@Override
public Iterable<Document> fetchMany(DocumentReferenceQuery filter, ReferenceCollection context) {
public Iterable<Document> fetchMany(DocumentReferenceQuery referenceQuery, ReferenceCollection context) {
MongoCollection<Document> collection = getCollection(context);
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Bulk fetching {} from {}.{}.", filter,
LOGGER.trace("Bulk fetching {} from {}.{}.", referenceQuery,
StringUtils.hasText(context.getDatabase()) ? context.getDatabase()
: collection.getNamespace().getDatabaseName(),
context.getCollection());
}
return filter.apply(collection);
return referenceQuery.apply(collection);
}
/**
* Obtain the {@link MongoCollection} for a given {@link ReferenceCollection} from the underlying
* {@link MongoDatabaseFactory}.
*
* @param context must not be {@literal null}.
* @return the {@link MongoCollection} targeted by the {@link ReferenceCollection}.
*/
protected MongoCollection<Document> getCollection(ReferenceCollection context) {
return MongoDatabaseUtils.getDatabase(context.getDatabase(), mongoDbFactory).getCollection(context.getCollection(),

View File

@@ -17,6 +17,8 @@ package org.springframework.data.mongodb.core.convert;
import org.bson.conversions.Bson;
import org.springframework.data.convert.EntityWriter;
import org.springframework.data.mongodb.core.mapping.DocumentPointer;
import org.springframework.data.mongodb.core.mapping.DocumentReference;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.util.TypeInformation;
@@ -61,6 +63,7 @@ public interface MongoWriter<T> extends EntityWriter<T, Bson> {
default Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity<?> entity) {
return convertToMongoType(obj, entity.getTypeInformation());
}
/**
* Creates a {@link DBRef} to refer to the given object.
*
@@ -71,7 +74,16 @@ public interface MongoWriter<T> extends EntityWriter<T, Bson> {
*/
DBRef toDBRef(Object object, @Nullable MongoPersistentProperty referingProperty);
default Object toDocumentReference(Object source, @Nullable MongoPersistentProperty referringProperty) {
return toDBRef(source, referringProperty);
/**
* Creates the {@link DocumentPointer} representing the link to another entity.
*
* @param source the object to create a document link to.
* @param referringProperty the client-side property referring to the object which might carry additional metadata for
* the {@link DocumentPointer} to create. Can be {@literal null}.
* @return will never be {@literal null}.
* @since 3.3
*/
default DocumentPointer<?> toDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) {
return () -> toDBRef(source, referringProperty);
}
}
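
DocumentPointer itself is a supplier-style abstraction over the raw value persisted for an association, which is why the default toDocumentPointer implementation can simply wrap toDBRef in a lambda. A small usage sketch (illustrative only; the id value is made up):

import org.springframework.data.mongodb.core.mapping.DocumentPointer;

class DocumentPointerUsage {

	// Whatever getPointer() yields is what ends up in the stored document:
	// an id, a DBRef or a computed lookup document.
	static Object rawValueToStore(DocumentPointer<?> pointer) {
		return pointer.getPointer();
	}

	public static void main(String[] args) {
		DocumentPointer<String> pointer = () -> "p-1"; // points to the entity with id "p-1"
		System.out.println(rawValueToStore(pointer));
	}
}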

View File

@@ -77,9 +77,4 @@ public enum NoOpDbRefResolver implements DbRefResolver {
ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) {
return null;
}
@Override
public ReferenceLoader getReferenceLoader() {
return handle();
}
}

View File

@@ -26,6 +26,7 @@ import org.bson.conversions.Bson;
import org.bson.types.ObjectId;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.annotation.Reference;
import org.springframework.data.domain.Example;
import org.springframework.data.mapping.Association;
import org.springframework.data.mapping.MappingException;
@@ -672,8 +673,8 @@ public class QueryMapper {
return (DBRef) source;
}
if(property != null && property.isDocumentReference()) {
return converter.toDocumentReference(source, property);
if(property != null && (property.isDocumentReference() || (!property.isDbReference() && property.findAnnotation(Reference.class) != null))) {
return converter.toDocumentPointer(source, property).getPointer();
}
return converter.toDBRef(source, property);

View File

@@ -26,33 +26,70 @@ import org.springframework.lang.Nullable;
import com.mongodb.client.MongoCollection;
/**
* The {@link ReferenceLoader} obtains raw {@link Document documents} for linked entities via a
* {@link ReferenceLoader.DocumentReferenceQuery}.
*
* @author Christoph Strobl
* @since 3.3
*/
public interface ReferenceLoader {
/**
* Obtain a single {@link Document} matching the given {@literal referenceQuery} in the {@literal context}.
*
* @param referenceQuery must not be {@literal null}.
* @param context must not be {@literal null}.
* @return the matching {@link Document} or {@literal null} if none found.
*/
@Nullable
default Document fetchOne(DocumentReferenceQuery filter, ReferenceCollection context) {
default Document fetchOne(DocumentReferenceQuery referenceQuery, ReferenceCollection context) {
Iterator<Document> it = fetchMany(filter, context).iterator();
Iterator<Document> it = fetchMany(referenceQuery, context).iterator();
return it.hasNext() ? it.next() : null;
}
Iterable<Document> fetchMany(DocumentReferenceQuery filter, ReferenceCollection context);
/**
* Obtain all {@link Document documents} matching the given {@literal referenceQuery} in the {@literal context}.
*
* @param referenceQuery must not be {@literal null}.
* @param context must not be {@literal null}.
* @return the matching {@link Document documents}. Never {@literal null}.
*/
Iterable<Document> fetchMany(DocumentReferenceQuery referenceQuery, ReferenceCollection context);
/**
* The {@link DocumentReferenceQuery} defines the criteria by which {@link Document documents} should be matched
* applying potentially given order criteria.
*/
interface DocumentReferenceQuery {
Bson getFilter();
/**
* Get the query to obtain matching {@link Document documents}.
*
* @return never {@literal null}.
*/
Bson getQuery();
/**
* Get the sort criteria for ordering results.
*
* @return an empty {@link Document} by default. Never {@literal null}.
*/
default Bson getSort() {
return new Document();
}
// TODO: Move apply method into something else that holds the collection and knows about single item/multi-item
// processing
default Iterable<Document> apply(MongoCollection<Document> collection) {
return restoreOrder(collection.find(getFilter()).sort(getSort()));
return restoreOrder(collection.find(getQuery()).sort(getSort()));
}
/**
* Restore the order of fetched documents.
*
* @param documents must not be {@literal null}.
* @return never {@literal null}.
*/
default Iterable<Document> restoreOrder(Iterable<Document> documents) {
return documents;
}
@@ -62,14 +99,14 @@ public interface ReferenceLoader {
return new DocumentReferenceQuery() {
@Override
public Bson getFilter() {
public Bson getQuery() {
return bson;
}
@Override
public Iterable<Document> apply(MongoCollection<Document> collection) {
Document result = collection.find(getFilter()).sort(getSort()).limit(1).first();
Document result = collection.find(getQuery()).sort(getSort()).limit(1).first();
return result != null ? Collections.singleton(result) : Collections.emptyList();
}
};
@@ -80,16 +117,15 @@ public interface ReferenceLoader {
return new DocumentReferenceQuery() {
@Override
public Bson getFilter() {
public Bson getQuery() {
return bson;
}
@Override
public Iterable<Document> apply(MongoCollection<Document> collection) {
return collection.find(getFilter()).sort(getSort());
return collection.find(getQuery()).sort(getSort());
}
};
}
}
}
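
A custom DocumentReferenceQuery only has to supply the filter via getQuery(); getSort() and apply(...) come with defaults, where apply runs collection.find(getQuery()).sort(getSort()). A hypothetical implementation sketch (the acronym field and the sort by name are assumptions for illustration):

import org.bson.Document;
import org.bson.conversions.Bson;
import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery;

class AcronymReferenceQuery implements DocumentReferenceQuery {

	private final String acronym;

	AcronymReferenceQuery(String acronym) {
		this.acronym = acronym;
	}

	@Override
	public Bson getQuery() {
		return new Document("acronym", acronym); // match linked documents by their acronym field
	}

	@Override
	public Bson getSort() {
		return new Document("name", 1); // let the server order the matches by name
	}
}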

View File

@@ -15,6 +15,7 @@
*/
package org.springframework.data.mongodb.core.convert;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -29,7 +30,6 @@ import java.util.stream.Collectors;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.model.SpELContext;
import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery;
@@ -64,6 +64,12 @@ public final class ReferenceLookupDelegate {
private final SpELContext spELContext;
private final ParameterBindingDocumentCodec codec;
/**
* Create a new {@link ReferenceLookupDelegate}.
*
* @param mappingContext must not be {@literal null}.
* @param spELContext must not be {@literal null}.
*/
public ReferenceLookupDelegate(
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
SpELContext spELContext) {
@@ -76,11 +82,20 @@ public final class ReferenceLookupDelegate {
this.codec = new ParameterBindingDocumentCodec();
}
/**
* Read the reference expressed by the given property.
*
* @param property the reference defining property. Must not be {@literal null}.
* @param value the source value identifying the referenced entity. Must not be {@literal null}.
* @param lookupFunction to execute a lookup query. Must not be {@literal null}.
* @param entityReader the callback to convert raw source values into actual domain types. Must not be
* {@literal null}.
* @return can be {@literal null}.
*/
@Nullable
Object readReference(MongoPersistentProperty property, Object value, LookupFunction lookupFunction,
public Object readReference(MongoPersistentProperty property, Object value, LookupFunction lookupFunction,
MongoEntityReader entityReader) {
DocumentReferenceQuery filter = computeFilter(property, value, spELContext);
ReferenceCollection referenceCollection = computeReferenceContext(property, value, spELContext);
@@ -100,10 +115,12 @@ public final class ReferenceLookupDelegate {
private ReferenceCollection computeReferenceContext(MongoPersistentProperty property, Object value,
SpELContext spELContext) {
// Use the first value as a reference for others in case of collection like
if (value instanceof Iterable) {
value = ((Iterable<?>) value).iterator().next();
}
// handle DBRef value
if (value instanceof DBRef) {
return ReferenceCollection.fromDBRef((DBRef) value);
}
@@ -112,7 +129,7 @@ public final class ReferenceLookupDelegate {
if (value instanceof Document) {
Document ref = (Document) value;
Document documentPointer = (Document) value;
if (property.isDocumentReference()) {
@@ -120,15 +137,13 @@ public final class ReferenceLookupDelegate {
DocumentReference documentReference = property.getDocumentReference();
String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext,
() -> ref.get("db", String.class));
() -> documentPointer.get("db", String.class));
String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext,
() -> ref.get("collection",
collection));
() -> documentPointer.get("collection", collection));
return new ReferenceCollection(targetDatabase, targetCollection);
}
return new ReferenceCollection(ref.getString("db"), ref.get("collection",
collection));
return new ReferenceCollection(documentPointer.getString("db"), documentPointer.get("collection", collection));
}
if (property.isDocumentReference()) {
@@ -137,16 +152,24 @@ public final class ReferenceLookupDelegate {
DocumentReference documentReference = property.getDocumentReference();
String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, () -> null);
String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext,
() -> collection);
String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, () -> collection);
return new ReferenceCollection(targetDatabase, targetCollection);
}
return new ReferenceCollection(null,
collection);
return new ReferenceCollection(null, collection);
}
/**
* Use the given {@link ParameterBindingContext} to compute potential expressions against the value.
*
* @param value must not be {@literal null}.
* @param bindingContext must not be {@literal null}.
* @param defaultValue a {@link Supplier} providing the fallback value. Must not be {@literal null}.
* @param <T> the expected result type.
* @return can be {@literal null}.
*/
@Nullable
@SuppressWarnings("unchecked")
private <T> T parseValueOrGet(String value, ParameterBindingContext bindingContext, Supplier<T> defaultValue) {
@@ -154,12 +177,17 @@ public final class ReferenceLookupDelegate {
return defaultValue.get();
}
// parameter binding requires a document, since we do not have one, construct it.
if (!BsonUtils.isJsonDocument(value) && value.contains("?#{")) {
String s = "{ 'target-value' : " + value + "}";
T evaluated = (T) codec.decode(s, bindingContext).get("target-value");
return evaluated != null ? evaluated : defaultValue.get();
}
if (BsonUtils.isJsonDocument(value)) {
return (T) codec.decode(value, bindingContext);
}
T evaluated = (T) bindingContext.evaluateExpression(value);
return evaluated != null ? evaluated : defaultValue.get();
}
@@ -171,8 +199,8 @@ public final class ReferenceLookupDelegate {
}
ValueProvider valueProviderFor(Object source) {
return (index) -> {
return (index) -> {
if (source instanceof Document) {
return Streamable.of(((Document) source).values()).toList().get(index);
}
@@ -189,13 +217,24 @@ public final class ReferenceLookupDelegate {
return ctx;
}
/**
* Compute the query to retrieve linked documents.
*
* @param property must not be {@literal null}.
* @param value must not be {@literal null}.
* @param spELContext must not be {@literal null}.
* @return never {@literal null}.
*/
@SuppressWarnings("unchecked")
DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object value, SpELContext spELContext) {
DocumentReference documentReference = property.getDocumentReference();
DocumentReference documentReference = property.isDocumentReference() ? property.getDocumentReference()
: ReferenceEmulatingDocumentReference.INSTANCE;
String lookup = documentReference.lookup();
Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, value, spELContext), () -> null);
Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, value, spELContext),
() -> new Document());
if (property.isCollectionLike() && value instanceof Collection) {
@@ -225,45 +264,94 @@ public final class ReferenceLookupDelegate {
return new SingleDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, value, spELContext)), sort);
}
enum ReferenceEmulatingDocumentReference implements DocumentReference {
INSTANCE;
@Override
public Class<? extends Annotation> annotationType() {
return DocumentReference.class;
}
@Override
public String db() {
return "";
}
@Override
public String collection() {
return "";
}
@Override
public String lookup() {
return "{ '_id' : ?#{#target} }";
}
@Override
public String sort() {
return "";
}
@Override
public boolean lazy() {
return false;
}
}
/**
* {@link DocumentReferenceQuery} implementation fetching a single {@link Document}.
*/
static class SingleDocumentReferenceQuery implements DocumentReferenceQuery {
Document filter;
Document sort;
private final Document query;
private final Document sort;
public SingleDocumentReferenceQuery(Document filter, Document sort) {
this.filter = filter;
public SingleDocumentReferenceQuery(Document query, Document sort) {
this.query = query;
this.sort = sort;
}
@Override
public Bson getFilter() {
return filter;
public Bson getQuery() {
return query;
}
@Override
public Document getSort() {
return sort;
}
@Override
public Iterable<Document> apply(MongoCollection<Document> collection) {
Document result = collection.find(getFilter()).limit(1).first();
Document result = collection.find(getQuery()).sort(getSort()).limit(1).first();
return result != null ? Collections.singleton(result) : Collections.emptyList();
}
}
/**
* {@link DocumentReferenceQuery} implementation to retrieve linked {@link Document documents} stored inside a
* {@link Map} structure. Restores the original map order by matching individual query documents against the actual
* values.
*/
static class MapDocumentReferenceQuery implements DocumentReferenceQuery {
private final Document filter;
private final Document query;
private final Document sort;
private final Map<Object, Document> filterOrderMap;
public MapDocumentReferenceQuery(Document filter, Document sort, Map<Object, Document> filterOrderMap) {
public MapDocumentReferenceQuery(Document query, Document sort, Map<Object, Document> filterOrderMap) {
this.filter = filter;
this.query = query;
this.sort = sort;
this.filterOrderMap = filterOrderMap;
}
@Override
public Bson getFilter() {
return filter;
public Bson getQuery() {
return query;
}
@Override
@@ -289,33 +377,38 @@ public final class ReferenceLookupDelegate {
}
}
/**
* {@link DocumentReferenceQuery} implementation to retrieve linked {@link Document documents} stored inside a
* {@link Collection} like structure. Restores the original order by matching individual query documents against the
* actual values.
*/
static class ListDocumentReferenceQuery implements DocumentReferenceQuery {
private final Document filter;
private final Document query;
private final Document sort;
public ListDocumentReferenceQuery(Document filter, Document sort) {
public ListDocumentReferenceQuery(Document query, Document sort) {
this.filter = filter;
this.query = query;
this.sort = sort;
}
@Override
public Iterable<Document> restoreOrder(Iterable<Document> documents) {
if (filter.containsKey("$or")) {
List<Document> ors = filter.get("$or", List.class);
List<Document> target = documents instanceof List ? (List<Document>) documents
: Streamable.of(documents).toList();
return target.stream().sorted((o1, o2) -> compareAgainstReferenceIndex(ors, o1, o2))
.collect(Collectors.toList());
List<Document> target = documents instanceof List ? (List<Document>) documents
: Streamable.of(documents).toList();
if (!sort.isEmpty() || !query.containsKey("$or")) {
return target;
}
return documents;
List<Document> ors = query.get("$or", List.class);
return target.stream().sorted((o1, o2) -> compareAgainstReferenceIndex(ors, o1, o2)).collect(Collectors.toList());
}
public Document getFilter() {
return filter;
public Document getQuery() {
return query;
}
@Override
@@ -339,9 +432,18 @@ public final class ReferenceLookupDelegate {
}
}
/**
* The function that can execute a given {@link DocumentReferenceQuery} within the {@link ReferenceCollection} to
* obtain raw results.
*/
@FunctionalInterface
interface LookupFunction {
/**
* @param referenceQuery never {@literal null}.
* @param referenceCollection never {@literal null}.
* @return never {@literal null}.
*/
Iterable<Document> apply(DocumentReferenceQuery referenceQuery, ReferenceCollection referenceCollection);
}
}
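
For collection-like properties the delegate combines the per-element lookup documents into a single bulk query using $or and restores the original element order in memory afterwards. A sketch of the assumed query shape for the default { '_id' : ?#{#target} } lookup (illustrative, not the delegate's actual code):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import org.bson.Document;

class BulkLookupSketch {

	public static void main(String[] args) {

		// The raw pointer values as stored in the owning document, in their original order.
		List<Object> storedPointers = Arrays.asList("ref-1", "ref-3", "ref-2");

		// One lookup document per pointer, combined via $or into a single query.
		List<Document> ors = storedPointers.stream() //
				.map(id -> new Document("_id", id)) //
				.collect(Collectors.toList());

		Document bulkQuery = new Document("$or", ors);
		System.out.println(bulkQuery.toJson());
		// {"$or": [{"_id": "ref-1"}, {"_id": "ref-3"}, {"_id": "ref-2"}]}
	}
}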

View File

@@ -15,6 +15,7 @@
*/
package org.springframework.data.mongodb.core.convert;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.util.TypeInformation;
import org.springframework.lang.Nullable;
@@ -23,47 +24,92 @@ import org.springframework.util.Assert;
import com.mongodb.DBRef;
/**
* The {@link ReferenceResolver} allows loading and converting linked entities.
*
* @author Christoph Strobl
*/
public interface ReferenceResolver {
/**
* Resolve the association defined via the given property from a given source value. May deliver a
* {@link LazyLoadingProxy proxy instance} in case of a lazy loading association.
*
* @param property the association defining property.
* @param source the association source value.
* @param referenceLookupDelegate the lookup executing component.
* @param entityReader conversion function capable of constructing entities from raw source.
* @return can be {@literal null}.
*/
@Nullable
Object resolveReference(MongoPersistentProperty property, Object source,
ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader);
ReferenceLoader getReferenceLoader();
/**
* {@link ReferenceCollection} is a value object that contains information about the target database and collection
* name of an association.
*/
class ReferenceCollection {
@Nullable
@Nullable //
private final String database;
private final String collection;
/**
* @param database can be {@literal null} to indicate the configured default
* {@link MongoDatabaseFactory#getMongoDatabase() database} should be used.
* @param collection the target collection name. Must not be {@literal null}.
*/
public ReferenceCollection(@Nullable String database, String collection) {
Assert.hasText(collection, "Collection must not be empty or null");
Assert.hasText(collection, "Collection must not be empty or null!");
this.database = database;
this.collection = collection;
}
static ReferenceCollection fromDBRef(DBRef dbRef) {
/**
* Create a new instance of {@link ReferenceCollection} from the given {@link DBRef}.
*
* @param dbRef must not be {@literal null}.
* @return new instance of {@link ReferenceCollection}.
*/
public static ReferenceCollection fromDBRef(DBRef dbRef) {
return new ReferenceCollection(dbRef.getDatabaseName(), dbRef.getCollectionName());
}
/**
* Get the target collection name.
*
* @return never {@literal null}.
*/
public String getCollection() {
return collection;
}
/**
* Get the target database name. If {@literal null} the default database should be used.
*
* @return can be {@literal null}.
*/
@Nullable
public String getDatabase() {
return database;
}
}
/**
* Domain type conversion callback interface that allows reading a raw source value into a mapped domain object.
*/
@FunctionalInterface
interface MongoEntityReader {
Object read(Object source, TypeInformation<?> property);
/**
* Read values from the given source into an object defined via the given {@link TypeInformation}.
*
* @param source never {@literal null}.
* @param typeInformation information about the desired target type.
* @return never {@literal null}.
*/
Object read(Object source, TypeInformation<?> typeInformation);
}
}
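
ReferenceCollection is a plain value object carrying the optional database and the mandatory collection name a lookup should run against. A short usage sketch (database and collection names are made up):

import com.mongodb.DBRef;

import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection;

class ReferenceCollectionUsage {

	public static void main(String[] args) {

		// null database -> the configured default database is used.
		ReferenceCollection fromNames = new ReferenceCollection(null, "publisher");

		// Derive database and collection from an existing DBRef.
		ReferenceCollection fromDbRef = ReferenceCollection.fromDBRef(new DBRef("library", "publisher", "p-1"));

		System.out.println(fromNames.getCollection() + " / " + fromDbRef.getDatabase());
	}
}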

View File

@@ -349,7 +349,13 @@ public class BsonUtils {
* @since 3.0
*/
public static boolean isJsonDocument(@Nullable String value) {
return StringUtils.hasText(value) && (value.startsWith("{") && value.endsWith("}"));
if(!StringUtils.hasText(value)) {
return false;
}
String potentialJson = value.trim();
return potentialJson.startsWith("{") && potentialJson.endsWith("}");
}
/**

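Assuming BsonUtils from org.springframework.data.mongodb.util, the relaxed check above now tolerates surrounding whitespace; a quick illustration:

import org.springframework.data.mongodb.util.BsonUtils;

class IsJsonDocumentExample {

	public static void main(String[] args) {
		System.out.println(BsonUtils.isJsonDocument("{ '_id' : 1 }"));    // true
		System.out.println(BsonUtils.isJsonDocument("  { '_id' : 1 } ")); // true after this change
		System.out.println(BsonUtils.isJsonDocument("?#{#target}"));      // false
	}
}
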
View File

@@ -38,6 +38,7 @@ import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Reference;
import org.springframework.data.convert.WritingConverter;
import org.springframework.data.mongodb.core.convert.LazyLoadingTestUtils;
import org.springframework.data.mongodb.core.mapping.DocumentPointer;
@@ -380,6 +381,33 @@ public class MongoTemplateDocumentReferenceTests {
new ObjectRefOfDocumentWithEmbeddedCollectionName("ref-1", "me-the-1-referenced-object"));
}
@Test // GH-3602
void useOrderFromAnnotatedSort() {
String rootCollectionName = template.getCollectionName(CollectionRefRoot.class);
String refCollectionName = template.getCollectionName(SimpleObjectRef.class);
Document refSource1 = new Document("_id", "ref-1").append("value", "me-the-1-referenced-object");
Document refSource2 = new Document("_id", "ref-2").append("value", "me-the-2-referenced-object");
Document refSource3 = new Document("_id", "ref-3").append("value", "me-the-3-referenced-object");
Document source = new Document("_id", "id-1").append("value", "v1").append("simpleSortedValueRef",
Arrays.asList("ref-1", "ref-3", "ref-2"));
template.execute(db -> {
db.getCollection(refCollectionName).insertOne(refSource1);
db.getCollection(refCollectionName).insertOne(refSource2);
db.getCollection(refCollectionName).insertOne(refSource3);
db.getCollection(rootCollectionName).insertOne(source);
return null;
});
CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class);
assertThat(result.getSimpleSortedValueRef()).containsExactly(
new SimpleObjectRef("ref-3", "me-the-3-referenced-object"),
new SimpleObjectRef("ref-2", "me-the-2-referenced-object"),
new SimpleObjectRef("ref-1", "me-the-1-referenced-object"));
}
@Test // GH-3602
void readObjectReferenceFromDocumentNotRelatingToTheIdProperty() {
@@ -857,7 +885,8 @@ public class MongoTemplateDocumentReferenceTests {
template.save(book);
template.update(Book.class).matching(where("id").is(book.id)).apply(new Update().set("publisher", publisher)).first();
template.update(Book.class).matching(where("id").is(book.id)).apply(new Update().set("publisher", publisher))
.first();
Document target = template.execute(db -> {
return db.getCollection(template.getCollectionName(Book.class)).find(Filters.eq("_id", book.id)).first();
@@ -890,6 +919,56 @@ public class MongoTemplateDocumentReferenceTests {
assertThat(result.publisher).isNotNull();
}
@Test // GH-3602
void allowsDirectUsageOfAtReference() {
Publisher publisher = new Publisher();
publisher.id = "p-1";
publisher.acronym = "TOR";
publisher.name = "Tom Doherty Associates";
template.save(publisher);
UsingAtReference root = new UsingAtReference();
root.id = "book-1";
root.publisher = publisher;
template.save(root);
Document target = template.execute(db -> {
return db.getCollection(template.getCollectionName(UsingAtReference.class)).find(Filters.eq("_id", root.id)).first();
});
assertThat(target).containsEntry("publisher", "p-1");
UsingAtReference result = template.findOne(query(where("id").is(root.id)), UsingAtReference.class);
assertThat(result.publisher).isNotNull();
}
@Test // GH-3602
void updateWhenUsingAtReferenceDirectly() {
Publisher publisher = new Publisher();
publisher.id = "p-1";
publisher.acronym = "TOR";
publisher.name = "Tom Doherty Associates";
template.save(publisher);
UsingAtReference root = new UsingAtReference();
root.id = "book-1";
template.save(root);
template.update(UsingAtReference.class).matching(where("id").is(root.id)).apply(new Update().set("publisher", publisher)).first();
Document target = template.execute(db -> {
return db.getCollection(template.getCollectionName(UsingAtReference.class)).find(Filters.eq("_id", root.id)).first();
});
assertThat(target).containsEntry("publisher", "p-1");
}
@Data
static class SingleRefRoot {
@@ -930,6 +1009,9 @@ public class MongoTemplateDocumentReferenceTests {
@DocumentReference(lookup = "{ '_id' : '?#{#target}' }") //
List<SimpleObjectRef> simpleValueRef;
@DocumentReference(lookup = "{ '_id' : '?#{#target}' }", sort = "{ '_id' : -1 } ") //
List<SimpleObjectRef> simpleSortedValueRef;
@DocumentReference(lookup = "{ '_id' : '?#{#target}' }") //
Map<String, SimpleObjectRef> mapValueRef;
@@ -1051,7 +1133,8 @@ public class MongoTemplateDocumentReferenceTests {
static class WithRefA/* to B */ implements ReferenceAble {
@Id String id;
@DocumentReference WithRefB toB;
@DocumentReference //
WithRefB toB;
@Override
public Object toReference() {
@@ -1065,9 +1148,11 @@ public class MongoTemplateDocumentReferenceTests {
static class WithRefB/* to A */ implements ReferenceAble {
@Id String id;
@DocumentReference(lazy = true) WithRefA lazyToA;
@DocumentReference(lazy = true) //
WithRefA lazyToA;
@DocumentReference WithRefA eagerToA;
@DocumentReference //
WithRefA eagerToA;
@Override
public Object toReference() {
@@ -1091,7 +1176,8 @@ public class MongoTemplateDocumentReferenceTests {
String id;
@DocumentReference(lookup = "{ 'acronym' : ?#{acc}, 'name' : ?#{n} }") Publisher publisher;
@DocumentReference(lookup = "{ 'acronym' : ?#{acc}, 'name' : ?#{n} }") //
Publisher publisher;
}
@@ -1102,4 +1188,13 @@ public class MongoTemplateDocumentReferenceTests {
String name;
}
@Data
static class UsingAtReference {
String id;
@Reference //
Publisher publisher;
}
}

View File

@@ -0,0 +1,139 @@
/*
* Copyright 2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.convert;
import static org.assertj.core.api.Assertions.*;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
import org.bson.Document;
import org.junit.jupiter.api.Test;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.data.mongodb.core.convert.DocumentPointerFactory.LinkageDocument;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
/**
* @author Christoph Strobl
*/
public class DocumentPointerFactoryUnitTests {
@Test // GH-3602
void errorsOnMongoOperatorUsage() {
LinkageDocument source = LinkageDocument.from("{ '_id' : { '$eq' : 1 } }");
assertThatExceptionOfType(InvalidDataAccessApiUsageException.class)
.isThrownBy(() -> getPointerValue(source, new Book())) //
.withMessageContaining("$eq");
}
@Test // GH-3602
void computesStaticPointer() {
LinkageDocument source = LinkageDocument.from("{ '_id' : 1 }");
assertThat(getPointerValue(source, new Book())).isEqualTo(new Document("_id", 1));
}
@Test // GH-3602
void computesPointerWithIdValuePlaceholder() {
LinkageDocument source = LinkageDocument.from("{ '_id' : ?#{id} }");
assertThat(getPointerValue(source, new Book("book-1", null, null))).isEqualTo(new Document("id", "book-1"));
}
@Test // GH-3602
void computesPointerForNonIdValuePlaceholder() {
LinkageDocument source = LinkageDocument.from("{ 'title' : ?#{book_title} }");
assertThat(getPointerValue(source, new Book("book-1", "Living With A Seal", null)))
.isEqualTo(new Document("book_title", "Living With A Seal"));
}
@Test // GH-3602
void computesPlaceholderFromNestedPathValue() {
LinkageDocument source = LinkageDocument.from("{ 'metadata.pages' : ?#{p} } }");
assertThat(getPointerValue(source, new Book("book-1", "Living With A Seal", null, new Metadata(272))))
.isEqualTo(new Document("p", 272));
}
@Test // GH-3602
void computesNestedPlaceholderPathValue() {
LinkageDocument source = LinkageDocument.from("{ 'metadata' : { 'pages' : ?#{metadata.pages} } }");
assertThat(getPointerValue(source, new Book("book-1", "Living With A Seal", null, new Metadata(272))))
.isEqualTo(new Document("metadata", new Document("pages", 272)));
}
Object getPointerValue(LinkageDocument linkageDocument, Object value) {
MongoMappingContext mappingContext = new MongoMappingContext();
MongoPersistentEntity<?> persistentEntity = mappingContext.getPersistentEntity(value.getClass());
return linkageDocument
.getDocumentPointer(mappingContext, persistentEntity, persistentEntity.getPropertyPathAccessor(value))
.getPointer();
}
@Data
@AllArgsConstructor
@NoArgsConstructor
static class Book {
String id;
String title;
List<Author> author;
Metadata metadata;
public Book(String id, String title, List<Author> author) {
this.id = id;
this.title = title;
this.author = author;
}
}
static class Metadata {
int pages;
public Metadata(int pages) {
this.pages = pages;
}
public int getPages() {
return pages;
}
public void setPages(int pages) {
this.pages = pages;
}
}
@Data
static class Author {
String id;
String firstname;
String lastname;
}
}

View File

@@ -580,9 +580,9 @@ class MappingMongoConverterUnitTests {
org.bson.Document map = (org.bson.Document) field;
Object foo = map.get("Foo");
assertThat(foo).isInstanceOf(BasicDBList.class);
assertThat(foo).isInstanceOf(List.class);
BasicDBList value = (BasicDBList) foo;
List value = (List) foo;
assertThat(value.size()).isEqualTo(1);
assertThat(value.get(0)).isEqualTo("Bar");
}
@@ -695,9 +695,9 @@ class MappingMongoConverterUnitTests {
assertThat(result.containsKey("Foo")).isTrue();
assertThat(result.get("Foo")).isNotNull();
assertThat(result.get("Foo")).isInstanceOf(BasicDBList.class);
assertThat(result.get("Foo")).isInstanceOf(List.class);
BasicDBList list = (BasicDBList) result.get("Foo");
List list = (List) result.get("Foo");
assertThat(list.size()).isEqualTo(1);
assertThat(list.get(0)).isEqualTo(Locale.US.toString());
@@ -744,7 +744,7 @@ class MappingMongoConverterUnitTests {
org.bson.Document map = (org.bson.Document) mapObject;
Object valueObject = map.get("foo");
assertThat(valueObject).isInstanceOf(BasicDBList.class);
assertThat(valueObject).isInstanceOf(List.class);
List<Object> list = (List<Object>) valueObject;
assertThat(list.size()).isEqualTo(1);

View File

@@ -129,10 +129,6 @@ public class ReactivePerformanceTests {
return null;
}
@Override
public ReferenceLoader getReferenceLoader() {
return null;
}
}, context);
operations = new ReactiveMongoTemplate(mongoDbFactory, converter);

View File

@@ -902,6 +902,10 @@ It is possible to alter resolution defaults (listed below) via the attributes of
| The single document lookup query evaluating placeholders via SpEL expressions using `#target` as the marker for a given source value. `Collection` like or `Map` properties combine individual lookups via an `$or` operator.
| An `_id` field based query (`{ '_id' : ?#{#target} }`) using the loaded source value.
| `sort`
| Used for sorting result documents on server side.
| None by default. Result order of `Collection` like properties is restored based on the used lookup query.
| `lazy`
| If set to `true` value resolution is delayed upon first access of the property.
| Resolves properties eagerly by default.
@@ -1182,7 +1186,7 @@ We know it is tempting to use all kinds of MongoDB query operators in the lookup
* Mind that resolution takes time and consider a lazy strategy.
* A collection of document references is bulk loaded using an `$or` operator. +
The original element order is restored in memory which cannot be done when using MongoDB query operators.
In this case Results will be ordered as they are received from the store.
In this case, results will be ordered as they are received from the store or according to the provided `@DocumentReference(sort = ...)` attribute.
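For example, server-side ordering can be requested through the `sort` attribute, as done by the `simpleSortedValueRef` fixture added in this commit:

[source,java]
----
@DocumentReference(lookup = "{ '_id' : ?#{#target} }", sort = "{ '_id' : -1 }")
List<SimpleObjectRef> simpleSortedValueRef;
----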
And a few more general remarks: