Reduce allocations in query and update mapping.

Introduce EmptyDocument and utility methods in BsonUtils. Avoid entrySet() and iterator allocation when iterating over and inspecting documents.

Relates to: #3760
Original Pull Request: #3809
This commit is contained in:
Mark Paluch
2021-09-08 10:04:58 +02:00
committed by Christoph Strobl
parent 8fb0e1326b
commit a26e780957
15 changed files with 298 additions and 67 deletions

View File

@@ -156,5 +156,14 @@ public class MappedDocument {
/**
 * Returns the array filters of the wrapped update definition. Pure delegation; no copying or caching here.
 *
 * @return the delegate's {@link ArrayFilter} list.
 */
public List<ArrayFilter> getArrayFilters() {
return delegate.getArrayFilters();
}
/*
 * (non-Javadoc)
 * @see org.springframework.data.mongodb.core.query.UpdateDefinition#hasArrayFilters()
 */
@Override
public boolean hasArrayFilters() {
// Pure delegation; the wrapped update definition decides whether array filters are present.
return delegate.hasArrayFilters();
}
}
}

View File

@@ -613,7 +613,7 @@ class QueryOperations {
UpdateContext(MappedDocument update, boolean upsert) {
super(new BasicQuery(new Document(BsonUtils.asMap(update.getIdFilter()))));
super(new BasicQuery(BsonUtils.asDocument(update.getIdFilter())));
this.multi = false;
this.upsert = upsert;
this.mappedDocument = update;

View File

@@ -135,7 +135,7 @@ class DocumentAccessor {
*/
@Nullable
public Object getRawId(MongoPersistentEntity<?> entity) {
return entity.hasIdProperty() ? get(entity.getRequiredIdProperty()) : BsonUtils.asMap(document).get("_id");
return entity.hasIdProperty() ? get(entity.getRequiredIdProperty()) : BsonUtils.get(document, "_id");
}
/**

View File

@@ -25,7 +25,6 @@ import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
@@ -1325,21 +1324,22 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return map;
}
for (Entry<String, Object> entry : sourceMap.entrySet()) {
sourceMap.forEach((k, v) -> {
if (typeMapper.isTypeKey(entry.getKey())) {
continue;
if (typeMapper.isTypeKey(k)) {
return;
}
Object key = potentiallyUnescapeMapKey(entry.getKey());
Object key = potentiallyUnescapeMapKey(k);
if (!rawKeyType.isAssignableFrom(key.getClass())) {
key = doConvert(key, rawKeyType);
}
Object value = entry.getValue();
Object value = v;
map.put(key, value == null ? value : context.convert(value, valueType));
}
});
return map;
}

View File

@@ -140,6 +140,9 @@ public interface MongoConverter
if (ObjectId.isValid(id.toString())) {
return new ObjectId(id.toString());
}
// avoid ConversionException as convertToMongoType will return String anyways.
return id;
}
}

View File

@@ -193,12 +193,11 @@ public class QueryMapper {
Assert.notNull(sortObject, "SortObject must not be null!");
if (sortObject.isEmpty()) {
return new Document();
return BsonUtils.EMPTY_DOCUMENT;
}
Document mappedSort = mapFieldsToPropertyNames(sortObject, entity);
mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT);
return mappedSort;
return mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT);
}
/**
@@ -215,42 +214,51 @@ public class QueryMapper {
Assert.notNull(fieldsObject, "FieldsObject must not be null!");
Document mappedFields = mapFieldsToPropertyNames(fieldsObject, entity);
mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE);
return mappedFields;
return mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE);
}
private Document mapFieldsToPropertyNames(Document fields, @Nullable MongoPersistentEntity<?> entity) {
if (fields.isEmpty()) {
return new Document();
return BsonUtils.EMPTY_DOCUMENT;
}
Document target = new Document();
for (Map.Entry<String, Object> entry : BsonUtils.asMap(filterUnwrappedObjects(fields, entity)).entrySet()) {
Field field = createPropertyField(entity, entry.getKey(), mappingContext);
BsonUtils.asMap(filterUnwrappedObjects(fields, entity)).forEach((k, v) -> {
Field field = createPropertyField(entity, k, mappingContext);
if (field.getProperty() != null && field.getProperty().isUnwrapped()) {
continue;
return;
}
target.put(field.getMappedKey(), entry.getValue());
}
target.put(field.getMappedKey(), v);
});
return target;
}
private void mapMetaAttributes(Document source, @Nullable MongoPersistentEntity<?> entity, MetaMapping metaMapping) {
private Document mapMetaAttributes(Document source, @Nullable MongoPersistentEntity<?> entity,
MetaMapping metaMapping) {
if (entity == null) {
return;
return source;
}
if (entity.hasTextScoreProperty() && !MetaMapping.IGNORE.equals(metaMapping)) {
if (source == BsonUtils.EMPTY_DOCUMENT) {
source = new Document();
}
MongoPersistentProperty textScoreProperty = entity.getTextScoreProperty();
if (MetaMapping.FORCE.equals(metaMapping)
|| (MetaMapping.WHEN_PRESENT.equals(metaMapping) && source.containsKey(textScoreProperty.getFieldName()))) {
source.putAll(getMappedTextScoreField(textScoreProperty));
}
}
return source;
}
private Document filterUnwrappedObjects(Document fieldsObject, @Nullable MongoPersistentEntity<?> entity) {
@@ -679,7 +687,7 @@ public class QueryMapper {
private Entry<String, Object> createMapEntry(String key, @Nullable Object value) {
Assert.hasText(key, "Key must not be null or empty!");
return Collections.singletonMap(key, value).entrySet().iterator().next();
return new AbstractMap.SimpleEntry<>(key, value);
}
private Object createReferenceFor(Object source, MongoPersistentProperty property) {
@@ -733,13 +741,13 @@ public class QueryMapper {
return false;
}
Set<String> keys = BsonUtils.asMap((Bson) candidate).keySet();
Map<String, Object> map = BsonUtils.asMap((Bson) candidate);
if (keys.size() != 1) {
if (map.size() != 1) {
return false;
}
return isKeyword(keys.iterator().next());
return isKeyword(map.entrySet().iterator().next().getKey());
}
/**
@@ -823,11 +831,14 @@ public class QueryMapper {
public Keyword(Bson bson) {
Set<String> keys = BsonUtils.asMap(bson).keySet();
Assert.isTrue(keys.size() == 1, "Can only use a single value Document!");
Map<String, Object> map = BsonUtils.asMap(bson);
Assert.isTrue(map.size() == 1, "Can only use a single value Document!");
this.key = keys.iterator().next();
this.value = BsonUtils.get(bson, key);
Set<Entry<String, Object>> entries = map.entrySet();
Entry<String, Object> entry = entries.iterator().next();
this.key = entry.getKey();
this.value = entry.getValue();
}
/**

View File

@@ -49,8 +49,8 @@ public class Meta {
}
}
private final Map<String, Object> values = new LinkedHashMap<>(2);
private final Set<CursorOption> flags = new LinkedHashSet<>();
private Map<String, Object> values = Collections.emptyMap();
private Set<CursorOption> flags = Collections.emptySet();
private Integer cursorBatchSize;
private Boolean allowDiskUse;
@@ -63,8 +63,9 @@ public class Meta {
* @param source
*/
Meta(Meta source) {
this.values.putAll(source.values);
this.flags.addAll(source.flags);
this.values = new LinkedHashMap<>(source.values);
this.flags = new LinkedHashSet<>(source.flags);
this.cursorBatchSize = source.cursorBatchSize;
this.allowDiskUse = source.allowDiskUse;
}
@@ -158,6 +159,11 @@ public class Meta {
/**
 * Adds the given {@link CursorOption} to the set of flags.
 *
 * @param option must not be {@literal null}.
 * @return {@literal true} if the flag was not already present.
 */
public boolean addFlag(CursorOption option) {
Assert.notNull(option, "CursorOption must not be null!");
// flags defaults to the shared immutable empty set (identity check, not equals);
// switch to a mutable set on first write to avoid allocation for flag-less Meta instances.
if (this.flags == Collections.EMPTY_SET) {
this.flags = new LinkedHashSet<>(2);
}
return this.flags.add(option);
}
@@ -220,6 +226,10 @@ public class Meta {
Assert.hasText(key, "Meta key must not be 'null' or blank.");
if (values == Collections.EMPTY_MAP) {
values = new LinkedHashMap<>(2);
}
if (value == null || (value instanceof String && !StringUtils.hasText((String) value))) {
this.values.remove(key);
}

View File

@@ -21,6 +21,7 @@ import static org.springframework.util.ObjectUtils.*;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
@@ -30,6 +31,7 @@ import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.bson.Document;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Order;
@@ -52,7 +54,7 @@ public class Query {
private static final String RESTRICTED_TYPES_KEY = "_$RESTRICTED_TYPES";
private final Set<Class<?>> restrictedTypes = new HashSet<>();
private Set<Class<?>> restrictedTypes = Collections.emptySet();
private final Map<String, CriteriaDefinition> criteria = new LinkedHashMap<>();
private @Nullable Field fieldSpec = null;
private Sort sort = Sort.unsorted();
@@ -235,8 +237,15 @@ public class Query {
Assert.notNull(type, "Type must not be null!");
Assert.notNull(additionalTypes, "AdditionalTypes must not be null");
if (restrictedTypes == Collections.EMPTY_SET) {
restrictedTypes = new HashSet<>(1 + additionalTypes.length);
}
restrictedTypes.add(type);
restrictedTypes.addAll(Arrays.asList(additionalTypes));
if (additionalTypes.length > 0) {
restrictedTypes.addAll(Arrays.asList(additionalTypes));
}
return this;
}
@@ -246,6 +255,17 @@ public class Query {
*/
public Document getQueryObject() {
if (criteria.isEmpty() && restrictedTypes.isEmpty()) {
return BsonUtils.EMPTY_DOCUMENT;
}
if (criteria.size() == 1 && restrictedTypes.isEmpty()) {
for (CriteriaDefinition definition : criteria.values()) {
return definition.getCriteriaObject();
}
}
Document document = new Document();
for (CriteriaDefinition definition : criteria.values()) {
@@ -263,7 +283,7 @@ public class Query {
* @return the field {@link Document}.
*/
public Document getFieldsObject() {
return this.fieldSpec == null ? new Document() : fieldSpec.getFieldsObject();
return this.fieldSpec == null ? BsonUtils.EMPTY_DOCUMENT : fieldSpec.getFieldsObject();
}
/**
@@ -272,13 +292,12 @@ public class Query {
public Document getSortObject() {
if (this.sort.isUnsorted()) {
return new Document();
return BsonUtils.EMPTY_DOCUMENT;
}
Document document = new Document();
this.sort.stream()//
.forEach(order -> document.put(order.getProperty(), order.isAscending() ? 1 : -1));
this.sort.forEach(order -> document.put(order.getProperty(), order.isAscending() ? 1 : -1));
return document;
}
@@ -557,7 +576,7 @@ public class Query {
target.limit = source.getLimit();
target.hint = source.getHint();
target.collation = source.getCollation();
target.restrictedTypes.addAll(source.getRestrictedTypes());
target.restrictedTypes = new HashSet<>(source.getRestrictedTypes());
if (source.getMeta().hasValues()) {
target.setMeta(new Meta(source.getMeta()));

View File

@@ -18,6 +18,8 @@ package org.springframework.data.mongodb.core.query;
import java.util.Locale;
import org.bson.Document;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.lang.Nullable;
/**
@@ -157,7 +159,7 @@ public class TextQuery extends Query {
return super.getFieldsObject();
}
Document fields = super.getFieldsObject();
Document fields = BsonUtils.asMutableDocument(super.getFieldsObject());
fields.put(getScoreFieldName(), META_TEXT_SCORE);
return fields;
@@ -170,15 +172,14 @@ public class TextQuery extends Query {
@Override
public Document getSortObject() {
Document sort = new Document();
if (this.sortByScore) {
Document sort = new Document();
sort.put(getScoreFieldName(), META_TEXT_SCORE);
sort.putAll(super.getSortObject());
return sort;
}
sort.putAll(super.getSortObject());
return sort;
return super.getSortObject();
}
/*

View File

@@ -56,10 +56,10 @@ public class Update implements UpdateDefinition {
}
private boolean isolated = false;
private Set<String> keysToUpdate = new HashSet<>();
private Map<String, Object> modifierOps = new LinkedHashMap<>();
private Map<String, PushOperatorBuilder> pushCommandBuilders = new LinkedHashMap<>(1);
private List<ArrayFilter> arrayFilters = new ArrayList<>();
private final Set<String> keysToUpdate = new HashSet<>();
private final Map<String, Object> modifierOps = new LinkedHashMap<>();
private Map<String, PushOperatorBuilder> pushCommandBuilders = Collections.emptyMap();
private List<ArrayFilter> arrayFilters = Collections.emptyList();
/**
* Static factory method to create an Update using the provided key
@@ -193,6 +193,11 @@ public class Update implements UpdateDefinition {
public PushOperatorBuilder push(String key) {
if (!pushCommandBuilders.containsKey(key)) {
if (pushCommandBuilders == Collections.EMPTY_MAP) {
pushCommandBuilders = new LinkedHashMap<>(1);
}
pushCommandBuilders.put(key, new PushOperatorBuilder(key));
}
return pushCommandBuilders.get(key);
@@ -412,6 +417,10 @@ public class Update implements UpdateDefinition {
*/
public Update filterArray(CriteriaDefinition criteria) {
// arrayFilters defaults to the shared immutable empty list (identity check, not equals);
// switch to a mutable list on first write to keep filter-less updates allocation-free.
if (arrayFilters == Collections.EMPTY_LIST) {
this.arrayFilters = new ArrayList<>();
}
// Method reference defers rendering of the criteria document until the filter is consumed.
this.arrayFilters.add(criteria::getCriteriaObject);
return this;
}
@@ -427,6 +436,10 @@ public class Update implements UpdateDefinition {
*/
public Update filterArray(String identifier, Object expression) {
// arrayFilters defaults to the shared immutable empty list (identity check, not equals);
// switch to a mutable list on first write to keep filter-less updates allocation-free.
if (arrayFilters == Collections.EMPTY_LIST) {
this.arrayFilters = new ArrayList<>();
}
// Lambda creates a fresh Document on each retrieval of the filter.
this.arrayFilters.add(() -> new Document(identifier, expression));
return this;
}
@@ -455,6 +468,15 @@ public class Update implements UpdateDefinition {
return Collections.unmodifiableList(this.arrayFilters);
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#hasArrayFilters()
*/
@Override
public boolean hasArrayFilters() {
return !this.arrayFilters.isEmpty();
}
/**
* This method is not called anymore rather override {@link #addMultiFieldOperation(String, String, Object)}.
*

View File

@@ -60,12 +60,26 @@ import com.mongodb.MongoClientSettings;
*/
public class BsonUtils {
/**
* The empty document (immutable). This document is serializable.
*
* @since 3.2.5
*/
public static final Document EMPTY_DOCUMENT = new EmptyDocument();
/**
 * Return the value assigned to {@code key} in the given {@link Bson} document, or {@literal null} if the key is
 * absent (or mapped to {@literal null}).
 *
 * @param bson the source document.
 * @param key the key to look up.
 * @return the value or {@literal null}.
 */
@SuppressWarnings("unchecked")
@Nullable
public static <T> T get(Bson bson, String key) {
// Unchecked cast is the caller's contract; the underlying map stores plain Objects.
return (T) asMap(bson).get(key);
}
/**
* Return the {@link Bson} object as {@link Map}. Depending on the input type, the return value can be either a casted
* version of {@code bson} or a converted (detached from the original value).
*
* @param bson
* @return
*/
public static Map<String, Object> asMap(Bson bson) {
if (bson instanceof Document) {
@@ -81,6 +95,55 @@ public class BsonUtils {
return (Map) bson.toBsonDocument(Document.class, MongoClientSettings.getDefaultCodecRegistry());
}
/**
 * Obtain the given {@link Bson} object as a {@link Document}. Returns the instance itself when it already is a
 * {@link Document} (or its {@link #asMap(Bson)} representation is one); otherwise a converted copy detached from the
 * original value.
 *
 * @param bson the source object.
 * @return the {@link Document} representation of {@code bson}.
 * @since 3.2.5
 */
public static Document asDocument(Bson bson) {

	if (bson instanceof Document) {
		return (Document) bson;
	}

	Map<String, Object> entries = asMap(bson);
	return entries instanceof Document ? (Document) entries : new Document(entries);
}
/**
 * Return the {@link Bson} object as mutable {@link Document} containing all entries from {@link Bson}.
 *
 * @param bson the source object.
 * @return a mutable {@link Document} containing all entries from {@link Bson}.
 * @since 3.2.5
 */
public static Document asMutableDocument(Bson bson) {
// EmptyDocument rejects mutation, so copy its (empty) content into a regular Document first.
if (bson instanceof EmptyDocument) {
bson = new Document(asDocument(bson));
}
// A plain Document is already mutable; hand it back as-is.
if (bson instanceof Document) {
return (Document) bson;
}
Map<String, Object> map = asMap(bson);
if (map instanceof Document) {
return (Document) map;
}
// Detached copy for non-Document map representations.
return new Document(map);
}
public static void addToMap(Bson bson, String key, @Nullable Object value) {
if (bson instanceof Document) {

View File

@@ -0,0 +1,95 @@
/*
* Copyright 2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.util;

import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Function;

import org.bson.Document;

/**
 * Empty, immutable variant of {@link Document}. All mutator methods throw {@link UnsupportedOperationException};
 * read methods report an empty document. Intended as a shared, allocation-free instance (see
 * {@code BsonUtils#EMPTY_DOCUMENT}).
 * <p>
 * Nullability annotations are intentionally omitted to avoid depending on a third-party annotation library; no
 * method here ever returns a value because mutators always throw.
 *
 * @author Mark Paluch
 */
class EmptyDocument extends Document {

	@Override
	public Document append(String key, Object value) {
		throw new UnsupportedOperationException();
	}

	@Override
	public Object put(String key, Object value) {
		throw new UnsupportedOperationException();
	}

	@Override
	public Object remove(Object key) {
		throw new UnsupportedOperationException();
	}

	@Override
	public void putAll(Map<? extends String, ?> map) {
		throw new UnsupportedOperationException();
	}

	@Override
	public void replaceAll(BiFunction<? super String, ? super Object, ?> function) {
		throw new UnsupportedOperationException();
	}

	@Override
	public boolean remove(Object key, Object value) {
		throw new UnsupportedOperationException();
	}

	@Override
	public boolean replace(String key, Object oldValue, Object newValue) {
		throw new UnsupportedOperationException();
	}

	@Override
	public Object replace(String key, Object value) {
		throw new UnsupportedOperationException();
	}

	// Guard the Map default methods explicitly as well. Their default implementations funnel through put/remove and
	// would presumably throw already, but explicit overrides keep the immutability contract robust against future
	// driver-side overrides and give a clearer failure point.

	@Override
	public Object putIfAbsent(String key, Object value) {
		throw new UnsupportedOperationException();
	}

	@Override
	public Object merge(String key, Object value,
			BiFunction<? super Object, ? super Object, ? extends Object> remappingFunction) {
		throw new UnsupportedOperationException();
	}

	@Override
	public Object compute(String key, BiFunction<? super String, ? super Object, ? extends Object> remappingFunction) {
		throw new UnsupportedOperationException();
	}

	@Override
	public Object computeIfAbsent(String key, Function<? super String, ? extends Object> mappingFunction) {
		throw new UnsupportedOperationException();
	}

	@Override
	public Object computeIfPresent(String key,
			BiFunction<? super String, ? super Object, ? extends Object> remappingFunction) {
		throw new UnsupportedOperationException();
	}

	@Override
	public void clear() {
		throw new UnsupportedOperationException();
	}

	@Override
	public Set<Entry<String, Object>> entrySet() {
		return Collections.emptySet();
	}

	@Override
	public Collection<Object> values() {
		return Collections.emptyList();
	}

	@Override
	public Set<String> keySet() {
		return Collections.emptySet();
	}
}

View File

@@ -101,6 +101,7 @@ import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.timeseries.Granularity;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.lang.Nullable;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.util.CollectionUtils;
@@ -1071,7 +1072,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
template.doFind("star-wars", new Document(), new Document(), Person.class, PersonSpELProjection.class,
CursorPreparer.NO_OP_PREPARER);
verify(findIterable).projection(eq(new Document()));
verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT));
}
@Test // DATAMONGO-1733, DATAMONGO-2041
@@ -1098,7 +1099,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
template.doFind("star-wars", new Document(), new Document(), Person.class, Person.class,
CursorPreparer.NO_OP_PREPARER);
verify(findIterable).projection(eq(new Document()));
verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT));
}
@Test // DATAMONGO-1733
@@ -1107,7 +1108,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
template.doFind("star-wars", new Document(), new Document(), Person.class, PersonExtended.class,
CursorPreparer.NO_OP_PREPARER);
verify(findIterable).projection(eq(new Document()));
verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT));
}
@Test // DATAMONGO-1348, DATAMONGO-2264

View File

@@ -237,11 +237,8 @@ class QueryTests {
source.addCriteria(where("From one make ten").is("and two let be."));
Query target = Query.of(source);
compareQueries(target, source);
source.addCriteria(where("Make even three").is("then rich you'll be."));
assertThat(target.getQueryObject()).isEqualTo(new Document("From one make ten", "and two let be."))
.isNotEqualTo(source.getQueryObject());
assertThat(target.getQueryObject()).containsAllEntriesOf(new Document("From one make ten", "and two let be."))
.isNotSameAs(source.getQueryObject());
}
@Test // DATAMONGO-1783
@@ -353,9 +350,12 @@ class QueryTests {
private void compareQueries(Query actual, Query expected) {
assertThat(actual.getCollation()).isEqualTo(expected.getCollation());
assertThat(actual.getSortObject()).isEqualTo(expected.getSortObject());
assertThat(actual.getFieldsObject()).isEqualTo(expected.getFieldsObject());
assertThat(actual.getQueryObject()).isEqualTo(expected.getQueryObject());
assertThat(actual.getSortObject()).hasSameSizeAs(expected.getSortObject())
.containsAllEntriesOf(expected.getSortObject());
assertThat(actual.getFieldsObject()).hasSameSizeAs(expected.getFieldsObject())
.containsAllEntriesOf(expected.getFieldsObject());
assertThat(actual.getQueryObject()).hasSameSizeAs(expected.getQueryObject())
.containsAllEntriesOf(expected.getQueryObject());
assertThat(actual.getHint()).isEqualTo(expected.getHint());
assertThat(actual.getLimit()).isEqualTo(expected.getLimit());
assertThat(actual.getSkip()).isEqualTo(expected.getSkip());

View File

@@ -31,11 +31,8 @@ import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
@@ -128,7 +125,7 @@ class PartTreeMongoQueryUnitTests {
@Test // DATAMONGO-1345, DATAMONGO-1735
void doesNotDeriveFieldSpecForNormalDomainType() {
assertThat(deriveQueryFromMethod("findPersonBy", new Object[0]).getFieldsObject()).isEqualTo(new Document());
assertThat(deriveQueryFromMethod("findPersonBy", new Object[0]).getFieldsObject()).isEmpty();
}
@Test // DATAMONGO-1345
@@ -173,7 +170,7 @@ class PartTreeMongoQueryUnitTests {
org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findAllBy");
assertThat(query.getFieldsObject()).isEqualTo(new Document());
assertThat(query.getFieldsObject()).isEmpty();
}
@Test // DATAMONGO-1865