Compare commits
54 Commits
issue/reac
...
1.6.2.RELE
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
afa05122d2 | ||
|
|
cf06a6527c | ||
|
|
0760a8b425 | ||
|
|
f65cd1d235 | ||
|
|
6f86208111 | ||
|
|
0c33f7e01a | ||
|
|
047df58724 | ||
|
|
9716a93576 | ||
|
|
03e5ebb741 | ||
|
|
6ac908b72e | ||
|
|
5f1049f2de | ||
|
|
07c79e8ba9 | ||
|
|
a94cca3494 | ||
|
|
90842844d4 | ||
|
|
f4c27407ac | ||
|
|
00e1ebb880 | ||
|
|
dd32479ad4 | ||
|
|
ce5fe50550 | ||
|
|
3a04bcb5d3 | ||
|
|
acfe6ccc2c | ||
|
|
3f3ec19364 | ||
|
|
24cbef9bc5 | ||
|
|
b5f74444de | ||
|
|
1b7e077296 | ||
|
|
7ebaf935d4 | ||
|
|
295d7579cb | ||
|
|
a99950d83e | ||
|
|
0bf4ee2711 | ||
|
|
5b8da8dd41 | ||
|
|
b56ca97f68 | ||
|
|
f183b5c7e6 | ||
|
|
1fdde8f0c3 | ||
|
|
b7c3e69653 | ||
|
|
e64d69b8f5 | ||
|
|
d81ed203db | ||
|
|
eae463622c | ||
|
|
6ba8144bca | ||
|
|
9d1c1a9fc5 | ||
|
|
f552ca8073 | ||
|
|
d7bd82c643 | ||
|
|
078cca83e3 | ||
|
|
93ae6815bd | ||
|
|
13dcb8cda1 | ||
|
|
a4497bcf8a | ||
|
|
6a82c47a4d | ||
|
|
8f8f5b7ce4 | ||
|
|
c683813a7a | ||
|
|
dbe983c3cb | ||
|
|
8e11fe84df | ||
|
|
9eb2856840 | ||
|
|
828b379f1f | ||
|
|
161fd8c09d | ||
|
|
dc037dfef6 | ||
|
|
c41653f9da |
12
pom.xml
12
pom.xml
@@ -5,7 +5,7 @@
|
||||
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.6.0.RELEASE</version>
|
||||
<version>1.6.2.RELEASE</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<name>Spring Data MongoDB</name>
|
||||
@@ -15,7 +15,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data.build</groupId>
|
||||
<artifactId>spring-data-parent</artifactId>
|
||||
<version>1.5.0.RELEASE</version>
|
||||
<version>1.5.2.RELEASE</version>
|
||||
<relativePath>../spring-data-build/parent/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -29,9 +29,9 @@
|
||||
<properties>
|
||||
<project.type>multi</project.type>
|
||||
<dist.id>spring-data-mongodb</dist.id>
|
||||
<springdata.commons>1.9.0.RELEASE</springdata.commons>
|
||||
<mongo>2.12.3</mongo>
|
||||
<mongo.osgi>2.12.3</mongo.osgi>
|
||||
<springdata.commons>1.9.2.RELEASE</springdata.commons>
|
||||
<mongo>2.12.5</mongo>
|
||||
<mongo.osgi>2.12.5</mongo.osgi>
|
||||
</properties>
|
||||
|
||||
<developers>
|
||||
@@ -116,7 +116,7 @@
|
||||
<id>mongo-snapshots</id>
|
||||
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
|
||||
</repository>
|
||||
</repositories>
|
||||
</repositories>
|
||||
|
||||
</profile>
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.6.0.RELEASE</version>
|
||||
<version>1.6.2.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
<name>Spring Data MongoDB - Cross-Store Support</name>
|
||||
|
||||
<properties>
|
||||
<jpa>1.0.0.Final</jpa>
|
||||
<jpa>2.0.0</jpa>
|
||||
<hibernate>3.6.10.Final</hibernate>
|
||||
</properties>
|
||||
|
||||
@@ -48,7 +48,7 @@
|
||||
<dependency>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb</artifactId>
|
||||
<version>1.6.0.RELEASE</version>
|
||||
<version>1.6.2.RELEASE</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -59,8 +59,8 @@
|
||||
|
||||
<!-- JPA -->
|
||||
<dependency>
|
||||
<groupId>org.hibernate.javax.persistence</groupId>
|
||||
<artifactId>hibernate-jpa-2.0-api</artifactId>
|
||||
<groupId>org.eclipse.persistence</groupId>
|
||||
<artifactId>javax.persistence</artifactId>
|
||||
<version>${jpa}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.6.0.RELEASE</version>
|
||||
<version>1.6.2.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.6.0.RELEASE</version>
|
||||
<version>1.6.2.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -11,18 +11,19 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>1.6.0.RELEASE</version>
|
||||
<version>1.6.2.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
<properties>
|
||||
<validation>1.0.0.GA</validation>
|
||||
<objenesis>1.3</objenesis>
|
||||
<equalsverifier>1.5</equalsverifier>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
|
||||
<!-- Spring -->
|
||||
<!-- Spring -->
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-tx</artifactId>
|
||||
@@ -50,7 +51,7 @@
|
||||
<artifactId>spring-expression</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- Spring Data -->
|
||||
<!-- Spring Data -->
|
||||
<dependency>
|
||||
<groupId>${project.groupId}</groupId>
|
||||
<artifactId>spring-data-commons</artifactId>
|
||||
@@ -144,6 +145,12 @@
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>nl.jqno.equalsverifier</groupId>
|
||||
<artifactId>equalsverifier</artifactId>
|
||||
<version>${equalsverifier}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -415,7 +415,9 @@ public interface MongoOperations {
|
||||
|
||||
/**
|
||||
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Will consider entity mapping
|
||||
* information to determine the collection the query is ran against.
|
||||
* information to determine the collection the query is ran against. Note, that MongoDB limits the number of results
|
||||
* by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of
|
||||
* results.
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
@@ -424,7 +426,9 @@ public interface MongoOperations {
|
||||
<T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}.
|
||||
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the
|
||||
* number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
|
||||
* particular number of results.
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
@@ -652,14 +656,28 @@ public interface MongoOperations {
|
||||
long count(Query query, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection.
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
|
||||
* must solely consist of document field references as we lack type information to map potential property references
|
||||
* onto document fields. TO make sure the query gets mapped, use {@link #count(Query, Class, String)}.
|
||||
*
|
||||
* @param query
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @return
|
||||
* @see #count(Query, Class, String)
|
||||
*/
|
||||
long count(Query query, String collectionName);
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}.
|
||||
*
|
||||
* @param query
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
long count(Query query, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Insert the object into the collection for the entity type of the object to save.
|
||||
* <p/>
|
||||
|
||||
@@ -641,7 +641,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
return count(query, null, collectionName);
|
||||
}
|
||||
|
||||
private long count(Query query, Class<?> entityClass, String collectionName) {
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String)
|
||||
*/
|
||||
public long count(Query query, Class<?> entityClass, String collectionName) {
|
||||
|
||||
Assert.hasText(collectionName);
|
||||
final DBObject dbObject = query == null ? null : queryMapper.getMappedObject(query.getQueryObject(),
|
||||
@@ -1007,8 +1011,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Calling update using query: " + queryObj + " and update: " + updateObj + " in collection: "
|
||||
+ collectionName);
|
||||
LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s",
|
||||
serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName));
|
||||
}
|
||||
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName,
|
||||
@@ -1187,7 +1191,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Remove using query: {} in collection: {}.", new Object[] { dboq, collection.getName() });
|
||||
LOGGER.debug("Remove using query: {} in collection: {}.", new Object[] { serializeToJsonSafely(dboq),
|
||||
collection.getName() });
|
||||
}
|
||||
|
||||
WriteResult wr = writeConcernToUse == null ? collection.remove(dboq) : collection.remove(dboq,
|
||||
@@ -1622,7 +1627,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), mappedFields, entityClass, collectionName));
|
||||
serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName));
|
||||
}
|
||||
|
||||
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback,
|
||||
@@ -1660,8 +1665,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
Class<T> entityClass) {
|
||||
EntityReader<? super T, DBObject> readerToUse = this.mongoConverter;
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findAndRemove using query: " + query + " fields: " + fields + " sort: " + sort + " for class: "
|
||||
+ entityClass + " in collection: " + collectionName);
|
||||
LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), fields, sort, entityClass, collectionName));
|
||||
}
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
return executeFindOneInternal(new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort),
|
||||
@@ -1685,8 +1690,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
DBObject mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findAndModify using query: " + mappedQuery + " fields: " + fields + " sort: " + sort
|
||||
+ " for class: " + entityClass + " and update: " + mappedUpdate + " in collection: " + collectionName);
|
||||
LOGGER.debug(String.format("findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s "
|
||||
+ "in collection: %s", serializeToJsonSafely(mappedQuery), fields, sort, entityClass,
|
||||
serializeToJsonSafely(mappedUpdate), collectionName));
|
||||
}
|
||||
|
||||
return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options),
|
||||
@@ -1995,13 +2001,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware {
|
||||
public DBObject doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
if (fields == null) {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findOne using query: " + query + " in db.collection: " + collection.getFullName());
|
||||
LOGGER.debug(String.format("findOne using query: %s in db.collection: %s", serializeToJsonSafely(query),
|
||||
collection.getFullName()));
|
||||
}
|
||||
return collection.findOne(query);
|
||||
} else {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findOne using query: " + query + " fields: " + fields + " in db.collection: "
|
||||
+ collection.getFullName());
|
||||
LOGGER.debug(String.format("findOne using query: %s fields: %s in db.collection: %s",
|
||||
serializeToJsonSafely(query), fields, collection.getFullName()));
|
||||
}
|
||||
return collection.findOne(query, fields);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -27,6 +27,7 @@ import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedFi
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -291,6 +292,19 @@ public class Aggregation {
|
||||
return Fields.from(field(name, target));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the{@code distanceField}. The
|
||||
* {@code distanceField} defines output field that contains the calculated distance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param distanceField must not be {@literal null} or empty.
|
||||
* @return
|
||||
* @since 1.7
|
||||
*/
|
||||
public static GeoNearOperation geoNear(NearQuery query, String distanceField) {
|
||||
return new GeoNearOperation(query, distanceField);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new {@link AggregationOptions.Builder}.
|
||||
*
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -84,6 +84,15 @@ public final class Fields implements Iterable<Field> {
|
||||
return new AggregationField(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link Field} with the given {@code name} and {@code target}.
|
||||
* <p>
|
||||
* The {@code target} is the name of the backing document field that will be aliased with {@code name}.
|
||||
*
|
||||
* @param name
|
||||
* @param target must not be {@literal null} or empty
|
||||
* @return
|
||||
*/
|
||||
public static Field field(String name, String target) {
|
||||
Assert.hasText(target, "Target must not be null or empty!");
|
||||
return new AggregationField(name, target);
|
||||
@@ -187,15 +196,24 @@ public final class Fields implements Iterable<Field> {
|
||||
private final String target;
|
||||
|
||||
/**
|
||||
* Creates an aggregation field with the given name. As no target is set explicitly, the name will be used as target
|
||||
* as well.
|
||||
* Creates an aggregation field with the given {@code name}.
|
||||
*
|
||||
* @param key
|
||||
* @see AggregationField#AggregationField(String, String).
|
||||
* @param name must not be {@literal null} or empty
|
||||
*/
|
||||
public AggregationField(String key) {
|
||||
this(key, null);
|
||||
public AggregationField(String name) {
|
||||
this(name, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an aggregation field with the given {@code name} and {@code target}.
|
||||
* <p>
|
||||
* The {@code name} serves as an alias for the actual backing document field denoted by {@code target}. If no target
|
||||
* is set explicitly, the name will be used as target.
|
||||
*
|
||||
* @param name must not be {@literal null} or empty
|
||||
* @param target
|
||||
*/
|
||||
public AggregationField(String name, String target) {
|
||||
|
||||
String nameToSet = cleanUp(name);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,17 +22,33 @@ import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Represents a {@code geoNear} aggregation operation.
|
||||
* <p>
|
||||
* We recommend to use the static factory method {@link Aggregation#geoNear(NearQuery, String)} instead of creating
|
||||
* instances of this class directly.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @since 1.3
|
||||
*/
|
||||
public class GeoNearOperation implements AggregationOperation {
|
||||
|
||||
private final NearQuery nearQuery;
|
||||
private final String distanceField;
|
||||
|
||||
public GeoNearOperation(NearQuery nearQuery) {
|
||||
/**
|
||||
* Creates a new {@link GeoNearOperation} from the given {@link NearQuery} and the given distance field. The
|
||||
* {@code distanceField} defines output field that contains the calculated distance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param distanceField must not be {@literal null}.
|
||||
*/
|
||||
public GeoNearOperation(NearQuery nearQuery, String distanceField) {
|
||||
|
||||
Assert.notNull(nearQuery, "NearQuery must not be null.");
|
||||
Assert.hasLength(distanceField, "Distance field must not be null or empty.");
|
||||
|
||||
Assert.notNull(nearQuery);
|
||||
this.nearQuery = nearQuery;
|
||||
this.distanceField = distanceField;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -41,6 +57,10 @@ public class GeoNearOperation implements AggregationOperation {
|
||||
*/
|
||||
@Override
|
||||
public DBObject toDBObject(AggregationOperationContext context) {
|
||||
return new BasicDBObject("$geoNear", context.getMappedObject(nearQuery.toDBObject()));
|
||||
|
||||
BasicDBObject command = (BasicDBObject) context.getMappedObject(nearQuery.toDBObject());
|
||||
command.put("distanceField", distanceField);
|
||||
|
||||
return new BasicDBObject("$geoNear", command);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -31,6 +31,9 @@ import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $group}-operation.
|
||||
* <p>
|
||||
* We recommend to use the static factory method {@link Aggregation#group(Fields)} instead of creating instances of this
|
||||
* class directly.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/group/#stage._S_group
|
||||
* @author Sebastian Herold
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -21,7 +21,10 @@ import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the {@code $limit}-operation
|
||||
* Encapsulates the {@code $limit}-operation.
|
||||
* <p>
|
||||
* We recommend to use the static factory method {@link Aggregation#limit(long)} instead of creating instances of this
|
||||
* class directly.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/limit/
|
||||
* @author Thomas Darimont
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,7 +22,11 @@ import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the {@code $match}-operation
|
||||
* Encapsulates the {@code $match}-operation.
|
||||
* <p>
|
||||
* We recommend to use the static factory method
|
||||
* {@link Aggregation#match(org.springframework.data.mongodb.core.query.Criteria)} instead of creating instances of this
|
||||
* class directly.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/match/
|
||||
* @author Sebastian Herold
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -28,10 +28,13 @@ import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $project}-operation. Projection of field to be used in an
|
||||
* {@link Aggregation}. A projection is similar to a {@link Field} inclusion/exclusion but more powerful. It can
|
||||
* generate new fields, change values of given field etc.
|
||||
* Encapsulates the aggregation framework {@code $project}-operation.
|
||||
* <p>
|
||||
* Projection of field to be used in an {@link Aggregation}. A projection is similar to a {@link Field}
|
||||
* inclusion/exclusion but more powerful. It can generate new fields, change values of given field etc.
|
||||
* <p>
|
||||
* We recommend to use the static factory method {@link Aggregation#project(Fields)} instead of creating instances of
|
||||
* this class directly.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/project/
|
||||
* @author Tobias Trelle
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,6 +22,9 @@ import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $skip}-operation.
|
||||
* <p>
|
||||
* We recommend to use the static factory method {@link Aggregation#skip(int)} instead of creating instances of this
|
||||
* class directly.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/skip/
|
||||
* @author Thomas Darimont
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -26,6 +26,9 @@ import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $sort}-operation.
|
||||
* <p>
|
||||
* We recommend to use the static factory method {@link Aggregation#sort(Direction, String...)} instead of creating
|
||||
* instances of this class directly.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/sort/#pipe._S_sort
|
||||
* @author Thomas Darimont
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -23,6 +23,9 @@ import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $unwind}-operation.
|
||||
* <p>
|
||||
* We recommend to use the static factory method {@link Aggregation#unwind(String)} instead of creating instances of
|
||||
* this class directly.
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/aggregation/unwind/#pipe._S_unwind
|
||||
* @author Thomas Darimont
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,14 +17,15 @@ package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -71,10 +72,13 @@ public class CustomConversions {
|
||||
private final Set<ConvertiblePair> writingPairs;
|
||||
private final Set<Class<?>> customSimpleTypes;
|
||||
private final SimpleTypeHolder simpleTypeHolder;
|
||||
private final ConcurrentMap<ConvertiblePair, CacheValue> customReadTargetTypes;
|
||||
|
||||
private final List<Object> converters;
|
||||
|
||||
private final Map<ConvertiblePair, CacheValue> customReadTargetTypes;
|
||||
private final Map<ConvertiblePair, CacheValue> customWriteTargetTypes;
|
||||
private final Map<Class<?>, CacheValue> rawWriteTargetTypes;
|
||||
|
||||
/**
|
||||
* Creates an empty {@link CustomConversions} object.
|
||||
*/
|
||||
@@ -94,7 +98,9 @@ public class CustomConversions {
|
||||
this.readingPairs = new LinkedHashSet<ConvertiblePair>();
|
||||
this.writingPairs = new LinkedHashSet<ConvertiblePair>();
|
||||
this.customSimpleTypes = new HashSet<Class<?>>();
|
||||
this.customReadTargetTypes = new ConcurrentHashMap<GenericConverter.ConvertiblePair, CacheValue>();
|
||||
this.customReadTargetTypes = new ConcurrentHashMap<ConvertiblePair, CacheValue>();
|
||||
this.customWriteTargetTypes = new ConcurrentHashMap<ConvertiblePair, CacheValue>();
|
||||
this.rawWriteTargetTypes = new ConcurrentHashMap<Class<?>, CacheValue>();
|
||||
|
||||
List<Object> toRegister = new ArrayList<Object>();
|
||||
|
||||
@@ -238,70 +244,103 @@ public class CustomConversions {
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public Class<?> getCustomWriteTarget(Class<?> sourceType) {
|
||||
return getCustomWriteTarget(sourceType, null);
|
||||
public Class<?> getCustomWriteTarget(final Class<?> sourceType) {
|
||||
|
||||
return getOrCreateAndCache(sourceType, rawWriteTargetTypes, new Producer() {
|
||||
|
||||
@Override
|
||||
public Class<?> get() {
|
||||
return getCustomTarget(sourceType, null, writingPairs);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the target type we can write an onject of the given source type to. The returned type might be a subclass
|
||||
* oth the given expected type though. If {@code expectedTargetType} is {@literal null} we will simply return the
|
||||
* first target type matching or {@literal null} if no conversion can be found.
|
||||
* Returns the target type we can readTargetWriteLocl an inject of the given source type to. The returned type might
|
||||
* be a subclass of the given expected type though. If {@code expectedTargetType} is {@literal null} we will simply
|
||||
* return the first target type matching or {@literal null} if no conversion can be found.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @param requestedTargetType
|
||||
* @return
|
||||
*/
|
||||
public Class<?> getCustomWriteTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
public Class<?> getCustomWriteTarget(final Class<?> sourceType, final Class<?> requestedTargetType) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
if (requestedTargetType == null) {
|
||||
return getCustomWriteTarget(sourceType);
|
||||
}
|
||||
|
||||
return getCustomTarget(sourceType, requestedTargetType, writingPairs);
|
||||
return getOrCreateAndCache(new ConvertiblePair(sourceType, requestedTargetType), customWriteTargetTypes,
|
||||
new Producer() {
|
||||
|
||||
@Override
|
||||
public Class<?> get() {
|
||||
return getCustomTarget(sourceType, requestedTargetType, writingPairs);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether we have a custom conversion registered to write into a Mongo native type. The returned type might
|
||||
* be a subclass of the given expected type though.
|
||||
* Returns whether we have a custom conversion registered to readTargetWriteLocl into a Mongo native type. The
|
||||
* returned type might be a subclass of the given expected type though.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public boolean hasCustomWriteTarget(Class<?> sourceType) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
return hasCustomWriteTarget(sourceType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether we have a custom conversion registered to write an object of the given source type into an object
|
||||
* of the given Mongo native target type.
|
||||
* Returns whether we have a custom conversion registered to readTargetWriteLocl an object of the given source type
|
||||
* into an object of the given Mongo native target type.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}.
|
||||
* @param requestedTargetType
|
||||
* @return
|
||||
*/
|
||||
public boolean hasCustomWriteTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
return getCustomWriteTarget(sourceType, requestedTargetType) != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether we have a custom conversion registered to read the given source into the given target type.
|
||||
* Returns whether we have a custom conversion registered to readTargetReadLock the given source into the given target
|
||||
* type.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}
|
||||
* @param requestedTargetType must not be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
public boolean hasCustomReadTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
Assert.notNull(requestedTargetType);
|
||||
|
||||
return getCustomReadTarget(sourceType, requestedTargetType) != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Inspects the given {@link ConvertiblePair} for ones that have a source compatible type as source. Additionally
|
||||
* Returns the actual target type for the given {@code sourceType} and {@code requestedTargetType}. Note that the
|
||||
* returned {@link Class} could be an assignable type to the given {@code requestedTargetType}.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}.
|
||||
* @param requestedTargetType can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private Class<?> getCustomReadTarget(final Class<?> sourceType, final Class<?> requestedTargetType) {
|
||||
|
||||
if (requestedTargetType == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return getOrCreateAndCache(new ConvertiblePair(sourceType, requestedTargetType), customReadTargetTypes,
|
||||
new Producer() {
|
||||
|
||||
@Override
|
||||
public Class<?> get() {
|
||||
return getCustomTarget(sourceType, requestedTargetType, readingPairs);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Inspects the given {@link ConvertiblePair}s for ones that have a source compatible type as source. Additionally
|
||||
* checks assignability of the target type if one is given.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}.
|
||||
@@ -310,11 +349,15 @@ public class CustomConversions {
|
||||
* @return
|
||||
*/
|
||||
private static Class<?> getCustomTarget(Class<?> sourceType, Class<?> requestedTargetType,
|
||||
Iterable<ConvertiblePair> pairs) {
|
||||
Collection<ConvertiblePair> pairs) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
Assert.notNull(pairs);
|
||||
|
||||
if (requestedTargetType != null && pairs.contains(new ConvertiblePair(sourceType, requestedTargetType))) {
|
||||
return requestedTargetType;
|
||||
}
|
||||
|
||||
for (ConvertiblePair typePair : pairs) {
|
||||
if (typePair.getSourceType().isAssignableFrom(sourceType)) {
|
||||
Class<?> targetType = typePair.getTargetType();
|
||||
@@ -328,32 +371,31 @@ public class CustomConversions {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the actual target type for the given {@code sourceType} and {@code requestedTargetType}. Note that the
|
||||
* returned {@link Class} could be an assignable type to the given {@code requestedTargetType}.
|
||||
* Will try to find a value for the given key in the given cache or produce one using the given {@link Producer} and
|
||||
* store it in the cache.
|
||||
*
|
||||
* @param sourceType must not be {@literal null}.
|
||||
* @param requestedTargetType can be {@literal null}.
|
||||
* @param key the key to lookup a potentially existing value, must not be {@literal null}.
|
||||
* @param cache the cache to find the value in, must not be {@literal null}.
|
||||
* @param producer the {@link Producer} to create values to cache, must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private Class<?> getCustomReadTarget(Class<?> sourceType, Class<?> requestedTargetType) {
|
||||
private static <T> Class<?> getOrCreateAndCache(T key, Map<T, CacheValue> cache, Producer producer) {
|
||||
|
||||
Assert.notNull(sourceType);
|
||||
CacheValue cacheValue = cache.get(key);
|
||||
|
||||
if (requestedTargetType == null) {
|
||||
return null;
|
||||
if (cacheValue != null) {
|
||||
return cacheValue.getType();
|
||||
}
|
||||
|
||||
ConvertiblePair lookupKey = new ConvertiblePair(sourceType, requestedTargetType);
|
||||
CacheValue readTargetTypeValue = customReadTargetTypes.get(lookupKey);
|
||||
Class<?> type = producer.get();
|
||||
cache.put(key, CacheValue.of(type));
|
||||
|
||||
if (readTargetTypeValue != null) {
|
||||
return readTargetTypeValue.getType();
|
||||
}
|
||||
return type;
|
||||
}
|
||||
|
||||
readTargetTypeValue = CacheValue.of(getCustomTarget(sourceType, requestedTargetType, readingPairs));
|
||||
CacheValue cacheValue = customReadTargetTypes.putIfAbsent(lookupKey, readTargetTypeValue);
|
||||
private interface Producer {
|
||||
|
||||
return cacheValue != null ? cacheValue.getType() : readTargetTypeValue.getType();
|
||||
Class<?> get();
|
||||
}
|
||||
|
||||
@WritingConverter
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -34,7 +34,7 @@ import com.mongodb.DBObject;
|
||||
*/
|
||||
class DBObjectAccessor {
|
||||
|
||||
private final DBObject dbObject;
|
||||
private final BasicDBObject dbObject;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DBObjectAccessor} for the given {@link DBObject}.
|
||||
@@ -46,7 +46,7 @@ class DBObjectAccessor {
|
||||
Assert.notNull(dbObject, "DBObject must not be null!");
|
||||
Assert.isInstanceOf(BasicDBObject.class, dbObject, "Given DBObject must be a BasicDBObject!");
|
||||
|
||||
this.dbObject = dbObject;
|
||||
this.dbObject = (BasicDBObject) dbObject;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -62,6 +62,11 @@ class DBObjectAccessor {
|
||||
Assert.notNull(prop, "MongoPersistentProperty must not be null!");
|
||||
String fieldName = prop.getFieldName();
|
||||
|
||||
if (!fieldName.contains(".")) {
|
||||
dbObject.put(fieldName, value);
|
||||
return;
|
||||
}
|
||||
|
||||
Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
|
||||
DBObject dbObject = this.dbObject;
|
||||
|
||||
@@ -87,12 +92,16 @@ class DBObjectAccessor {
|
||||
* @param property must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public Object get(MongoPersistentProperty property) {
|
||||
|
||||
String fieldName = property.getFieldName();
|
||||
|
||||
if (!fieldName.contains(".")) {
|
||||
return this.dbObject.get(fieldName);
|
||||
}
|
||||
|
||||
Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
|
||||
Map<Object, Object> source = this.dbObject.toMap();
|
||||
Map<String, Object> source = this.dbObject;
|
||||
Object result = null;
|
||||
|
||||
while (source != null && parts.hasNext()) {
|
||||
@@ -108,14 +117,14 @@ class DBObjectAccessor {
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private Map<Object, Object> getAsMap(Object source) {
|
||||
private Map<String, Object> getAsMap(Object source) {
|
||||
|
||||
if (source instanceof BasicDBObject) {
|
||||
return ((DBObject) source).toMap();
|
||||
return (BasicDBObject) source;
|
||||
}
|
||||
|
||||
if (source instanceof Map) {
|
||||
return (Map<Object, Object>) source;
|
||||
return (Map<String, Object>) source;
|
||||
}
|
||||
|
||||
return null;
|
||||
|
||||
@@ -178,7 +178,7 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
static class LazyLoadingInterceptor implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor,
|
||||
Serializable {
|
||||
|
||||
private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD;
|
||||
private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD;
|
||||
|
||||
private final DbRefResolverCallback callback;
|
||||
private final MongoPersistentProperty property;
|
||||
@@ -192,6 +192,7 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
try {
|
||||
INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget");
|
||||
TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef");
|
||||
FINALIZE_METHOD = Object.class.getDeclaredMethod("finalize");
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
@@ -255,6 +256,11 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
if (ReflectionUtils.isHashCodeMethod(method)) {
|
||||
return proxyHashCode(proxy);
|
||||
}
|
||||
|
||||
// DATAMONGO-1076 - finalize methods should not trigger proxy initialization
|
||||
if (FINALIZE_METHOD.equals(method)) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
Object target = ensureResolved();
|
||||
|
||||
@@ -284,7 +284,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
public void doWithAssociation(Association<MongoPersistentProperty> association) {
|
||||
|
||||
final MongoPersistentProperty property = association.getInverse();
|
||||
Object value = dbo.get(property.getName());
|
||||
Object value = dbo.get(property.getFieldName());
|
||||
|
||||
if (value == null) {
|
||||
return;
|
||||
@@ -586,7 +586,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
if (conversions.isSimpleType(key.getClass())) {
|
||||
|
||||
String simpleKey = potentiallyEscapeMapKey(key.toString());
|
||||
String simpleKey = prepareMapKey(key.toString());
|
||||
dbObject.put(simpleKey, value != null ? createDBRef(value, property) : null);
|
||||
|
||||
} else {
|
||||
@@ -638,12 +638,13 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
protected DBObject writeMapInternal(Map<Object, Object> obj, DBObject dbo, TypeInformation<?> propertyType) {
|
||||
|
||||
for (Map.Entry<Object, Object> entry : obj.entrySet()) {
|
||||
|
||||
Object key = entry.getKey();
|
||||
Object val = entry.getValue();
|
||||
|
||||
if (conversions.isSimpleType(key.getClass())) {
|
||||
// Don't use conversion service here as removal of ObjectToString converter results in some primitive types not
|
||||
// being convertable
|
||||
String simpleKey = potentiallyEscapeMapKey(key.toString());
|
||||
|
||||
String simpleKey = prepareMapKey(key);
|
||||
if (val == null || conversions.isSimpleType(val.getClass())) {
|
||||
writeSimpleInternal(val, dbo, simpleKey);
|
||||
} else if (val instanceof Collection || val.getClass().isArray()) {
|
||||
@@ -664,6 +665,21 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return dbo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepares the given {@link Map} key to be converted into a {@link String}. Will invoke potentially registered custom
|
||||
* conversions and escape dots from the result as they're not supported as {@link Map} key in MongoDB.
|
||||
*
|
||||
* @param key must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private String prepareMapKey(Object key) {
|
||||
|
||||
Assert.notNull(key, "Map key must not be null!");
|
||||
|
||||
String convertedKey = potentiallyConvertMapKey(key);
|
||||
return potentiallyEscapeMapKey(convertedKey);
|
||||
}
|
||||
|
||||
/**
|
||||
* Potentially replaces dots in the given map key with the configured map key replacement if configured or aborts
|
||||
* conversion if none is configured.
|
||||
@@ -686,6 +702,22 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return source.replaceAll("\\.", mapKeyDotReplacement);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link String} representation of the given {@link Map} key
|
||||
*
|
||||
* @param key
|
||||
* @return
|
||||
*/
|
||||
private String potentiallyConvertMapKey(Object key) {
|
||||
|
||||
if (key instanceof String) {
|
||||
return (String) key;
|
||||
}
|
||||
|
||||
return conversions.hasCustomWriteTarget(key.getClass(), String.class) ? (String) getPotentiallyConvertedSimpleWrite(key)
|
||||
: key.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Translates the map key replacements in the given key just read with a dot in case a map key replacement has been
|
||||
* configured.
|
||||
@@ -766,7 +798,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
private Object getPotentiallyConvertedSimpleRead(Object value, Class<?> target) {
|
||||
|
||||
if (value == null || target == null) {
|
||||
if (value == null || target == null || target.isAssignableFrom(value.getClass())) {
|
||||
return value;
|
||||
}
|
||||
|
||||
@@ -778,7 +810,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return Enum.valueOf((Class<Enum>) target, value.toString());
|
||||
}
|
||||
|
||||
return target.isAssignableFrom(value.getClass()) ? value : conversionService.convert(value, target);
|
||||
return conversionService.convert(value, target);
|
||||
}
|
||||
|
||||
protected DBRef createDBRef(Object target, MongoPersistentProperty property) {
|
||||
@@ -944,7 +976,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return getPotentiallyConvertedSimpleWrite(obj);
|
||||
}
|
||||
|
||||
TypeInformation<?> typeHint = typeInformation == null ? ClassTypeInformation.OBJECT : typeInformation;
|
||||
TypeInformation<?> typeHint = typeInformation;
|
||||
|
||||
if (obj instanceof BasicDBList) {
|
||||
return maybeConvertList((BasicDBList) obj, typeHint);
|
||||
|
||||
@@ -334,7 +334,8 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = documentField.getPropertyEntity();
|
||||
return entity.hasIdProperty() && entity.getIdProperty().getActualType().isAssignableFrom(type);
|
||||
return entity.hasIdProperty()
|
||||
&& (type.equals(DBRef.class) || entity.getIdProperty().getActualType().isAssignableFrom(type));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -382,10 +383,16 @@ public class QueryMapper {
|
||||
*/
|
||||
protected Object convertAssociation(Object source, MongoPersistentProperty property) {
|
||||
|
||||
if (property == null || source == null || source instanceof DBRef || source instanceof DBObject) {
|
||||
if (property == null || source == null || source instanceof DBObject) {
|
||||
return source;
|
||||
}
|
||||
|
||||
if (source instanceof DBRef) {
|
||||
|
||||
DBRef ref = (DBRef) source;
|
||||
return new DBRef(ref.getDB(), ref.getRef(), convertId(ref.getId()));
|
||||
}
|
||||
|
||||
if (source instanceof Iterable) {
|
||||
BasicDBList result = new BasicDBList();
|
||||
for (Object element : (Iterable<?>) source) {
|
||||
@@ -457,13 +464,20 @@ public class QueryMapper {
|
||||
*/
|
||||
public Object convertId(Object id) {
|
||||
|
||||
try {
|
||||
return conversionService.convert(id, ObjectId.class);
|
||||
} catch (ConversionException e) {
|
||||
// Ignore
|
||||
if (id == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return delegateConvertToMongoType(id, null);
|
||||
if (id instanceof String) {
|
||||
return ObjectId.isValid(id.toString()) ? conversionService.convert(id, ObjectId.class) : id;
|
||||
}
|
||||
|
||||
try {
|
||||
return conversionService.canConvert(id.getClass(), ObjectId.class) ? conversionService
|
||||
.convert(id, ObjectId.class) : delegateConvertToMongoType(id, null);
|
||||
} catch (ConversionException o_O) {
|
||||
return delegateConvertToMongoType(id, null);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -785,7 +799,7 @@ public class QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
public String getMappedKey() {
|
||||
return path == null ? name : path.toDotPath(getPropertyConverter());
|
||||
return path == null ? name : path.toDotPath(isAssociation() ? getAssociationConverter() : getPropertyConverter());
|
||||
}
|
||||
|
||||
protected PersistentPropertyPath<MongoPersistentProperty> getPath() {
|
||||
@@ -837,5 +851,56 @@ public class QueryMapper {
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return PropertyToFieldNameConverter.INSTANCE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the {@link Converter} to use for creating the mapped key of an association. Default implementation is
|
||||
* {@link AssociationConverter}.
|
||||
*
|
||||
* @return
|
||||
* @since 1.7
|
||||
*/
|
||||
protected Converter<MongoPersistentProperty, String> getAssociationConverter() {
|
||||
return new AssociationConverter(getAssociation());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Converter to skip all properties after an association property was rendered.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
protected static class AssociationConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final MongoPersistentProperty property;
|
||||
private boolean associationFound;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AssociationConverter} for the given {@link Association}.
|
||||
*
|
||||
* @param association must not be {@literal null}.
|
||||
*/
|
||||
public AssociationConverter(Association<MongoPersistentProperty> association) {
|
||||
|
||||
Assert.notNull(association, "Association must not be null!");
|
||||
this.property = association.getInverse();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty source) {
|
||||
|
||||
if (associationFound) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (property.equals(source)) {
|
||||
associationFound = true;
|
||||
}
|
||||
|
||||
return source.getFieldName();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -194,47 +194,48 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return isAssociation() ? new AssociationConverter(getAssociation()) : new UpdatePropertyConverter(key);
|
||||
return new UpdatePropertyConverter(key);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getAssociationConverter()
|
||||
*/
|
||||
@Override
|
||||
protected Converter<MongoPersistentProperty, String> getAssociationConverter() {
|
||||
return new UpdateAssociationConverter(getAssociation(), key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converter to skip all properties after an association property was rendered.
|
||||
* Special mapper handling positional parameter {@literal $} within property names.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
*/
|
||||
private static class AssociationConverter implements Converter<MongoPersistentProperty, String> {
|
||||
private static class UpdateKeyMapper {
|
||||
|
||||
private final MongoPersistentProperty property;
|
||||
private boolean associationFound;
|
||||
private final Iterator<String> iterator;
|
||||
|
||||
protected UpdateKeyMapper(String rawKey) {
|
||||
|
||||
Assert.hasText(rawKey, "Key must not be null or empty!");
|
||||
|
||||
this.iterator = Arrays.asList(rawKey.split("\\.")).iterator();
|
||||
this.iterator.next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link AssociationConverter} for the given {@link Association}.
|
||||
* Maps the property name while retaining potential positional operator {@literal $}.
|
||||
*
|
||||
* @param association must not be {@literal null}.
|
||||
* @param property
|
||||
* @return
|
||||
*/
|
||||
public AssociationConverter(Association<MongoPersistentProperty> association) {
|
||||
protected String mapPropertyName(MongoPersistentProperty property) {
|
||||
|
||||
Assert.notNull(association, "Association must not be null!");
|
||||
this.property = association.getInverse();
|
||||
String mappedName = PropertyToFieldNameConverter.INSTANCE.convert(property);
|
||||
return iterator.hasNext() && iterator.next().equals("$") ? String.format("%s.$", mappedName) : mappedName;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty source) {
|
||||
|
||||
if (associationFound) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (property.equals(source)) {
|
||||
associationFound = true;
|
||||
}
|
||||
|
||||
return source.getFieldName();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -242,10 +243,11 @@ public class UpdateMapper extends QueryMapper {
|
||||
* contained in the source update key.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private static class UpdatePropertyConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final Iterator<String> iterator;
|
||||
private final UpdateKeyMapper mapper;
|
||||
|
||||
/**
|
||||
* Creates a new {@link UpdatePropertyConverter} with the given update key.
|
||||
@@ -256,8 +258,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
|
||||
Assert.hasText(updateKey, "Update key must not be null or empty!");
|
||||
|
||||
this.iterator = Arrays.asList(updateKey.split("\\.")).iterator();
|
||||
this.iterator.next();
|
||||
this.mapper = new UpdateKeyMapper(updateKey);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -266,9 +267,37 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty property) {
|
||||
return mapper.mapPropertyName(property);
|
||||
}
|
||||
}
|
||||
|
||||
String mappedName = PropertyToFieldNameConverter.INSTANCE.convert(property);
|
||||
return iterator.hasNext() && iterator.next().equals("$") ? String.format("%s.$", mappedName) : mappedName;
|
||||
/**
|
||||
* {@link Converter} retaining positional parameter {@literal $} for {@link Association}s.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
protected static class UpdateAssociationConverter extends AssociationConverter {
|
||||
|
||||
private final UpdateKeyMapper mapper;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AssociationConverter} for the given {@link Association}.
|
||||
*
|
||||
* @param association must not be {@literal null}.
|
||||
*/
|
||||
public UpdateAssociationConverter(Association<MongoPersistentProperty> association, String key) {
|
||||
|
||||
super(association);
|
||||
this.mapper = new UpdateKeyMapper(key);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty source) {
|
||||
return super.convert(source) == null ? null : mapper.mapPropertyName(source);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
* Copyright 2014-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -220,7 +220,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
}
|
||||
|
||||
private void appendTextIndexInformation(final String dotPath,
|
||||
final TextIndexDefinitionBuilder indexDefinitionBuilder, MongoPersistentEntity<?> entity,
|
||||
final TextIndexDefinitionBuilder indexDefinitionBuilder, final MongoPersistentEntity<?> entity,
|
||||
final TextIndexIncludeOptions includeOptions, final CycleGuard guard) {
|
||||
|
||||
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
@@ -230,7 +230,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
guard.protect(persistentProperty, dotPath);
|
||||
|
||||
if (persistentProperty.isLanguageProperty()) {
|
||||
if (persistentProperty.isExplicitLanguageProperty() && !StringUtils.hasText(dotPath)) {
|
||||
indexDefinitionBuilder.withLanguageOverride(persistentProperty.getFieldName());
|
||||
}
|
||||
|
||||
@@ -257,6 +257,10 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
mappingContext.getPersistentEntity(persistentProperty.getActualType()), optionsForNestedType, guard);
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.warn(e.getMessage(), e);
|
||||
} catch (InvalidDataAccessApiUsageException e) {
|
||||
LOGGER.warn(
|
||||
String.format("Potentially invald index structure discovered. Breaking operation for %s.",
|
||||
entity.getName()), e);
|
||||
}
|
||||
} else if (includeOptions.isForce() || indexed != null) {
|
||||
indexDefinitionBuilder.onField(propertyDotPath, weight);
|
||||
@@ -462,8 +466,9 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
for (Path existingPath : paths) {
|
||||
|
||||
if (existingPath.cycles(property, path)) {
|
||||
if (existingPath.cycles(property, path) && property.isEntity()) {
|
||||
paths.add(new Path(property, path));
|
||||
|
||||
throw new CyclicPropertyReferenceException(property.getFieldName(), property.getOwner().getType(),
|
||||
existingPath.getPath());
|
||||
}
|
||||
@@ -536,7 +541,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return false;
|
||||
}
|
||||
|
||||
return path.contains(this.path);
|
||||
return path.equals(this.path) || path.contains(this.path + ".") || path.contains("." + this.path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,6 +35,7 @@ import org.springframework.data.mongodb.MongoCollectionUtils;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.expression.Expression;
|
||||
import org.springframework.expression.ParserContext;
|
||||
import org.springframework.expression.common.LiteralExpression;
|
||||
import org.springframework.expression.spel.standard.SpelExpressionParser;
|
||||
import org.springframework.expression.spel.support.StandardEvaluationContext;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -53,32 +54,37 @@ import org.springframework.util.StringUtils;
|
||||
public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, MongoPersistentProperty> implements
|
||||
MongoPersistentEntity<T>, ApplicationContextAware {
|
||||
|
||||
private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected! Both %s and %s map to the same field name %s! Disambiguate using @DocumentField annotation!";
|
||||
private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected! Both %s and %s map to the same field name %s! Disambiguate using @Field annotation!";
|
||||
private static final SpelExpressionParser PARSER = new SpelExpressionParser();
|
||||
|
||||
private final String collection;
|
||||
private final String language;
|
||||
private final SpelExpressionParser parser;
|
||||
|
||||
private final StandardEvaluationContext context;
|
||||
private final Expression expression;
|
||||
|
||||
/**
|
||||
* Creates a new {@link BasicMongoPersistentEntity} with the given {@link TypeInformation}. Will default the
|
||||
* collection name to the entities simple type name.
|
||||
*
|
||||
* @param typeInformation
|
||||
* @param typeInformation must not be {@literal null}.
|
||||
*/
|
||||
public BasicMongoPersistentEntity(TypeInformation<T> typeInformation) {
|
||||
|
||||
super(typeInformation, MongoPersistentPropertyComparator.INSTANCE);
|
||||
|
||||
this.parser = new SpelExpressionParser();
|
||||
this.context = new StandardEvaluationContext();
|
||||
|
||||
Class<?> rawType = typeInformation.getType();
|
||||
String fallback = MongoCollectionUtils.getPreferredCollectionName(rawType);
|
||||
|
||||
if (rawType.isAnnotationPresent(Document.class)) {
|
||||
Document d = rawType.getAnnotation(Document.class);
|
||||
this.collection = StringUtils.hasText(d.collection()) ? d.collection() : fallback;
|
||||
this.language = StringUtils.hasText(d.language()) ? d.language() : "";
|
||||
Document document = rawType.getAnnotation(Document.class);
|
||||
|
||||
this.expression = detectExpression(document);
|
||||
this.context = new StandardEvaluationContext();
|
||||
|
||||
if (document != null) {
|
||||
|
||||
this.collection = StringUtils.hasText(document.collection()) ? document.collection() : fallback;
|
||||
this.language = StringUtils.hasText(document.language()) ? document.language() : "";
|
||||
} else {
|
||||
this.collection = fallback;
|
||||
this.language = "";
|
||||
@@ -101,8 +107,7 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#getCollection()
|
||||
*/
|
||||
public String getCollection() {
|
||||
Expression expression = parser.parseExpression(collection, ParserContext.TEMPLATE_EXPRESSION);
|
||||
return expression.getValue(context, String.class);
|
||||
return expression == null ? collection : expression.getValue(context, String.class);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -236,6 +241,31 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a SpEL {@link Expression} for the collection String expressed in the given {@link Document} annotation if
|
||||
* present or {@literal null} otherwise. Will also return {@literal null} if the collection {@link String} evaluates
|
||||
* to a {@link LiteralExpression} (indicating that no subsequent evaluation is necessary).
|
||||
*
|
||||
* @param document can be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
private static Expression detectExpression(Document document) {
|
||||
|
||||
if (document == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
String collection = document.collection();
|
||||
|
||||
if (!StringUtils.hasText(collection)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Expression expression = PARSER.parseExpression(document.collection(), ParserContext.TEMPLATE_EXPRESSION);
|
||||
|
||||
return expression instanceof LiteralExpression ? null : expression;
|
||||
}
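As a hedged illustration of what the cached expression enables (not itself part of this change), an entity can derive its collection name from a SpEL template; the `tenantProvider` bean referenced below is an assumption made for the example.

```java
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;

// The collection attribute is parsed once as a SpEL template expression. A plain literal such as
// "orders" yields a LiteralExpression, so getCollection() keeps returning the cached String and
// never re-evaluates; a real expression like the one below is evaluated on each call.
@Document(collection = "#{@tenantProvider.tenantId + '_orders'}")
class Order {

	@Id String id;
	double total;
}
```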
|
||||
|
||||
/**
|
||||
* Handler to collect {@link MongoPersistentProperty} instances and check that each of them is mapped to a distinct
|
||||
* field name.
|
||||
@@ -284,7 +314,7 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
|
||||
private void potentiallyAssertLanguageType(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
if (persistentProperty.isLanguageProperty()) {
|
||||
if (persistentProperty.isExplicitLanguageProperty()) {
|
||||
assertPropertyType(persistentProperty, String.class);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -190,9 +190,18 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
*/
|
||||
@Override
|
||||
public boolean isLanguageProperty() {
|
||||
return getFieldName().equals(LANGUAGE_FIELD_NAME) || isAnnotationPresent(Language.class);
|
||||
return getFieldName().equals(LANGUAGE_FIELD_NAME) || isExplicitLanguageProperty();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isExplicitLanguageProperty()
|
||||
*/
|
||||
@Override
|
||||
public boolean isExplicitLanguageProperty() {
|
||||
return isAnnotationPresent(Language.class);
|
||||
};
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isTextScoreProperty()
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -31,6 +31,8 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
|
||||
private Boolean isIdProperty;
|
||||
private Boolean isAssociation;
|
||||
private String fieldName;
|
||||
private Boolean usePropertyAccess;
|
||||
private Boolean isTransient;
|
||||
|
||||
/**
|
||||
* Creates a new {@link CachingMongoPersistentProperty}.
|
||||
@@ -85,4 +87,32 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
|
||||
|
||||
return this.fieldName;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.model.AnnotationBasedPersistentProperty#usePropertyAccess()
|
||||
*/
|
||||
@Override
|
||||
public boolean usePropertyAccess() {
|
||||
|
||||
if (this.usePropertyAccess == null) {
|
||||
this.usePropertyAccess = super.usePropertyAccess();
|
||||
}
|
||||
|
||||
return this.usePropertyAccess;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.model.AnnotationBasedPersistentProperty#isTransient()
|
||||
*/
|
||||
@Override
|
||||
public boolean isTransient() {
|
||||
|
||||
if (this.isTransient == null) {
|
||||
this.isTransient = super.isTransient();
|
||||
}
|
||||
|
||||
return this.isTransient;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -61,14 +61,22 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
boolean isExplicitIdProperty();
|
||||
|
||||
/**
|
||||
* Returns whether the property indicates the documents language either by having a {@link #getFieldName()} equal to
|
||||
* {@literal language} or being annotated with {@link Language}.
|
||||
* Returns true whether the property indicates the documents language either by having a {@link #getFieldName()} equal
|
||||
* to {@literal language} or being annotated with {@link Language}.
|
||||
*
|
||||
* @return
|
||||
* @since 1.6
|
||||
*/
|
||||
boolean isLanguageProperty();
|
||||
|
||||
/**
|
||||
* Returns true when the property is annotated with {@link Language}.
|
||||
*
|
||||
* @return
|
||||
* @since 1.6.1
|
||||
*/
|
||||
boolean isExplicitLanguageProperty();
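For orientation, a hedged sketch of how the two flavours above surface on a mapped document; the `BlogPost` type and its fields are illustrative and not part of this change.

```java
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.TextIndexed;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.Language;

@Document
class BlogPost {

	@Id String id;

	@TextIndexed String title;
	@TextIndexed String content;

	// Explicit case: @Language makes both isExplicitLanguageProperty() and isLanguageProperty()
	// return true, and the index resolver registers this field as the text index's language override.
	@Language String lang;

	// Implicit case (not shown): a property whose mapped field name is simply "language" makes
	// isLanguageProperty() return true even without the annotation.
}
```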
|
||||
|
||||
/**
|
||||
* Returns whether the property holds the documents score calculated by text search. <br/>
|
||||
* It's marked with {@link TextScore}.
|
||||
|
||||
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import static org.springframework.util.ObjectUtils.*;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.util.JSON;
|
||||
@@ -25,6 +27,7 @@ import com.mongodb.util.JSON;
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class BasicQuery extends Query {
|
||||
|
||||
@@ -97,4 +100,42 @@ public class BasicQuery extends Query {
|
||||
protected void setFieldsObject(DBObject fieldsObject) {
|
||||
this.fieldsObject = fieldsObject;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.Query#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(o instanceof BasicQuery)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BasicQuery that = (BasicQuery) o;
|
||||
|
||||
return querySettingsEquals(that) && //
|
||||
nullSafeEquals(fieldsObject, that.fieldsObject) && //
|
||||
nullSafeEquals(queryObject, that.queryObject) && //
|
||||
nullSafeEquals(sortObject, that.sortObject);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.Query#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
int result = super.hashCode();
|
||||
result = 31 * result + nullSafeHashCode(queryObject);
|
||||
result = 31 * result + nullSafeHashCode(fieldsObject);
|
||||
result = 31 * result + nullSafeHashCode(sortObject);
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
@@ -87,12 +88,8 @@ public class BasicUpdate extends Update {
|
||||
|
||||
@Override
|
||||
public Update pullAll(String key, Object[] values) {
|
||||
Object[] convertedValues = new Object[values.length];
|
||||
for (int i = 0; i < values.length; i++) {
|
||||
convertedValues[i] = values[i];
|
||||
}
|
||||
DBObject keyValue = new BasicDBObject();
|
||||
keyValue.put(key, convertedValues);
|
||||
keyValue.put(key, Arrays.copyOf(values, values.length));
|
||||
updateObject.put("$pullAll", keyValue);
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -31,7 +31,9 @@ import org.springframework.data.geo.Shape;
|
||||
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
|
||||
import org.springframework.data.mongodb.core.geo.Sphere;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
@@ -515,8 +517,11 @@ public class Criteria implements CriteriaDefinition {
|
||||
* @see org.springframework.data.mongodb.core.query.CriteriaDefinition#getCriteriaObject()
|
||||
*/
|
||||
public DBObject getCriteriaObject() {
|
||||
|
||||
if (this.criteriaChain.size() == 1) {
|
||||
return criteriaChain.get(0).getSingleCriteriaObject();
|
||||
} else if (CollectionUtils.isEmpty(this.criteriaChain) && !CollectionUtils.isEmpty(this.criteria)) {
|
||||
return getSingleCriteriaObject();
|
||||
} else {
|
||||
DBObject criteriaObject = new BasicDBObject();
|
||||
for (Criteria c : this.criteriaChain) {
|
||||
@@ -550,6 +555,13 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
}
|
||||
|
||||
if (!StringUtils.hasText(this.key)) {
|
||||
if (not) {
|
||||
return new BasicDBObject("$not", dbo);
|
||||
}
|
||||
return dbo;
|
||||
}
|
||||
|
||||
DBObject queryCriteria = new BasicDBObject();
|
||||
|
||||
if (!NOT_SET.equals(isValue)) {
|
||||
|
||||
@@ -385,12 +385,21 @@ public class Query {
|
||||
return false;
|
||||
}
|
||||
|
||||
Query that = (Query) obj;
|
||||
return querySettingsEquals((Query) obj);
|
||||
}
|
||||
|
||||
/**
|
||||
* Tests whether the settings of the given {@link Query} are equal to this query.
|
||||
*
|
||||
* @param that
|
||||
* @return
|
||||
*/
|
||||
protected boolean querySettingsEquals(Query that) {
|
||||
|
||||
boolean criteriaEqual = this.criteria.equals(that.criteria);
|
||||
boolean fieldsEqual = this.fieldSpec == null ? that.fieldSpec == null : this.fieldSpec.equals(that.fieldSpec);
|
||||
boolean sortEqual = this.sort == null ? that.sort == null : this.sort.equals(that.sort);
|
||||
boolean hintEqual = this.hint == null ? that.hint == null : this.hint.equals(that.hint);
|
||||
boolean fieldsEqual = nullSafeEquals(this.fieldSpec, that.fieldSpec);
|
||||
boolean sortEqual = nullSafeEquals(this.sort, that.sort);
|
||||
boolean hintEqual = nullSafeEquals(this.hint, that.hint);
|
||||
boolean skipEqual = this.skip == that.skip;
|
||||
boolean limitEqual = this.limit == that.limit;
|
||||
boolean metaEqual = nullSafeEquals(this.meta, that.meta);
|
||||
|
||||
@@ -189,12 +189,7 @@ public class Update {
|
||||
* @return
|
||||
*/
|
||||
public Update pushAll(String key, Object[] values) {
|
||||
|
||||
Object[] convertedValues = new Object[values.length];
|
||||
for (int i = 0; i < values.length; i++) {
|
||||
convertedValues[i] = values[i];
|
||||
}
|
||||
addMultiFieldOperation("$pushAll", key, convertedValues);
|
||||
addMultiFieldOperation("$pushAll", key, Arrays.copyOf(values, values.length));
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -258,12 +253,7 @@ public class Update {
|
||||
* @return
|
||||
*/
|
||||
public Update pullAll(String key, Object[] values) {
|
||||
|
||||
Object[] convertedValues = new Object[values.length];
|
||||
for (int i = 0; i < values.length; i++) {
|
||||
convertedValues[i] = values[i];
|
||||
}
|
||||
addFieldOperation("$pullAll", key, convertedValues);
|
||||
addFieldOperation("$pullAll", key, Arrays.copyOf(values, values.length));
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -495,13 +485,7 @@ public class Update {
|
||||
return ((Collection<?>) values[0]).toArray();
|
||||
}
|
||||
|
||||
Object[] convertedValues = new Object[values.length];
|
||||
|
||||
for (int i = 0; i < values.length; i++) {
|
||||
convertedValues[i] = values[i];
|
||||
}
|
||||
|
||||
return convertedValues;
|
||||
return Arrays.copyOf(values, values.length);
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -27,6 +27,7 @@ import org.springframework.data.repository.PagingAndSortingRepository;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
@NoRepositoryBean
|
||||
public interface MongoRepository<T, ID extends Serializable> extends PagingAndSortingRepository<T, ID> {
|
||||
@@ -48,4 +49,26 @@ public interface MongoRepository<T, ID extends Serializable> extends PagingAndSo
|
||||
* @see org.springframework.data.repository.PagingAndSortingRepository#findAll(org.springframework.data.domain.Sort)
|
||||
*/
|
||||
List<T> findAll(Sort sort);
|
||||
|
||||
/**
|
||||
* Inserts the given entity. Assumes the instance to be new to be able to apply insertion optimizations. Use
|
||||
* the returned instance for further operations as the save operation might have changed the entity instance
|
||||
* completely. Prefer using {@link #save(Object)} instead to avoid the usage of store-specific API.
|
||||
*
|
||||
* @param entity must not be {@literal null}.
|
||||
* @return the saved entity
|
||||
* @since 1.7
|
||||
*/
|
||||
<S extends T> S insert(S entity);
|
||||
|
||||
/**
|
||||
* Inserts the given entities. Assumes the given entities to have not been persisted yet and thus will optimize the
|
||||
* insert over a call to {@link #save(Iterable)}. Prefer using {@link #save(Iterable)} to avoid the usage of store
|
||||
* specific API.
|
||||
*
|
||||
* @param entities must not be {@literal null}.
|
||||
* @return the saved entities
|
||||
* @since 1.7
|
||||
*/
|
||||
<S extends T> List<S> insert(Iterable<S> entities);
|
||||
}
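To make the contract above concrete, a hedged usage sketch; the `Person` type and `PersonRepository` interface are made up for the example, and a bootstrapped Spring Data MongoDB context is assumed.

```java
import java.util.Arrays;
import java.util.List;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.repository.MongoRepository;

// Illustrative domain type, not part of this change.
class Person {

	@Id String id;
	String name;

	Person(String name) {
		this.name = name;
	}
}

// Repository interface inheriting the new insert(...) operations from MongoRepository.
interface PersonRepository extends MongoRepository<Person, String> {}

class InsertVersusSave {

	void demo(PersonRepository repository) {

		// insert(...) assumes the instances are new, skips the is-new check save(...) performs,
		// and can hand the whole batch to insertAll(...) in one go.
		List<Person> people = repository.insert(Arrays.asList(new Person("Dave"), new Person("Carter")));

		// save(...) stays the portable option: it inserts new entities and updates existing ones.
		repository.save(people.get(0));
	}
}
```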
|
||||
|
||||
@@ -21,7 +21,6 @@ import java.util.List;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.data.domain.PageImpl;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.data.domain.SliceImpl;
|
||||
@@ -88,39 +87,25 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
|
||||
applyQueryMetaAttributesWhenPresent(query);
|
||||
|
||||
Object result = null;
|
||||
|
||||
if (isDeleteQuery()) {
|
||||
result = new DeleteExecution().execute(query);
|
||||
return new DeleteExecution().execute(query);
|
||||
} else if (method.isGeoNearQuery() && method.isPageQuery()) {
|
||||
|
||||
MongoParameterAccessor countAccessor = new MongoParametersParameterAccessor(method, parameters);
|
||||
Query countQuery = createCountQuery(new ConvertingParameterAccessor(operations.getConverter(), countAccessor));
|
||||
|
||||
result = new GeoNearExecution(accessor).execute(query, countQuery);
|
||||
return new GeoNearExecution(accessor).execute(query, countQuery);
|
||||
} else if (method.isGeoNearQuery()) {
|
||||
result = new GeoNearExecution(accessor).execute(query);
|
||||
return new GeoNearExecution(accessor).execute(query);
|
||||
} else if (method.isSliceQuery()) {
|
||||
result = new SlicedExecution(accessor.getPageable()).execute(query);
|
||||
return new SlicedExecution(accessor.getPageable()).execute(query);
|
||||
} else if (method.isCollectionQuery()) {
|
||||
result = new CollectionExecution(accessor.getPageable()).execute(query);
|
||||
return new CollectionExecution(accessor.getPageable()).execute(query);
|
||||
} else if (method.isPageQuery()) {
|
||||
result = new PagedExecution(accessor.getPageable()).execute(query);
|
||||
return new PagedExecution(accessor.getPageable()).execute(query);
|
||||
} else {
|
||||
result = new SingleEntityExecution(isCountQuery()).execute(query);
|
||||
return new SingleEntityExecution(isCountQuery()).execute(query);
|
||||
}
|
||||
|
||||
if (result == null) {
|
||||
return result;
|
||||
}
|
||||
|
||||
Class<?> expectedReturnType = method.getReturnType().getType();
|
||||
|
||||
if (expectedReturnType.isAssignableFrom(result.getClass())) {
|
||||
return result;
|
||||
}
|
||||
|
||||
return CONVERSION_SERVICE.convert(result, expectedReturnType);
|
||||
}
|
||||
|
||||
private Query applyQueryMetaAttributesWhenPresent(Query query) {
|
||||
@@ -211,6 +196,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
* {@link Execution} for {@link Slice} query methods.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @since 1.5
|
||||
*/
|
||||
|
||||
@@ -232,9 +218,11 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
int pageSize = pageable.getPageSize();
|
||||
Pageable slicePageable = new PageRequest(pageable.getPageNumber(), pageSize + 1, pageable.getSort());
|
||||
|
||||
List result = operations.find(query.with(slicePageable), metadata.getJavaType(), metadata.getCollectionName());
|
||||
// Apply Pageable but tweak limit to peek into next page
|
||||
Query modifiedQuery = query.with(pageable).limit(pageSize + 1);
|
||||
|
||||
List result = operations.find(modifiedQuery, metadata.getJavaType(), metadata.getCollectionName());
|
||||
|
||||
boolean hasNext = result.size() > pageSize;
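In other words, the execution asks for one document more than the requested page size and derives `hasNext()` from the surplus, avoiding the extra count query a `Page` would need. A hedged usage sketch (entity and repository names are assumptions):

```java
import org.springframework.data.annotation.Id;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.mongodb.repository.MongoRepository;

// Illustrative domain type, not part of this change.
class Customer {

	@Id String id;
	String lastname;
}

interface CustomerRepository extends MongoRepository<Customer, String> {

	// Returning Slice triggers the SlicedExecution shown above.
	Slice<Customer> findByLastname(String lastname, Pageable pageable);
}

class SliceWalkthrough {

	void readAllSlices(CustomerRepository repository) {

		Slice<Customer> slice = repository.findByLastname("Matthews", new PageRequest(0, 20));

		while (true) {

			// process slice.getContent() here

			if (!slice.hasNext()) {
				break;
			}

			slice = repository.findByLastname("Matthews", slice.nextPageable());
		}
	}
}
```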
|
||||
|
||||
@@ -271,9 +259,11 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
Object execute(Query query) {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
String collectionName = metadata.getCollectionName();
|
||||
Class<?> type = metadata.getJavaType();
|
||||
|
||||
int overallLimit = query.getLimit();
|
||||
long count = operations.count(query, metadata.getCollectionName());
|
||||
long count = operations.count(query, type, collectionName);
|
||||
count = overallLimit != 0 ? Math.min(count, query.getLimit()) : count;
|
||||
|
||||
boolean pageableOutOfScope = pageable.getOffset() > count;
|
||||
@@ -290,7 +280,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
query.limit(overallLimit - pageable.getOffset());
|
||||
}
|
||||
|
||||
List<?> result = operations.find(query, metadata.getJavaType(), metadata.getCollectionName());
|
||||
List<?> result = operations.find(query, type, collectionName);
|
||||
return new PageImpl(result, pageable, count);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -25,6 +25,7 @@ import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.Metrics;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.geo.Shape;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
@@ -46,6 +47,7 @@ import org.springframework.util.Assert;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
|
||||
@@ -102,9 +104,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> path = context.getPersistentPropertyPath(part.getProperty());
|
||||
MongoPersistentProperty property = path.getLeafProperty();
|
||||
Criteria criteria = from(part, property,
|
||||
where(path.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)),
|
||||
(PotentiallyConvertingIterator) iterator);
|
||||
Criteria criteria = from(part, property, where(path.toDotPath()), (PotentiallyConvertingIterator) iterator);
|
||||
|
||||
return criteria;
|
||||
}
|
||||
@@ -123,9 +123,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
PersistentPropertyPath<MongoPersistentProperty> path = context.getPersistentPropertyPath(part.getProperty());
|
||||
MongoPersistentProperty property = path.getLeafProperty();
|
||||
|
||||
return from(part, property,
|
||||
base.and(path.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)),
|
||||
(PotentiallyConvertingIterator) iterator);
|
||||
return from(part, property, base.and(path.toDotPath()), (PotentiallyConvertingIterator) iterator);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -194,7 +192,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
case STARTING_WITH:
|
||||
case ENDING_WITH:
|
||||
case CONTAINING:
|
||||
return addAppropriateLikeRegexTo(criteria, part, parameters.next().toString());
|
||||
return createContainingCriteria(part, property, criteria, parameters);
|
||||
case REGEX:
|
||||
return criteria.regex(parameters.next().toString());
|
||||
case EXISTS:
|
||||
@@ -212,7 +210,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
if (distance == null) {
|
||||
return criteria.near(point);
|
||||
} else {
|
||||
if (distance.getMetric() != null) {
|
||||
if (!Metrics.NEUTRAL.equals(distance.getMetric())) {
|
||||
criteria.nearSphere(point);
|
||||
} else {
|
||||
criteria.near(point);
|
||||
@@ -288,6 +286,27 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
Arrays.asList(IgnoreCaseType.ALWAYS, IgnoreCaseType.WHEN_POSSIBLE), part.shouldIgnoreCase()));
|
||||
}
|
||||
|
||||
/**
|
||||
* If the target property of the comparison is of type String, then the operator checks for match using regular
|
||||
* expression. If the target property of the comparison is a {@link Collection} then the operator evaluates to true if
|
||||
* it finds an exact match within any member of the {@link Collection}.
|
||||
*
|
||||
* @param part
|
||||
* @param property
|
||||
* @param criteria
|
||||
* @param parameters
|
||||
* @return
|
||||
*/
|
||||
private Criteria createContainingCriteria(Part part, MongoPersistentProperty property, Criteria criteria,
|
||||
PotentiallyConvertingIterator parameters) {
|
||||
|
||||
if (property.isCollectionLike()) {
|
||||
return criteria.in(nextAsArray(parameters, property));
|
||||
}
|
||||
|
||||
return addAppropriateLikeRegexTo(criteria, part, parameters.next().toString());
|
||||
}
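A hedged sketch of the derived queries this distinction affects; the `Article` document and repository below are illustrative only.

```java
import java.util.List;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.repository.MongoRepository;

// Illustrative document with both a plain String and a collection-like property.
class Article {

	@Id String id;
	String title;
	List<String> tags;
}

interface ArticleRepository extends MongoRepository<Article, String> {

	// 'title' is a String, so Containing is still translated into a like-style $regex criterion.
	List<Article> findByTitleContaining(String titleFragment);

	// 'tags' is collection-like, so Containing now builds an $in criterion matching any element exactly.
	List<Article> findByTagsContaining(String tag);
}
```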
|
||||
|
||||
/**
|
||||
* Creates an appropriate like-regex and appends it to the given criteria.
|
||||
*
|
||||
|
||||
@@ -128,8 +128,7 @@ public class MongoQueryMethod extends QueryMethod {
|
||||
MongoPersistentEntity<?> collectionEntity = domainClass.isAssignableFrom(returnedObjectType) ? returnedEntity
|
||||
: managedEntity;
|
||||
|
||||
this.metadata = new SimpleMongoEntityMetadata<Object>((Class<Object>) returnedEntity.getType(),
|
||||
collectionEntity.getCollection());
|
||||
this.metadata = new SimpleMongoEntityMetadata<Object>((Class<Object>) returnedEntity.getType(), collectionEntity);
|
||||
}
|
||||
|
||||
return this.metadata;
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
@@ -25,21 +26,22 @@ import org.springframework.util.Assert;
|
||||
class SimpleMongoEntityMetadata<T> implements MongoEntityMetadata<T> {
|
||||
|
||||
private final Class<T> type;
|
||||
private final String collectionName;
|
||||
private final MongoPersistentEntity<?> collectionEntity;
|
||||
|
||||
/**
|
||||
* Creates a new {@link SimpleMongoEntityMetadata} using the given type and collection name.
|
||||
* Creates a new {@link SimpleMongoEntityMetadata} using the given type and {@link MongoPersistentEntity} to use for
|
||||
* collection lookups.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @param collectionEntity must not be {@literal null} or empty.
|
||||
*/
|
||||
public SimpleMongoEntityMetadata(Class<T> type, String collectionName) {
|
||||
public SimpleMongoEntityMetadata(Class<T> type, MongoPersistentEntity<?> collectionEntity) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
Assert.notNull(collectionEntity, "Collection entity must not be null or empty!");
|
||||
|
||||
this.type = type;
|
||||
this.collectionName = collectionName;
|
||||
this.collectionEntity = collectionEntity;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -55,6 +57,6 @@ class SimpleMongoEntityMetadata<T> implements MongoEntityMetadata<T> {
|
||||
* @see org.springframework.data.mongodb.repository.query.MongoEntityMetadata#getCollectionName()
|
||||
*/
|
||||
public String getCollectionName() {
|
||||
return collectionName;
|
||||
return collectionEntity.getCollection();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,6 +29,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.DBRef;
|
||||
import com.mongodb.util.JSON;
|
||||
|
||||
/**
|
||||
@@ -199,6 +200,7 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
* {@link Collections#emptyList()}.
|
||||
*
|
||||
* @param input
|
||||
* @param conversionService must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public List<ParameterBinding> parseParameterBindingsFrom(String input) {
|
||||
@@ -229,14 +231,7 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
if (value instanceof String) {
|
||||
|
||||
String string = ((String) value).trim();
|
||||
|
||||
Matcher valueMatcher = PARSEABLE_BINDING_PATTERN.matcher(string);
|
||||
while (valueMatcher.find()) {
|
||||
int paramIndex = Integer.parseInt(valueMatcher.group(PARAMETER_INDEX_GROUP));
|
||||
boolean quoted = (string.startsWith("'") && string.endsWith("'"))
|
||||
|| (string.startsWith("\"") && string.endsWith("\""));
|
||||
bindings.add(new ParameterBinding(paramIndex, quoted));
|
||||
}
|
||||
potentiallyAddBinding(string, bindings);
|
||||
|
||||
} else if (value instanceof Pattern) {
|
||||
|
||||
@@ -255,15 +250,37 @@ public class StringBasedMongoQuery extends AbstractMongoQuery {
|
||||
bindings.add(new ParameterBinding(paramIndex, quoted));
|
||||
}
|
||||
|
||||
} else if (value instanceof DBRef) {
|
||||
|
||||
DBRef dbref = (DBRef) value;
|
||||
|
||||
potentiallyAddBinding(dbref.getRef(), bindings);
|
||||
potentiallyAddBinding(dbref.getId().toString(), bindings);
|
||||
|
||||
} else if (value instanceof DBObject) {
|
||||
|
||||
DBObject dbo = (DBObject) value;
|
||||
|
||||
for (String field : dbo.keySet()) {
|
||||
collectParameterReferencesIntoBindings(bindings, field);
|
||||
collectParameterReferencesIntoBindings(bindings, dbo.get(field));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void potentiallyAddBinding(String source, List<ParameterBinding> bindings) {
|
||||
|
||||
Matcher valueMatcher = PARSEABLE_BINDING_PATTERN.matcher(source);
|
||||
|
||||
while (valueMatcher.find()) {
|
||||
|
||||
int paramIndex = Integer.parseInt(valueMatcher.group(PARAMETER_INDEX_GROUP));
|
||||
boolean quoted = (source.startsWith("'") && source.endsWith("'"))
|
||||
|| (source.startsWith("\"") && source.endsWith("\""));
|
||||
|
||||
bindings.add(new ParameterBinding(paramIndex, quoted));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -27,8 +27,10 @@ import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
|
||||
import org.springframework.data.querydsl.EntityPathResolver;
|
||||
import org.springframework.data.querydsl.QSort;
|
||||
import org.springframework.data.querydsl.QueryDslPredicateExecutor;
|
||||
import org.springframework.data.querydsl.SimpleEntityPathResolver;
|
||||
import org.springframework.data.repository.core.EntityInformation;
|
||||
import org.springframework.data.repository.core.EntityMetadata;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -43,18 +45,21 @@ import com.mysema.query.types.path.PathBuilder;
|
||||
* Special QueryDsl based repository implementation that allows execution of {@link Predicate}s in various forms.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleMongoRepository<T, ID> implements
|
||||
QueryDslPredicateExecutor<T> {
|
||||
|
||||
private final PathBuilder<T> builder;
|
||||
private final EntityInformation<T, ID> entityInformation;
|
||||
private final MongoOperations mongoOperations;
|
||||
|
||||
/**
|
||||
* Creates a new {@link QueryDslMongoRepository} for the given {@link EntityMetadata} and {@link MongoTemplate}. Uses
|
||||
* the {@link SimpleEntityPathResolver} to create an {@link EntityPath} for the given domain class.
|
||||
*
|
||||
* @param entityInformation
|
||||
* @param template
|
||||
* @param entityInformation must not be {@literal null}.
|
||||
* @param mongoOperations must not be {@literal null}.
|
||||
*/
|
||||
public QueryDslMongoRepository(MongoEntityInformation<T, ID> entityInformation, MongoOperations mongoOperations) {
|
||||
this(entityInformation, mongoOperations, SimpleEntityPathResolver.INSTANCE);
|
||||
@@ -64,17 +69,21 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
* Creates a new {@link QueryDslMongoRepository} for the given {@link MongoEntityInformation}, {@link MongoTemplate}
|
||||
* and {@link EntityPathResolver}.
|
||||
*
|
||||
* @param entityInformation
|
||||
* @param mongoOperations
|
||||
* @param resolver
|
||||
* @param entityInformation must not be {@literal null}.
|
||||
* @param mongoOperations must not be {@literal null}.
|
||||
* @param resolver must not be {@literal null}.
|
||||
*/
|
||||
public QueryDslMongoRepository(MongoEntityInformation<T, ID> entityInformation, MongoOperations mongoOperations,
|
||||
EntityPathResolver resolver) {
|
||||
|
||||
super(entityInformation, mongoOperations);
|
||||
|
||||
Assert.notNull(resolver);
|
||||
EntityPath<T> path = resolver.createPath(entityInformation.getJavaType());
|
||||
|
||||
this.builder = new PathBuilder<T>(path.getType(), path.getMetadata());
|
||||
this.entityInformation = entityInformation;
|
||||
this.mongoOperations = mongoOperations;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -98,7 +107,6 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
* @see org.springframework.data.querydsl.QueryDslPredicateExecutor#findAll(com.mysema.query.types.Predicate, com.mysema.query.types.OrderSpecifier<?>[])
|
||||
*/
|
||||
public List<T> findAll(Predicate predicate, OrderSpecifier<?>... orders) {
|
||||
|
||||
return createQueryFor(predicate).orderBy(orders).list();
|
||||
}
|
||||
|
||||
@@ -114,6 +122,28 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
return new PageImpl<T>(applyPagination(query, pageable).list(), pageable, countQuery.count());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.support.SimpleMongoRepository#findAll(org.springframework.data.domain.Pageable)
|
||||
*/
|
||||
@Override
|
||||
public Page<T> findAll(Pageable pageable) {
|
||||
|
||||
MongodbQuery<T> countQuery = createQuery();
|
||||
MongodbQuery<T> query = createQuery();
|
||||
|
||||
return new PageImpl<T>(applyPagination(query, pageable).list(), pageable, countQuery.count());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.support.SimpleMongoRepository#findAll(org.springframework.data.domain.Sort)
|
||||
*/
|
||||
@Override
|
||||
public List<T> findAll(Sort sort) {
|
||||
return applySorting(createQuery(), sort).list();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.querydsl.QueryDslPredicateExecutor#count(com.mysema.query.types.Predicate)
|
||||
@@ -129,11 +159,16 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
* @return
|
||||
*/
|
||||
private MongodbQuery<T> createQueryFor(Predicate predicate) {
|
||||
return createQuery().where(predicate);
|
||||
}
|
||||
|
||||
Class<T> domainType = getEntityInformation().getJavaType();
|
||||
|
||||
MongodbQuery<T> query = new SpringDataMongodbQuery<T>(getMongoOperations(), domainType);
|
||||
return query.where(predicate);
|
||||
/**
|
||||
* Creates a {@link MongodbQuery}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private MongodbQuery<T> createQuery() {
|
||||
return new SpringDataMongodbQuery<T>(mongoOperations, entityInformation.getJavaType());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -166,6 +201,15 @@ public class QueryDslMongoRepository<T, ID extends Serializable> extends SimpleM
|
||||
return query;
|
||||
}
|
||||
|
||||
// TODO: find better solution than instanceof check
|
||||
if (sort instanceof QSort) {
|
||||
|
||||
List<OrderSpecifier<?>> orderSpecifiers = ((QSort) sort).getOrderSpecifiers();
|
||||
query.orderBy(orderSpecifiers.toArray(new OrderSpecifier<?>[orderSpecifiers.size()]));
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
for (Order order : sort) {
|
||||
query.orderBy(toOrder(order));
|
||||
}
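A hedged usage sketch of the new `QSort` handling; `QPerson` is a Querydsl-generated query type and `Person` a mapped document, both assumed to exist for the example.

```java
import java.util.List;

import org.springframework.data.mongodb.repository.support.QueryDslMongoRepository;
import org.springframework.data.querydsl.QSort;

class QSortSketch {

	// Sorting through the Querydsl API: the QSort's OrderSpecifiers are applied directly
	// instead of being converted from a plain Sort, preserving Querydsl-specific ordering.
	List<Person> findSorted(QueryDslMongoRepository<Person, String> repository) {
		return repository.findAll(new QSort(QPerson.person.firstname.asc()));
	}
}
```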
|
||||
|
||||
@@ -19,6 +19,7 @@ import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
@@ -41,6 +42,7 @@ import org.springframework.util.Assert;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class SimpleMongoRepository<T, ID extends Serializable> implements MongoRepository<T, ID> {
|
||||
|
||||
@@ -48,7 +50,7 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
private final MongoEntityInformation<T, ID> entityInformation;
|
||||
|
||||
/**
|
||||
* Creates a ew {@link SimpleMongoRepository} for the given {@link MongoEntityInformation} and {@link MongoTemplate}.
|
||||
* Creates a new {@link SimpleMongoRepository} for the given {@link MongoEntityInformation} and {@link MongoTemplate}.
|
||||
*
|
||||
* @param metadata must not be {@literal null}.
|
||||
* @param template must not be {@literal null}.
|
||||
@@ -70,7 +72,12 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
|
||||
Assert.notNull(entity, "Entity must not be null!");
|
||||
|
||||
mongoOperations.save(entity, entityInformation.getCollectionName());
|
||||
if (entityInformation.isNew(entity)) {
|
||||
mongoOperations.insert(entity, entityInformation.getCollectionName());
|
||||
} else {
|
||||
mongoOperations.save(entity, entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
return entity;
|
||||
}
|
||||
|
||||
@@ -82,11 +89,22 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
|
||||
Assert.notNull(entities, "The given Iterable of entities must not be null!");
|
||||
|
||||
List<S> result = new ArrayList<S>();
|
||||
List<S> result = convertIterableToList(entities);
|
||||
boolean allNew = true;
|
||||
|
||||
for (S entity : entities) {
|
||||
save(entity);
|
||||
result.add(entity);
|
||||
if (allNew && !entityInformation.isNew(entity)) {
|
||||
allNew = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (allNew) {
|
||||
mongoOperations.insertAll(result);
|
||||
} else {
|
||||
|
||||
for (S entity : result) {
|
||||
save(entity);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
@@ -181,7 +199,7 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
*/
|
||||
public Iterable<T> findAll(Iterable<ID> ids) {
|
||||
|
||||
Set<ID> parameters = new HashSet<ID>();
|
||||
Set<ID> parameters = new HashSet<ID>(tryDetermineRealSizeOrReturn(ids, 10));
|
||||
for (ID id : ids) {
|
||||
parameters.add(id);
|
||||
}
|
||||
@@ -209,6 +227,38 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
return findAll(new Query().with(sort));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.MongoRepository#insert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public <S extends T> S insert(S entity) {
|
||||
|
||||
Assert.notNull(entity, "Entity must not be null!");
|
||||
|
||||
mongoOperations.insert(entity, entityInformation.getCollectionName());
|
||||
return entity;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.MongoRepository#insert(java.lang.Iterable)
|
||||
*/
|
||||
@Override
|
||||
public <S extends T> List<S> insert(Iterable<S> entities) {
|
||||
|
||||
Assert.notNull(entities, "The given Iterable of entities must not be null!");
|
||||
|
||||
List<S> list = convertIterableToList(entities);
|
||||
|
||||
if (list.isEmpty()) {
|
||||
return list;
|
||||
}
|
||||
|
||||
mongoOperations.insertAll(list);
|
||||
return list;
|
||||
}
|
||||
|
||||
private List<T> findAll(Query query) {
|
||||
|
||||
if (query == null) {
|
||||
@@ -218,20 +268,27 @@ public class SimpleMongoRepository<T, ID extends Serializable> implements MongoR
|
||||
return mongoOperations.find(query, entityInformation.getJavaType(), entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the underlying {@link MongoOperations} instance.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
protected MongoOperations getMongoOperations() {
|
||||
return this.mongoOperations;
|
||||
private static <T> List<T> convertIterableToList(Iterable<T> entities) {
|
||||
|
||||
if (entities instanceof List) {
|
||||
return (List<T>) entities;
|
||||
}
|
||||
|
||||
int capacity = tryDetermineRealSizeOrReturn(entities, 10);
|
||||
|
||||
if (capacity == 0 || entities == null) {
|
||||
return Collections.<T> emptyList();
|
||||
}
|
||||
|
||||
List<T> list = new ArrayList<T>(capacity);
|
||||
for (T entity : entities) {
|
||||
list.add(entity);
|
||||
}
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the entityInformation
|
||||
*/
|
||||
protected MongoEntityInformation<T, ID> getEntityInformation() {
|
||||
return entityInformation;
|
||||
private static int tryDetermineRealSizeOrReturn(Iterable<?> iterable, int defaultSize) {
|
||||
return iterable == null ? 0 : (iterable instanceof Collection) ? ((Collection<?>) iterable).size() : defaultSize;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.support;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
@@ -41,7 +44,17 @@ import com.mysema.query.types.PathType;
|
||||
*/
|
||||
class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
|
||||
private final String ID_KEY = "_id";
|
||||
private static final String ID_KEY = "_id";
|
||||
private static final Set<PathType> PATH_TYPES;
|
||||
|
||||
static {
|
||||
|
||||
Set<PathType> pathTypes = new HashSet<PathType>();
|
||||
pathTypes.add(PathType.VARIABLE);
|
||||
pathTypes.add(PathType.PROPERTY);
|
||||
|
||||
PATH_TYPES = Collections.unmodifiableSet(pathTypes);
|
||||
}
|
||||
|
||||
private final MongoConverter converter;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
@@ -138,7 +151,7 @@ class SpringDataMongodbSerializer extends MongodbSerializer {
|
||||
|
||||
Path<?> parent = path.getMetadata().getParent();
|
||||
|
||||
if (parent == null) {
|
||||
if (parent == null || !PATH_TYPES.contains(path.getMetadata().getPathType())) {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012-2013 the original author or authors.
|
||||
* Copyright 2012-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -23,6 +23,7 @@ import java.net.UnknownHostException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
@@ -49,11 +50,17 @@ public class ServerAddressPropertyEditorUnitTests {
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-454
|
||||
* @see DATAMONGO-1062
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void rejectsAddressConfigWithoutASingleParsableServerAddress() {
|
||||
public void rejectsAddressConfigWithoutASingleParsableAndResolvableServerAddress() {
|
||||
|
||||
editor.setAsText("foo, bar");
|
||||
String unknownHost1 = "gugu.nonexistant.example.org";
|
||||
String unknownHost2 = "gaga.nonexistant.example.org";
|
||||
|
||||
assertUnresolveableHostnames(unknownHost1, unknownHost2);
|
||||
|
||||
editor.setAsText(unknownHost1 + "," + unknownHost2);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -193,4 +200,16 @@ public class ServerAddressPropertyEditorUnitTests {
|
||||
assertThat(addresses, hasItem(new ServerAddress(InetAddress.getByName(hostAddress), port)));
|
||||
}
|
||||
}
|
||||
|
||||
private void assertUnresolveableHostnames(String... hostnames) {
|
||||
|
||||
for (String hostname : hostnames) {
|
||||
try {
|
||||
InetAddress.getByName(hostname);
|
||||
Assert.fail("Supposedly unresolveable hostname '" + hostname + "' can be resolved.");
|
||||
} catch (UnknownHostException expected) {
|
||||
// ok
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2791,6 +2791,7 @@ public class MongoTemplateTests {
|
||||
|
||||
@Id public String id;
|
||||
|
||||
@Field("db_ref_list")/** @see DATAMONGO-1058 */
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef//
|
||||
public List<Sample> dbRefAnnotatedList;
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -47,11 +47,14 @@ import org.springframework.core.io.ClassPathResource;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.geo.Metrics;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.core.CollectionCallback;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.Venue;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationTests.CarDescriptor.Entry;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.index.GeospatialIndex;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.repository.Person;
|
||||
import org.springframework.data.util.Version;
|
||||
@@ -120,6 +123,7 @@ public class AggregationTests {
|
||||
mongoTemplate.dropCollection(User.class);
|
||||
mongoTemplate.dropCollection(Person.class);
|
||||
mongoTemplate.dropCollection(Reservation.class);
|
||||
mongoTemplate.dropCollection(Venue.class);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1018,6 +1022,30 @@ public class AggregationTests {
|
||||
assertThat(dbo.get("dayOfYearPlus1DayManually"), is((Object) dateTime.plusDays(1).getDayOfYear()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1127
|
||||
*/
|
||||
@Test
|
||||
public void shouldSupportGeoNearQueriesForAggregationWithDistanceField() {
|
||||
|
||||
mongoTemplate.insert(new Venue("Penn Station", -73.99408, 40.75057));
|
||||
mongoTemplate.insert(new Venue("10gen Office", -73.99171, 40.738868));
|
||||
mongoTemplate.insert(new Venue("Flatiron Building", -73.988135, 40.741404));
|
||||
|
||||
mongoTemplate.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location"));
|
||||
|
||||
NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).num(10).maxDistance(150);
|
||||
|
||||
Aggregation agg = newAggregation(Aggregation.geoNear(geoNear, "distance"));
|
||||
AggregationResults<DBObject> result = mongoTemplate.aggregate(agg, Venue.class, DBObject.class);
|
||||
|
||||
assertThat(result.getMappedResults(), hasSize(3));
|
||||
|
||||
DBObject firstResult = result.getMappedResults().get(0);
|
||||
assertThat(firstResult.containsField("distance"), is(true));
|
||||
assertThat((Double) firstResult.get("distance"), closeTo(117.620092203928, 0.00001));
|
||||
}
|
||||
|
||||
private void assertLikeStats(LikeStats like, String id, long count) {
|
||||
|
||||
assertThat(like, is(notNullValue()));
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,23 +22,30 @@ import org.junit.Test;
|
||||
import org.springframework.data.mongodb.core.DBObjectTestUtils;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link GeoNearOperation}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class GeoNearOperationUnitTests {
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1127
|
||||
*/
|
||||
@Test
|
||||
public void rendersNearQueryAsAggregationOperation() {
|
||||
|
||||
NearQuery query = NearQuery.near(10.0, 10.0);
|
||||
GeoNearOperation operation = new GeoNearOperation(query);
|
||||
GeoNearOperation operation = new GeoNearOperation(query, "distance");
|
||||
DBObject dbObject = operation.toDBObject(Aggregation.DEFAULT_CONTEXT);
|
||||
|
||||
DBObject nearClause = DBObjectTestUtils.getAsDBObject(dbObject, "$geoNear");
|
||||
assertThat(nearClause, is(query.toDBObject()));
|
||||
|
||||
DBObject expected = (DBObject) new BasicDBObject(query.toDBObject().toMap()).append("distanceField", "distance");
|
||||
assertThat(nearClause, is(expected));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -541,6 +541,26 @@ public class DbRefMappingMongoConverterUnitTests {
|
||||
assertProxyIsResolved(proxy, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1076
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotTriggerResolvingOfLazyLoadedProxyWhenFinalizeMethodIsInvoked() throws Exception {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(WithObjectMethodOverrideLazyDbRefs.class);
|
||||
MongoPersistentProperty property = entity.getPersistentProperty("dbRefToConcreteTypeWithPropertyAccess");
|
||||
|
||||
String idValue = new ObjectId().toString();
|
||||
DBRef dbRef = converter.toDBRef(new LazyDbRefTargetPropertyAccess(idValue), property);
|
||||
|
||||
WithObjectMethodOverrideLazyDbRefs result = converter.read(WithObjectMethodOverrideLazyDbRefs.class,
|
||||
new BasicDBObject("dbRefToPlainObject", dbRef));
|
||||
|
||||
ReflectionTestUtils.invokeMethod(result.dbRefToPlainObject, "finalize");
|
||||
|
||||
assertProxyIsResolved(result.dbRefToPlainObject, false);
|
||||
}
|
||||
|
||||
private Object transport(Object result) {
|
||||
return SerializationUtils.deserialize(SerializationUtils.serialize(result));
|
||||
}
|
||||
|
||||
@@ -50,14 +50,18 @@ import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.beans.ConversionNotSupportedException;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.annotation.PersistenceConstructor;
|
||||
import org.springframework.data.annotation.TypeAlias;
|
||||
import org.springframework.data.convert.ReadingConverter;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.geo.Box;
|
||||
import org.springframework.data.geo.Circle;
|
||||
import org.springframework.data.geo.Distance;
|
||||
@@ -70,6 +74,7 @@ import org.springframework.data.mapping.model.MappingInstantiationException;
|
||||
import org.springframework.data.mongodb.core.DBObjectTestUtils;
|
||||
import org.springframework.data.mongodb.core.convert.DBObjectAccessorUnitTests.NestedType;
|
||||
import org.springframework.data.mongodb.core.convert.DBObjectAccessorUnitTests.ProjectingType;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverterUnitTests.ClassWithMapUsingEnumAsKey.FooBarEnum;
|
||||
import org.springframework.data.mongodb.core.geo.Sphere;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
@@ -1853,6 +1858,84 @@ public class MappingMongoConverterUnitTests {
|
||||
converter.read(Item.class, source);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1058
|
||||
*/
|
||||
@Test
|
||||
public void readShouldRespectExplicitFieldNameForDbRef() {
|
||||
|
||||
BasicDBObject source = new BasicDBObject();
|
||||
source.append("explict-name-for-db-ref", new DBRef(mock(DB.class), "foo", "1"));
|
||||
|
||||
converter.read(ClassWithExplicitlyNamedDBRefProperty.class, source);
|
||||
|
||||
verify(resolver, times(1)).resolveDbRef(Mockito.any(MongoPersistentProperty.class), Mockito.any(DBRef.class),
|
||||
Mockito.any(DbRefResolverCallback.class), Mockito.any(DbRefProxyHandler.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1118
|
||||
*/
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void convertsMapKeyUsingCustomConverterForAndBackwards() {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext);
|
||||
converter.setCustomConversions(new CustomConversions(Arrays.asList(new FooBarEnumToStringConverter(),
|
||||
new StringToFooNumConverter())));
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
ClassWithMapUsingEnumAsKey source = new ClassWithMapUsingEnumAsKey();
|
||||
source.map = new HashMap<FooBarEnum, String>();
|
||||
source.map.put(FooBarEnum.FOO, "wohoo");
|
||||
|
||||
DBObject target = new BasicDBObject();
|
||||
converter.write(source, target);
|
||||
|
||||
assertThat(converter.read(ClassWithMapUsingEnumAsKey.class, target).map, is(source.map));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1118
|
||||
*/
|
||||
@Test
|
||||
public void writesMapKeyUsingCustomConverter() {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext);
|
||||
converter.setCustomConversions(new CustomConversions(Arrays.asList(new FooBarEnumToStringConverter())));
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
ClassWithMapUsingEnumAsKey source = new ClassWithMapUsingEnumAsKey();
|
||||
source.map = new HashMap<FooBarEnum, String>();
|
||||
source.map.put(FooBarEnum.FOO, "spring");
|
||||
source.map.put(FooBarEnum.BAR, "data");
|
||||
|
||||
DBObject target = new BasicDBObject();
|
||||
converter.write(source, target);
|
||||
|
||||
DBObject map = DBObjectTestUtils.getAsDBObject(target, "map");
|
||||
|
||||
assertThat(map.containsField("foo-enum-value"), is(true));
|
||||
assertThat(map.containsField("bar-enum-value"), is(true));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1118
|
||||
*/
|
||||
@Test
|
||||
public void readsMapKeyUsingCustomConverter() {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext);
|
||||
converter.setCustomConversions(new CustomConversions(Arrays.asList(new StringToFooNumConverter())));
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
DBObject source = new BasicDBObject("map", new BasicDBObject("foo-enum-value", "spring"));
|
||||
|
||||
ClassWithMapUsingEnumAsKey target = converter.read(ClassWithMapUsingEnumAsKey.class, source);
|
||||
|
||||
assertThat(target.map.get(FooBarEnum.FOO), is("spring"));
|
||||
}
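The three DATAMONGO-1118 tests register the converter pair on a hand-built MappingMongoConverter. In an application the same pair would typically be registered once through the Mongo configuration class; a rough sketch (class and database names are assumed):

@Configuration
class MongoConfig extends AbstractMongoConfiguration {

    @Override
    protected String getDatabaseName() {
        return "example";
    }

    @Override
    public Mongo mongo() throws Exception {
        return new MongoClient();
    }

    @Override
    public CustomConversions customConversions() {
        // FOO is written as "foo-enum-value" and read back into the enum map key
        return new CustomConversions(Arrays.asList(
                new FooBarEnumToStringConverter(), new StringToFooNumConverter()));
    }
}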
|
||||
static class GenericType<T> {
|
||||
T content;
|
||||
}
|
||||
@@ -2102,4 +2185,60 @@ public class MappingMongoConverterUnitTests {
|
||||
|
||||
@TextScore Float score;
|
||||
}
|
||||
|
||||
class ClassWithExplicitlyNamedDBRefProperty {
|
||||
|
||||
@Field("explict-name-for-db-ref")//
|
||||
@org.springframework.data.mongodb.core.mapping.DBRef//
|
||||
ClassWithIntId dbRefProperty;
|
||||
|
||||
public ClassWithIntId getDbRefProperty() {
|
||||
return dbRefProperty;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class ClassWithMapUsingEnumAsKey {
|
||||
|
||||
static enum FooBarEnum {
|
||||
FOO, BAR;
|
||||
}
|
||||
|
||||
Map<FooBarEnum, String> map;
|
||||
}
|
||||
|
||||
@WritingConverter
|
||||
static class FooBarEnumToStringConverter implements Converter<FooBarEnum, String> {
|
||||
|
||||
@Override
|
||||
public String convert(FooBarEnum source) {
|
||||
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return FooBarEnum.FOO.equals(source) ? "foo-enum-value" : "bar-enum-value";
|
||||
}
|
||||
}
|
||||
|
||||
@ReadingConverter
|
||||
static class StringToFooNumConverter implements Converter<String, FooBarEnum> {
|
||||
|
||||
@Override
|
||||
public FooBarEnum convert(String source) {
|
||||
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (source.equals("foo-enum-value")) {
|
||||
return FooBarEnum.FOO;
|
||||
}
|
||||
if (source.equals("bar-enum-value")) {
|
||||
return FooBarEnum.BAR;
|
||||
}
|
||||
|
||||
throw new ConversionNotSupportedException(source, String.class, null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -658,6 +658,22 @@ public class QueryMapperUnitTests {
|
||||
assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("_id", 1).get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1070
|
||||
*/
|
||||
@Test
|
||||
public void mapsIdReferenceToDBRefCorrectly() {
|
||||
|
||||
ObjectId id = new ObjectId();
|
||||
|
||||
DBObject query = new BasicDBObject("reference.id", new com.mongodb.DBRef(null, "reference", id.toString()));
|
||||
DBObject result = mapper.getMappedObject(query, context.getPersistentEntity(WithDBRef.class));
|
||||
|
||||
assertThat(result.containsField("reference"), is(true));
|
||||
com.mongodb.DBRef reference = getTypedValue(result, "reference", com.mongodb.DBRef.class);
|
||||
assertThat(reference.getId(), is(instanceOf(ObjectId.class)));
|
||||
}
|
||||
|
||||
@Document
|
||||
public class Foo {
|
||||
@Id private ObjectId id;
|
||||
|
||||
@@ -508,6 +508,23 @@ public class UpdateMapperUnitTests {
|
||||
assertThat(list, equalTo(new BasicDBObjectBuilder().add("_id", "1").get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1077
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotRemovePositionalParameter() {
|
||||
|
||||
Update update = new Update();
|
||||
update.unset("dbRefAnnotatedList.$");
|
||||
|
||||
DBObject mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(DocumentWithDBRefCollection.class));
|
||||
|
||||
DBObject $unset = DBObjectTestUtils.getAsDBObject(mappedUpdate, "$unset");
|
||||
|
||||
assertThat($unset, equalTo(new BasicDBObjectBuilder().add("dbRefAnnotatedList.$", 1).get()));
|
||||
}
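In template terms the mapped update corresponds to something like the sketch below; the query part is assumed, only the $unset shape is what the test asserts:

Update update = new Update().unset("dbRefAnnotatedList.$");
// documentId: id of the document to update (assumed)
template.updateFirst(query(where("id").is(documentId)), update,
        DocumentWithDBRefCollection.class);
// mapped update sent to MongoDB: { "$unset" : { "dbRefAnnotatedList.$" : 1 } }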
|
||||
@org.springframework.data.mongodb.core.mapping.Document(collection = "DocumentWithReferenceToInterface")
|
||||
static interface DocumentWithReferenceToInterface {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
* Copyright 2014-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,10 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import static org.hamcrest.collection.IsCollectionWithSize.*;
|
||||
import static org.hamcrest.collection.IsEmptyCollection.*;
|
||||
import static org.hamcrest.core.IsEqual.*;
|
||||
import static org.hamcrest.core.IsInstanceOf.*;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@@ -26,8 +23,6 @@ import java.lang.annotation.Annotation;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.hamcrest.collection.IsEmptyCollection;
|
||||
import org.hamcrest.core.IsEqual;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Suite;
|
||||
@@ -461,7 +456,7 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
indexDefinitions.get(0));
|
||||
|
||||
DBObject weights = DBObjectTestUtils.getAsDBObject(indexDefinitions.get(0).getIndexOptions(), "weights");
|
||||
assertThat(weights.get("nested.foo"), IsEqual.<Object> equalTo(5F));
|
||||
assertThat(weights.get("nested.foo"), is((Object) 5F));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -476,8 +471,8 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
"textIndexOnNestedWithMostSpecificValueRoot", indexDefinitions.get(0));
|
||||
|
||||
DBObject weights = DBObjectTestUtils.getAsDBObject(indexDefinitions.get(0).getIndexOptions(), "weights");
|
||||
assertThat(weights.get("nested.foo"), IsEqual.<Object> equalTo(5F));
|
||||
assertThat(weights.get("nested.bar"), IsEqual.<Object> equalTo(10F));
|
||||
assertThat(weights.get("nested.foo"), is((Object) 5F));
|
||||
assertThat(weights.get("nested.bar"), is((Object) 10F));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -487,17 +482,57 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
public void shouldSetDefaultLanguageCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithDefaultLanguage.class);
|
||||
assertThat(indexDefinitions.get(0).getIndexOptions().get("default_language"), IsEqual.<Object> equalTo("spanish"));
|
||||
assertThat(indexDefinitions.get(0).getIndexOptions().get("default_language"), is((Object) "spanish"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-937
|
||||
* @see DATAMONGO-937, DATAMONGO-1049
|
||||
*/
|
||||
@Test
|
||||
public void shouldResolveTextIndexLanguageOverrideCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithLanguageOverrideOnNestedElementRoot.class);
|
||||
assertThat(indexDefinitions.get(0).getIndexOptions().get("language_override"), IsEqual.<Object> equalTo("lang"));
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithLanguageOverride.class);
|
||||
assertThat(indexDefinitions.get(0).getIndexOptions().get("language_override"), is((Object) "lang"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1049
|
||||
*/
|
||||
@Test
|
||||
public void shouldIgnoreTextIndexLanguageOverrideOnNestedElements() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithLanguageOverrideOnNestedElement.class);
|
||||
assertThat(indexDefinitions.get(0).getIndexOptions().get("language_override"), is(nullValue()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1049
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotCreateIndexDefinitionWhenOnlyLanguageButNoTextIndexPresent() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithNoTextIndexPropertyButReservedFieldLanguage.class);
|
||||
assertThat(indexDefinitions, is(empty()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1049
|
||||
*/
|
||||
@Test
|
||||
public void shouldNotCreateIndexDefinitionWhenOnlyAnnotatedLanguageButNoTextIndexPresent() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithNoTextIndexPropertyButReservedFieldLanguageAnnotated.class);
|
||||
assertThat(indexDefinitions, is(empty()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1049
|
||||
*/
|
||||
@Test
|
||||
public void shouldPreferExplicitlyAnnotatedLanguageProperty() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(DocumentWithOverlappingLanguageProps.class);
|
||||
assertThat(indexDefinitions.get(0).getIndexOptions().get("language_override"), is((Object) "lang"));
|
||||
}
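The same text index these resolver tests derive from annotations can also be declared programmatically; a sketch using the TextIndexDefinition builder as I recall the 1.6 API (field names taken from the fixtures below):

IndexDefinition textIndex = new TextIndexDefinitionBuilder()
        .onField("foo", 5F)                 // weights entry: { "foo" : 5 }
        .withDefaultLanguage("spanish")     // index option: default_language
        .withLanguageOverride("lang")       // index option: language_override
        .build();

template.indexOps(DocumentWithLanguageOverride.class).ensureIndex(textIndex);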
|
||||
@Document
|
||||
@@ -527,14 +562,12 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
static class TextIndexOnNested {
|
||||
|
||||
String foo;
|
||||
|
||||
}
|
||||
|
||||
@Document
|
||||
static class TextIndexOnNestedWithWeightRoot {
|
||||
|
||||
@TextIndexed(weight = 5) TextIndexOnNested nested;
|
||||
|
||||
}
|
||||
|
||||
@Document
|
||||
@@ -554,18 +587,39 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
}
|
||||
|
||||
@Document
|
||||
static class DocumentWithLanguageOverrideOnNestedElementRoot {
|
||||
static class DocumentWithLanguageOverrideOnNestedElement {
|
||||
|
||||
DocumentWithLanguageOverrideOnNestedElement nested;
|
||||
DocumentWithLanguageOverride nested;
|
||||
}
|
||||
|
||||
static class DocumentWithLanguageOverrideOnNestedElement {
|
||||
@Document
|
||||
static class DocumentWithLanguageOverride {
|
||||
|
||||
@TextIndexed String foo;
|
||||
|
||||
@Language String lang;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class DocumentWithNoTextIndexPropertyButReservedFieldLanguage {
|
||||
|
||||
String language;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class DocumentWithNoTextIndexPropertyButReservedFieldLanguageAnnotated {
|
||||
|
||||
@Field("language") String lang;
|
||||
}
|
||||
|
||||
@Document
|
||||
static class DocumentWithOverlappingLanguageProps {
|
||||
|
||||
@TextIndexed String foo;
|
||||
String language;
|
||||
@Language String lang;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static class MixedIndexResolutionTests {
|
||||
@@ -670,7 +724,7 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
public void shouldDetectSelfCycleViaCollectionTypeCorrectly() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(SelfCyclingViaCollectionType.class);
|
||||
assertThat(indexDefinitions, IsEmptyCollection.empty());
|
||||
assertThat(indexDefinitions, empty());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -680,7 +734,7 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
public void shouldNotDetectCycleWhenTypeIsUsedMoreThanOnce() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(MultipleObjectsOfSameType.class);
|
||||
assertThat(indexDefinitions, IsEmptyCollection.empty());
|
||||
assertThat(indexDefinitions, empty());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -776,6 +830,47 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("property_index"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1087
|
||||
*/
|
||||
@Test
|
||||
public void shouldAllowMultiplePropertiesOfSameTypeWithMatchingStartLettersOnRoot() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(MultiplePropertiesOfSameTypeWithMatchingStartLetters.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(2));
|
||||
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("name.component"));
|
||||
assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"), equalTo("nameLast.component"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1087
|
||||
*/
|
||||
@Test
|
||||
public void shouldAllowMultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(MultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(2));
|
||||
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("component.nameLast"));
|
||||
assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"), equalTo("component.name"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1121
|
||||
*/
|
||||
@Test
|
||||
public void shouldOnlyConsiderEntitiesAsPotentialCycleCandidates() {
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = prepareMappingContextAndResolveIndexForType(OuterDocumentReferingToIndexedPropertyViaDifferentNonCyclingPaths.class);
|
||||
|
||||
assertThat(indexDefinitions, hasSize(2));
|
||||
assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("path1.foo"));
|
||||
assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"),
|
||||
equalTo("path2.propertyWithIndexedStructure.foo"));
|
||||
|
||||
}
|
||||
|
||||
@Document
|
||||
static class MixedIndexRoot {
|
||||
|
||||
@@ -916,6 +1011,41 @@ public class MongoPersistentEntityIndexResolverUnitTests {
|
||||
TypeWithNamedIndex propertyOfTypeHavingNamedIndex;
|
||||
}
|
||||
|
||||
@Document
|
||||
public class MultiplePropertiesOfSameTypeWithMatchingStartLetters {
|
||||
|
||||
public class NameComponent {
|
||||
|
||||
@Indexed String component;
|
||||
}
|
||||
|
||||
NameComponent name;
|
||||
NameComponent nameLast;
|
||||
}
|
||||
|
||||
@Document
|
||||
public class MultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty {
|
||||
|
||||
public class NameComponent {
|
||||
|
||||
@Indexed String nameLast;
|
||||
@Indexed String name;
|
||||
}
|
||||
|
||||
NameComponent component;
|
||||
}
|
||||
|
||||
@Document
|
||||
public static class OuterDocumentReferingToIndexedPropertyViaDifferentNonCyclingPaths {
|
||||
|
||||
NoCycleButIndenticallNamedPropertiesDeeplyNested path1;
|
||||
AlternatePathToNoCycleButIndenticallNamedPropertiesDeeplyNestedDocument path2;
|
||||
}
|
||||
|
||||
public static class AlternatePathToNoCycleButIndenticallNamedPropertiesDeeplyNestedDocument {
|
||||
NoCycleButIndenticallNamedPropertiesDeeplyNested propertyWithIndexedStructure;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static List<IndexDefinitionHolder> prepareMappingContextAndResolveIndexForType(Class<?> type) {
|
||||
|
||||
@@ -24,6 +24,7 @@ import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
|
||||
/**
|
||||
@@ -36,6 +37,7 @@ import org.springframework.data.util.ClassTypeInformation;
|
||||
public class BasicMongoPersistentEntityUnitTests {
|
||||
|
||||
@Mock ApplicationContext context;
|
||||
@Mock MongoPersistentProperty propertyMock;
|
||||
|
||||
@Test
|
||||
public void subclassInheritsAtDocumentAnnotation() {
|
||||
@@ -53,6 +55,9 @@ public class BasicMongoPersistentEntityUnitTests {
|
||||
assertThat(entity.getCollection(), is("35"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-65, DATAMONGO-1108
|
||||
*/
|
||||
@Test
|
||||
public void collectionAllowsReferencingSpringBean() {
|
||||
|
||||
@@ -67,6 +72,9 @@ public class BasicMongoPersistentEntityUnitTests {
|
||||
entity.setApplicationContext(context);
|
||||
|
||||
assertThat(entity.getCollection(), is("reference"));
|
||||
|
||||
provider.collectionName = "otherReference";
|
||||
assertThat(entity.getCollection(), is("otherReference"));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -80,6 +88,61 @@ public class BasicMongoPersistentEntityUnitTests {
|
||||
assertThat(entity.getLanguage(), is("spanish"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1053
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
@Test(expected = MappingException.class)
|
||||
public void verifyShouldThrowExceptionForInvalidTypeOfExplicitLanguageProperty() {
|
||||
|
||||
BasicMongoPersistentEntity<AnyDocument> entity = new BasicMongoPersistentEntity<AnyDocument>(
|
||||
ClassTypeInformation.from(AnyDocument.class));
|
||||
|
||||
when(propertyMock.isExplicitLanguageProperty()).thenReturn(true);
|
||||
when(propertyMock.getActualType()).thenReturn((Class) Number.class);
|
||||
|
||||
entity.addPersistentProperty(propertyMock);
|
||||
entity.verify();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1053
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
@Test
|
||||
public void verifyShouldPassForStringAsExplicitLanguageProperty() {
|
||||
|
||||
BasicMongoPersistentEntity<AnyDocument> entity = new BasicMongoPersistentEntity<AnyDocument>(
|
||||
ClassTypeInformation.from(AnyDocument.class));
|
||||
when(propertyMock.isExplicitLanguageProperty()).thenReturn(true);
|
||||
when(propertyMock.getActualType()).thenReturn((Class) String.class);
|
||||
entity.addPersistentProperty(propertyMock);
|
||||
|
||||
entity.verify();
|
||||
|
||||
verify(propertyMock, times(1)).isExplicitLanguageProperty();
|
||||
verify(propertyMock, times(1)).getActualType();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1053
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
@Test
|
||||
public void verifyShouldIgnoreNonExplicitLanguageProperty() {
|
||||
|
||||
BasicMongoPersistentEntity<AnyDocument> entity = new BasicMongoPersistentEntity<AnyDocument>(
|
||||
ClassTypeInformation.from(AnyDocument.class));
|
||||
when(propertyMock.isExplicitLanguageProperty()).thenReturn(false);
|
||||
when(propertyMock.getActualType()).thenReturn((Class) Number.class);
|
||||
entity.addPersistentProperty(propertyMock);
|
||||
|
||||
entity.verify();
|
||||
|
||||
verify(propertyMock, times(1)).isExplicitLanguageProperty();
|
||||
verify(propertyMock, never()).getActualType();
|
||||
}
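Put differently, entity verification only inspects properties explicitly marked as the language property, and those must be of type String; a minimal sketch:

@Document
class Article {

    @Language String lang;       // accepted by verify()

    // @Language Integer level;  // would fail verification with a MappingException
}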
|
||||
@Document(collection = "contacts")
|
||||
class Contact {
|
||||
|
||||
@@ -111,4 +174,8 @@ public class BasicMongoPersistentEntityUnitTests {
|
||||
static class DocumentWithLanguage {
|
||||
|
||||
}
|
||||
|
||||
static class AnyDocument {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -106,6 +106,7 @@ public class MongoMappingContextUnitTests {
|
||||
exception.expectMessage("firstname");
|
||||
exception.expectMessage("lastname");
|
||||
exception.expectMessage("foo");
|
||||
exception.expectMessage("@Field");
|
||||
|
||||
MongoMappingContext context = new MongoMappingContext();
|
||||
context.setApplicationContext(applicationContext);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,6 +18,8 @@ package org.springframework.data.mongodb.core.query;
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import nl.jqno.equalsverifier.EqualsVerifier;
|
||||
import nl.jqno.equalsverifier.Warning;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
@@ -29,6 +31,7 @@ import com.mongodb.DBObject;
|
||||
* Unit tests for {@link BasicQuery}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
public class BasicQueryUnitTests {
|
||||
|
||||
@@ -58,4 +61,80 @@ public class BasicQueryUnitTests {
|
||||
sortReference.put("lastname", 1);
|
||||
assertThat(query.getSortObject(), is(sortReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1093
|
||||
*/
|
||||
@Test
|
||||
public void equalsContract() {
|
||||
|
||||
BasicQuery query1 = new BasicQuery("{ \"name\" : \"Thomas\"}", "{\"name\":1, \"age\":1}");
|
||||
query1.setSortObject(new BasicDBObject("name", -1));
|
||||
|
||||
BasicQuery query2 = new BasicQuery("{ \"name\" : \"Oliver\"}", "{\"name\":1, \"address\":1}");
|
||||
query2.setSortObject(new BasicDBObject("name", 1));
|
||||
|
||||
EqualsVerifier.forExamples(query1, query2) //
|
||||
.withRedefinedSuperclass() //
|
||||
.suppress(Warning.NONFINAL_FIELDS, Warning.NULL_FIELDS, Warning.STRICT_INHERITANCE) //
|
||||
.verify();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1093
|
||||
*/
|
||||
@Test
|
||||
public void handlesEqualsAndHashCodeCorrectlyForExactCopies() {
|
||||
|
||||
String qry = "{ \"name\" : \"Thomas\"}";
|
||||
String fields = "{\"name\":1, \"age\":1}";
|
||||
|
||||
BasicQuery query1 = new BasicQuery(qry, fields);
|
||||
query1.setSortObject(new BasicDBObject("name", -1));
|
||||
|
||||
BasicQuery query2 = new BasicQuery(qry, fields);
|
||||
query2.setSortObject(new BasicDBObject("name", -1));
|
||||
|
||||
assertThat(query1, is(equalTo(query1)));
|
||||
assertThat(query1, is(equalTo(query2)));
|
||||
assertThat(query1.hashCode(), is(query2.hashCode()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1093
|
||||
*/
|
||||
@Test
|
||||
public void handlesEqualsAndHashCodeCorrectlyWhenBasicQuerySettingsDiffer() {
|
||||
|
||||
String qry = "{ \"name\" : \"Thomas\"}";
|
||||
String fields = "{\"name\":1, \"age\":1}";
|
||||
|
||||
BasicQuery query1 = new BasicQuery(qry, fields);
|
||||
query1.setSortObject(new BasicDBObject("name", -1));
|
||||
|
||||
BasicQuery query2 = new BasicQuery(qry, fields);
|
||||
query2.setSortObject(new BasicDBObject("name", 1));
|
||||
|
||||
assertThat(query1, is(not(equalTo(query2))));
|
||||
assertThat(query1.hashCode(), is(not(query2.hashCode())));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1093
|
||||
*/
|
||||
@Test
|
||||
public void handlesEqualsAndHashCodeCorrectlyWhenQuerySettingsDiffer() {
|
||||
|
||||
String qry = "{ \"name\" : \"Thomas\"}";
|
||||
String fields = "{\"name\":1, \"age\":1}";
|
||||
|
||||
BasicQuery query1 = new BasicQuery(qry, fields);
|
||||
query1.getMeta().setComment("foo");
|
||||
|
||||
BasicQuery query2 = new BasicQuery(qry, fields);
|
||||
query2.getMeta().setComment("bar");
|
||||
|
||||
assertThat(query1, is(not(equalTo(query2))));
|
||||
assertThat(query1.hashCode(), is(not(query2.hashCode())));
|
||||
}
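Summed up, equality of BasicQuery takes the query document, the field spec, the sort object and the meta settings into account; a short sketch of the contract the three tests assert:

BasicQuery a = new BasicQuery("{ \"name\" : \"Thomas\" }", "{ \"name\" : 1 }");
a.setSortObject(new BasicDBObject("name", -1));

BasicQuery b = new BasicQuery("{ \"name\" : \"Thomas\" }", "{ \"name\" : 1 }");
b.setSortObject(new BasicDBObject("name", -1));

// exact copies are equal and share a hash code ...
boolean equalCopies = a.equals(b) && a.hashCode() == b.hashCode();

// ... while a differing sort (or meta comment) breaks both
b.setSortObject(new BasicDBObject("name", 1));
boolean stillEqual = a.equals(b); // false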
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,12 +22,14 @@ import org.junit.Test;
|
||||
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
*/
|
||||
/**
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class CriteriaTests {
|
||||
|
||||
@Test
|
||||
@@ -72,50 +74,94 @@ public class CriteriaTests {
|
||||
assertThat(left, is(not(right)));
|
||||
assertThat(right, is(not(left)));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-507
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void shouldThrowExceptionWhenTryingToNegateAndOperation() {
|
||||
|
||||
new Criteria() //
|
||||
.not() //
|
||||
.andOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-507
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void shouldThrowExceptionWhenTryingToNegateOrOperation() {
|
||||
|
||||
new Criteria() //
|
||||
.not() //
|
||||
.orOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-507
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void shouldThrowExceptionWhenTryingToNegateNorOperation() {
|
||||
|
||||
new Criteria() //
|
||||
.not() //
|
||||
.norOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-507
|
||||
*/
|
||||
@Test
|
||||
public void shouldNegateFollowingSimpleExpression() {
|
||||
|
||||
Criteria c = Criteria.where("age").not().gt(18).and("status").is("student");
|
||||
DBObject co = c.getCriteriaObject();
|
||||
|
||||
assertThat(co, is(notNullValue()));
|
||||
assertThat(co.toString(), is("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-507
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void shouldThrowExceptionWhenTryingToNegateAndOperation() {
|
||||
|
||||
new Criteria() //
|
||||
.not() //
|
||||
.andOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-507
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void shouldThrowExceptionWhenTryingToNegateOrOperation() {
|
||||
|
||||
new Criteria() //
|
||||
.not() //
|
||||
.orOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-507
|
||||
*/
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void shouldThrowExceptionWhenTryingToNegateNorOperation() {
|
||||
|
||||
new Criteria() //
|
||||
.not() //
|
||||
.norOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-507
|
||||
*/
|
||||
@Test
|
||||
public void shouldNegateFollowingSimpleExpression() {
|
||||
|
||||
Criteria c = Criteria.where("age").not().gt(18).and("status").is("student");
|
||||
DBObject co = c.getCriteriaObject();
|
||||
|
||||
assertThat(co, is(notNullValue()));
|
||||
assertThat(co.toString(), is("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1068
|
||||
*/
|
||||
@Test
|
||||
public void getCriteriaObjectShouldReturnEmptyDBOWhenNoCriteriaSpecified() {
|
||||
|
||||
DBObject dbo = new Criteria().getCriteriaObject();
|
||||
|
||||
assertThat(dbo, equalTo(new BasicDBObjectBuilder().get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1068
|
||||
*/
|
||||
@Test
|
||||
public void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresent() {
|
||||
|
||||
DBObject dbo = new Criteria().lt("foo").getCriteriaObject();
|
||||
|
||||
assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("$lt", "foo").get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1068
|
||||
*/
|
||||
@Test
|
||||
public void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresentButMultipleCriteriasPresent() {
|
||||
|
||||
DBObject dbo = new Criteria().lt("foo").gt("bar").getCriteriaObject();
|
||||
|
||||
assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("$lt", "foo").add("$gt", "bar").get()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1068
|
||||
*/
|
||||
@Test
|
||||
public void getCriteriaObjectShouldRespectNotWhenNoKeyPresent() {
|
||||
|
||||
DBObject dbo = new Criteria().lt("foo").not().getCriteriaObject();
|
||||
|
||||
assertThat(dbo, equalTo(new BasicDBObjectBuilder().add("$not", new BasicDBObject("$lt", "foo")).get()));
|
||||
}
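Keyless criteria mostly matter when the operators apply to an enclosing expression rather than a named field, for example inside $elemMatch; a sketch with an assumed field name:

Criteria range = new Criteria().gte(80).lt(90);
Query query = query(where("scores").elemMatch(range));
// -> { "scores" : { "$elemMatch" : { "$gte" : 80 , "$lt" : 90 } } }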
}
|
||||
|
||||
@@ -19,10 +19,12 @@ import static java.util.Arrays.*;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
|
||||
import org.hamcrest.Matchers;
|
||||
import org.junit.Before;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
@@ -46,6 +48,7 @@ import org.springframework.data.geo.Polygon;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.repository.Person.Sex;
|
||||
import org.springframework.data.querydsl.QSort;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
/**
|
||||
@@ -1023,6 +1026,115 @@ public abstract class AbstractPersonRepositoryIntegrationTests {
|
||||
assertThat(result, is(notNullValue()));
|
||||
assertThat(result.firstname, is("Carter"));
|
||||
assertThat(result.lastname, is("Beauford"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1057
|
||||
*/
|
||||
@Test
|
||||
public void sliceShouldTraverseElementsWithoutSkippingOnes() {
|
||||
|
||||
repository.deleteAll();
|
||||
|
||||
List<Person> persons = new ArrayList<Person>(100);
|
||||
for (int i = 0; i < 100; i++) {
|
||||
// format firstname to assert sorting retains proper order
|
||||
persons.add(new Person(String.format("%03d", i), "ln" + 1, 100));
|
||||
}
|
||||
|
||||
repository.save(persons);
|
||||
|
||||
Slice<Person> slice = repository.findByAgeGreaterThan(50, new PageRequest(0, 20, Direction.ASC, "firstname"));
|
||||
assertThat(slice, contains(persons.subList(0, 20).toArray()));
|
||||
|
||||
slice = repository.findByAgeGreaterThan(50, slice.nextPageable());
|
||||
assertThat(slice, contains(persons.subList(20, 40).toArray()));
|
||||
}
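Typical consumption of such a Slice-returning query method, relying on nextPageable() exactly as the test does (repository and method taken from the fixture above):

Pageable pageable = new PageRequest(0, 20, Direction.ASC, "firstname");
Slice<Person> slice;

do {
    slice = repository.findByAgeGreaterThan(50, pageable);
    for (Person person : slice) {
        // process person
    }
    pageable = slice.nextPageable();
} while (slice.hasNext());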
|
||||
/**
|
||||
* @see DATAMONGO-1072
|
||||
*/
|
||||
@Test
|
||||
public void shouldBindPlaceholdersUsedAsKeysCorrectly() {
|
||||
|
||||
List<Person> persons = repository.findByKeyValue("firstname", alicia.getFirstname());
|
||||
|
||||
assertThat(persons, hasSize(1));
|
||||
assertThat(persons, hasItem(alicia));
|
||||
}
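The query method behind this test binds its first argument as the document key (it is declared further down in PersonRepository as @Query("{ ?0 : ?1 }")), so callers can match on a field chosen at runtime; the values below are illustrative:

List<Person> byFirstname = repository.findByKeyValue("firstname", "Alicia");
List<Person> byLastname = repository.findByKeyValue("lastname", "Keys");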
|
||||
/**
|
||||
* @see DATAMONGO-1085
|
||||
*/
|
||||
@Test
|
||||
public void shouldSupportSortingByQueryDslOrderSpecifier() {
|
||||
|
||||
repository.deleteAll();
|
||||
|
||||
List<Person> persons = new ArrayList<Person>();
|
||||
|
||||
for (int i = 0; i < 3; i++) {
|
||||
Person person = new Person(String.format("Siggi %s", i), "Bar", 30);
|
||||
person.setAddress(new Address(String.format("Street %s", i), "12345", "SinCity"));
|
||||
persons.add(person);
|
||||
}
|
||||
|
||||
repository.save(persons);
|
||||
|
||||
QPerson person = QPerson.person;
|
||||
|
||||
Iterable<Person> result = repository.findAll(person.firstname.isNotNull(), person.address.street.desc());
|
||||
|
||||
assertThat(result, is(Matchers.<Person> iterableWithSize(persons.size())));
|
||||
assertThat(result.iterator().next().getFirstname(), is(persons.get(2).getFirstname()));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1085
|
||||
*/
|
||||
@Test
|
||||
public void shouldSupportSortingWithQSortByQueryDslOrderSpecifier() throws Exception {
|
||||
|
||||
repository.deleteAll();
|
||||
|
||||
List<Person> persons = new ArrayList<Person>();
|
||||
|
||||
for (int i = 0; i < 3; i++) {
|
||||
Person person = new Person(String.format("Siggi %s", i), "Bar", 30);
|
||||
person.setAddress(new Address(String.format("Street %s", i), "12345", "SinCity"));
|
||||
persons.add(person);
|
||||
}
|
||||
|
||||
repository.save(persons);
|
||||
|
||||
PageRequest pageRequest = new PageRequest(0, 2, new QSort(person.address.street.desc()));
|
||||
Iterable<Person> result = repository.findAll(pageRequest);
|
||||
|
||||
assertThat(result, is(Matchers.<Person> iterableWithSize(2)));
|
||||
assertThat(result.iterator().next().getFirstname(), is("Siggi 2"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1085
|
||||
*/
|
||||
@Test
|
||||
public void shouldSupportSortingWithQSort() throws Exception {
|
||||
|
||||
repository.deleteAll();
|
||||
|
||||
List<Person> persons = new ArrayList<Person>();
|
||||
|
||||
for (int i = 0; i < 3; i++) {
|
||||
Person person = new Person(String.format("Siggi %s", i), "Bar", 30);
|
||||
person.setAddress(new Address(String.format("Street %s", i), "12345", "SinCity"));
|
||||
persons.add(person);
|
||||
}
|
||||
|
||||
repository.save(persons);
|
||||
|
||||
Iterable<Person> result = repository.findAll(new QSort(person.address.street.desc()));
|
||||
|
||||
assertThat(result, is(Matchers.<Person> iterableWithSize(persons.size())));
|
||||
assertThat(result.iterator().next().getFirstname(), is("Siggi 2"));
|
||||
}
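Condensed, the three DATAMONGO-1085 tests exercise two ways of sorting via QueryDSL artifacts; a sketch, where QPerson is the generated query type:

QPerson person = QPerson.person;

// OrderSpecifier passed next to the predicate
Iterable<Person> byPredicate = repository.findAll(
        person.firstname.isNotNull(), person.address.street.desc());

// QSort, standalone or wrapped into a PageRequest
Iterable<Person> sorted = repository.findAll(new QSort(person.address.street.desc()));
Iterable<Person> firstTwo = repository.findAll(
        new PageRequest(0, 2, new QSort(person.address.street.desc())));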
}
|
||||
|
||||
@@ -0,0 +1,132 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.hamcrest.Matchers;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration
|
||||
public class ComplexIdRepositoryIntegrationTests {
|
||||
|
||||
@Configuration
|
||||
@EnableMongoRepositories
|
||||
static class Config extends AbstractMongoConfiguration {
|
||||
|
||||
@Override
|
||||
protected String getDatabaseName() {
|
||||
return "complexIdTest";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mongo mongo() throws Exception {
|
||||
return new MongoClient();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Autowired UserWithComplexIdRepository repo;
|
||||
@Autowired MongoTemplate template;
|
||||
|
||||
MyId id;
|
||||
UserWithComplexId userWithId;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
repo.deleteAll();
|
||||
|
||||
id = new MyId();
|
||||
id.val1 = "v1";
|
||||
id.val2 = "v2";
|
||||
|
||||
userWithId = new UserWithComplexId();
|
||||
userWithId.firstname = "foo";
|
||||
userWithId.id = id;
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1078
|
||||
*/
|
||||
@Test
|
||||
public void annotatedFindQueryShouldWorkWhenUsingComplexId() {
|
||||
|
||||
repo.save(userWithId);
|
||||
|
||||
assertThat(repo.getUserByComplexId(id), is(userWithId));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1078
|
||||
*/
|
||||
@Test
|
||||
public void annotatedFindQueryShouldWorkWhenUsingComplexIdWithinCollection() {
|
||||
|
||||
repo.save(userWithId);
|
||||
|
||||
List<UserWithComplexId> loaded = repo.findByUserIds(Collections.singleton(id));
|
||||
|
||||
assertThat(loaded, hasSize(1));
|
||||
assertThat(loaded, contains(userWithId));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1078
|
||||
*/
|
||||
@Test
|
||||
public void findOneShouldWorkWhenUsingComplexId() {
|
||||
|
||||
repo.save(userWithId);
|
||||
|
||||
assertThat(repo.findOne(id), is(userWithId));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1078
|
||||
*/
|
||||
@Test
|
||||
public void findAllShouldWorkWhenUsingComplexId() {
|
||||
|
||||
repo.save(userWithId);
|
||||
|
||||
Iterable<UserWithComplexId> loaded = repo.findAll(Collections.singleton(id));
|
||||
|
||||
assertThat(loaded, is(Matchers.<UserWithComplexId> iterableWithSize(1)));
|
||||
assertThat(loaded, contains(userWithId));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,59 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class MyId implements Serializable {
|
||||
|
||||
private static final long serialVersionUID = -7129201311241750831L;
|
||||
|
||||
String val1;
|
||||
String val2;
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
int result = 31;
|
||||
|
||||
result += 17 * ObjectUtils.nullSafeHashCode(val1);
|
||||
result += 17 * ObjectUtils.nullSafeHashCode(val2);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (obj == this) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(obj instanceof MyId)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
MyId that = (MyId) obj;
|
||||
|
||||
return ObjectUtils.nullSafeEquals(this.val1, that.val1) && ObjectUtils.nullSafeEquals(this.val2, that.val2);
|
||||
}
|
||||
}
|
||||
@@ -25,6 +25,7 @@ import org.springframework.data.mongodb.core.index.GeoSpatialIndexed;
|
||||
import org.springframework.data.mongodb.core.index.Indexed;
|
||||
import org.springframework.data.mongodb.core.mapping.DBRef;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
|
||||
/**
|
||||
* Sample domain class.
|
||||
@@ -46,9 +47,11 @@ public class Person extends Contact {
|
||||
@SuppressWarnings("unused") private Sex sex;
|
||||
Date createdAt;
|
||||
|
||||
List<String> skills;
|
||||
|
||||
@GeoSpatialIndexed private Point location;
|
||||
|
||||
private Address address;
|
||||
private @Field("add") Address address;
|
||||
private Set<Address> shippingAddresses;
|
||||
|
||||
@DBRef User creator;
|
||||
@@ -271,6 +274,14 @@ public class Person extends Contact {
|
||||
this.creator = creator;
|
||||
}
|
||||
|
||||
public void setSkills(List<String> skills) {
|
||||
this.skills = skills;
|
||||
}
|
||||
|
||||
public List<String> getSkills() {
|
||||
return skills;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
|
||||
@@ -317,4 +317,7 @@ public interface PersonRepository extends MongoRepository<Person, String>, Query
|
||||
* @see DATAMONGO-1030
|
||||
*/
|
||||
PersonSummary findSummaryByLastname(String lastname);
|
||||
|
||||
@Query("{ ?0 : ?1 }")
|
||||
List<Person> findByKeyValue(String key, String value);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,57 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository;
|
||||
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@Document
|
||||
public class UserWithComplexId {
|
||||
|
||||
@Id MyId id;
|
||||
String firstname;
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
int result = 31;
|
||||
|
||||
result += 17 * ObjectUtils.nullSafeHashCode(id);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (obj == this) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(obj instanceof UserWithComplexId)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
UserWithComplexId that = (UserWithComplexId) obj;
|
||||
|
||||
return ObjectUtils.nullSafeEquals(this.id, that.id);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,33 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.repository.CrudRepository;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public interface UserWithComplexIdRepository extends CrudRepository<UserWithComplexId, MyId> {
|
||||
|
||||
@Query("{'_id': {$in: ?0}}")
|
||||
List<UserWithComplexId> findByUserIds(Collection<MyId> ids);
|
||||
|
||||
@Query("{'_id': ?0}")
|
||||
UserWithComplexId getUserByComplexId(MyId id);
|
||||
}
|
||||
@@ -15,14 +15,16 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Matchers.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.types.ObjectId;
|
||||
import org.hamcrest.core.Is;
|
||||
@@ -32,10 +34,13 @@ import org.junit.runner.RunWith;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Matchers;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.runners.MockitoJUnitRunner;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.Person;
|
||||
@@ -50,6 +55,8 @@ import org.springframework.data.mongodb.repository.Meta;
|
||||
import org.springframework.data.mongodb.repository.MongoRepository;
|
||||
import org.springframework.data.repository.core.RepositoryMetadata;
|
||||
|
||||
import com.mongodb.BasicDBObjectBuilder;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.WriteResult;
|
||||
|
||||
/**
|
||||
@@ -93,8 +100,7 @@ public class AbstractMongoQueryUnitTests {
|
||||
|
||||
createQueryForMethod("deletePersonByLastname", String.class).setDeleteQuery(true).execute(new Object[] { "booh" });
|
||||
|
||||
verify(this.mongoOperationsMock, times(1)).remove(Matchers.any(Query.class), Matchers.eq(Person.class),
|
||||
Matchers.eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(1)).remove(Matchers.any(Query.class), eq(Person.class), eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(0)).find(Matchers.any(Query.class), Matchers.any(Class.class),
|
||||
Matchers.anyString());
|
||||
}
|
||||
@@ -112,8 +118,8 @@ public class AbstractMongoQueryUnitTests {
|
||||
|
||||
createQueryForMethod("deleteByLastname", String.class).setDeleteQuery(true).execute(new Object[] { "booh" });
|
||||
|
||||
verify(this.mongoOperationsMock, times(1)).findAllAndRemove(Matchers.any(Query.class), Matchers.eq(Person.class),
|
||||
Matchers.eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(1)).findAllAndRemove(Matchers.any(Query.class), eq(Person.class),
|
||||
eq("persons"));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -136,15 +142,14 @@ public class AbstractMongoQueryUnitTests {
|
||||
public void testDeleteExecutionReturnsNrDocumentsDeletedFromWriteResult() {
|
||||
|
||||
when(writeResultMock.getN()).thenReturn(100);
|
||||
when(this.mongoOperationsMock.remove(Matchers.any(Query.class), Matchers.eq(Person.class), Matchers.eq("persons")))
|
||||
.thenReturn(writeResultMock);
|
||||
when(this.mongoOperationsMock.remove(Matchers.any(Query.class), eq(Person.class), eq("persons"))).thenReturn(
|
||||
writeResultMock);
|
||||
|
||||
MongoQueryFake query = createQueryForMethod("deletePersonByLastname", String.class);
|
||||
query.setDeleteQuery(true);
|
||||
|
||||
assertThat(query.execute(new Object[] { "fake" }), is((Object) 100L));
|
||||
verify(this.mongoOperationsMock, times(1)).remove(Matchers.any(Query.class), Matchers.eq(Person.class),
|
||||
Matchers.eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(1)).remove(Matchers.any(Query.class), eq(Person.class), eq("persons"));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -158,8 +163,7 @@ public class AbstractMongoQueryUnitTests {
|
||||
|
||||
ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
|
||||
|
||||
verify(this.mongoOperationsMock, times(1))
|
||||
.find(captor.capture(), Matchers.eq(Person.class), Matchers.eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(1)).find(captor.capture(), eq(Person.class), eq("persons"));
|
||||
|
||||
assertThat(captor.getValue().getMeta().getComment(), nullValue());
|
||||
}
|
||||
@@ -175,8 +179,7 @@ public class AbstractMongoQueryUnitTests {
|
||||
|
||||
ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
|
||||
|
||||
verify(this.mongoOperationsMock, times(1))
|
||||
.find(captor.capture(), Matchers.eq(Person.class), Matchers.eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(1)).find(captor.capture(), eq(Person.class), eq("persons"));
|
||||
assertThat(captor.getValue().getMeta().getComment(), is("comment"));
|
||||
}
|
||||
|
||||
@@ -191,7 +194,7 @@ public class AbstractMongoQueryUnitTests {
|
||||
|
||||
ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
|
||||
|
||||
verify(this.mongoOperationsMock, times(1)).count(captor.capture(), Matchers.eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(1)).count(captor.capture(), eq(Person.class), eq("persons"));
|
||||
assertThat(captor.getValue().getMeta().getComment(), is("comment"));
|
||||
}
|
||||
|
||||
@@ -206,11 +209,89 @@ public class AbstractMongoQueryUnitTests {
|
||||
|
||||
ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
|
||||
|
||||
verify(this.mongoOperationsMock, times(1))
|
||||
.find(captor.capture(), Matchers.eq(Person.class), Matchers.eq("persons"));
|
||||
verify(this.mongoOperationsMock, times(1)).find(captor.capture(), eq(Person.class), eq("persons"));
|
||||
assertThat(captor.getValue().getMeta().getComment(), is("comment"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1057
|
||||
*/
|
||||
@Test
|
||||
public void slicedExecutionShouldRetainNrOfElementsToSkip() {
|
||||
|
||||
MongoQueryFake query = createQueryForMethod("findByLastname", String.class, Pageable.class);
|
||||
Pageable page1 = new PageRequest(0, 10);
|
||||
Pageable page2 = page1.next();
|
||||
|
||||
query.execute(new Object[] { "fake", page1 });
|
||||
query.execute(new Object[] { "fake", page2 });
|
||||
|
||||
ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
|
||||
|
||||
verify(this.mongoOperationsMock, times(2)).find(captor.capture(), eq(Person.class), eq("persons"));
|
||||
|
||||
assertThat(captor.getAllValues().get(0).getSkip(), is(0));
|
||||
assertThat(captor.getAllValues().get(1).getSkip(), is(10));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1057
|
||||
*/
|
||||
@Test
|
||||
public void slicedExecutionShouldIncrementLimitByOne() {
|
||||
|
||||
MongoQueryFake query = createQueryForMethod("findByLastname", String.class, Pageable.class);
|
||||
Pageable page1 = new PageRequest(0, 10);
|
||||
Pageable page2 = page1.next();
|
||||
|
||||
query.execute(new Object[] { "fake", page1 });
|
||||
query.execute(new Object[] { "fake", page2 });
|
||||
|
||||
ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
|
||||
|
||||
verify(this.mongoOperationsMock, times(2)).find(captor.capture(), eq(Person.class), eq("persons"));
|
||||
|
||||
assertThat(captor.getAllValues().get(0).getLimit(), is(11));
|
||||
assertThat(captor.getAllValues().get(1).getLimit(), is(11));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1057
|
||||
*/
|
||||
@Test
|
||||
public void slicedExecutionShouldRetainSort() {
|
||||
|
||||
MongoQueryFake query = createQueryForMethod("findByLastname", String.class, Pageable.class);
|
||||
Pageable page1 = new PageRequest(0, 10, Sort.Direction.DESC, "bar");
|
||||
Pageable page2 = page1.next();
|
||||
|
||||
query.execute(new Object[] { "fake", page1 });
|
||||
query.execute(new Object[] { "fake", page2 });
|
||||
|
||||
ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
|
||||
|
||||
verify(this.mongoOperationsMock, times(2)).find(captor.capture(), eq(Person.class), eq("persons"));
|
||||
|
||||
DBObject expectedSortObject = new BasicDBObjectBuilder().add("bar", -1).get();
|
||||
assertThat(captor.getAllValues().get(0).getSortObject(), is(expectedSortObject));
|
||||
assertThat(captor.getAllValues().get(1).getSortObject(), is(expectedSortObject));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1080
|
||||
*/
|
||||
@Test
|
||||
public void doesNotTryToPostProcessQueryResultIntoWrapperType() {
|
||||
|
||||
Person reference = new Person();
|
||||
when(mongoOperationsMock.findOne(Mockito.any(Query.class), eq(Person.class), eq("persons"))).//
|
||||
thenReturn(reference);
|
||||
|
||||
AbstractMongoQuery query = createQueryForMethod("findByLastname", String.class);
|
||||
|
||||
assertThat(query.execute(new Object[] { "lastname" }), is((Object) reference));
|
||||
}
|
||||
|
||||
private MongoQueryFake createQueryForMethod(String methodName, Class<?>... paramTypes) {
|
||||
|
||||
try {
|
||||
@@ -272,5 +353,9 @@ public class AbstractMongoQueryUnitTests {
|
||||
@org.springframework.data.mongodb.repository.Query("{}")
|
||||
Page<Person> findByAnnotatedQuery(String firstnanme, Pageable pageable);
|
||||
|
||||
/** @see DATAMONGO-1057 */
|
||||
Slice<Person> findByLastname(String lastname, Pageable page);
|
||||
|
||||
Optional<Person> findByLastname(String lastname);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -41,6 +41,7 @@ import org.springframework.data.geo.Metrics;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.Person;
|
||||
import org.springframework.data.mongodb.core.Venue;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.DBRef;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
@@ -167,19 +168,6 @@ public class MongoQueryCreatorUnitTests {
|
||||
assertThat(creator.createQuery(), is(reference));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-291
|
||||
*/
|
||||
@Test
|
||||
public void honoursMappingInformationForPropertyPaths() {
|
||||
|
||||
PartTree partTree = new PartTree("findByUsername", User.class);
|
||||
|
||||
MongoQueryCreator creator = new MongoQueryCreator(partTree, getAccessor(converter, "Oliver"), context);
|
||||
Query reference = query(where("foo").is("Oliver"));
|
||||
assertThat(creator.createQuery(), is(reference));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-338
|
||||
*/
|
||||
@@ -268,7 +256,7 @@ public class MongoQueryCreatorUnitTests {
|
||||
MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "Matt"), context);
|
||||
Query query = creator.createQuery();
|
||||
|
||||
assertThat(query, is(query(where("foo").regex("^Matt"))));
|
||||
assertThat(query, is(query(where("username").regex("^Matt"))));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -281,7 +269,7 @@ public class MongoQueryCreatorUnitTests {
|
||||
MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "ews"), context);
|
||||
Query query = creator.createQuery();
|
||||
|
||||
assertThat(query, is(query(where("foo").regex("ews$"))));
|
||||
assertThat(query, is(query(where("username").regex("ews$"))));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -294,7 +282,7 @@ public class MongoQueryCreatorUnitTests {
|
||||
MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "thew"), context);
|
||||
Query query = creator.createQuery();
|
||||
|
||||
assertThat(query, is(query(where("foo").regex(".*thew.*"))));
|
||||
assertThat(query, is(query(where("username").regex(".*thew.*"))));
|
||||
}
|
||||
|
||||
private void assertBindsDistanceToQuery(Point point, Distance distance, Query reference) throws Exception {
|
||||
@@ -438,6 +426,36 @@ public class MongoQueryCreatorUnitTests {
|
||||
assertThat(query, is(query(where("firstName").regex("^dave$", "i").and("age").is(42))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see DATAMONGO-1075
|
||||
*/
|
||||
@Test
|
||||
public void shouldCreateInClauseWhenUsingContainsOnCollectionLikeProperty() {
|
||||
|
||||
PartTree tree = new PartTree("findByEmailAddressesContaining", User.class);
|
||||
MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context);
|
||||
|
||||
Query query = creator.createQuery();
|
||||
|
||||
assertThat(query, is(query(where("emailAddresses").in("dave"))));
|
||||
}
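The derived query this test describes, spelled out (the repository interface is hypothetical; the User fixture with its emailAddresses collection is defined below):

interface UserRepository extends Repository<User, String> {

    // "Containing" on a collection-like property degenerates to an $in on the value:
    // findByEmailAddressesContaining("dave") -> { "emailAddresses" : { "$in" : [ "dave" ] } }
    List<User> findByEmailAddressesContaining(String emailAddress);
}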
|
||||
/**
|
||||
* @see DATAMONGO-1139
|
||||
*/
|
||||
@Test
|
||||
public void createsNonShericalNearForDistanceWithDefaultMetric() {
|
||||
|
||||
Point point = new Point(1.0, 1.0);
|
||||
Distance distance = new Distance(1.0);
|
||||
|
||||
PartTree tree = new PartTree("findByLocationNear", Venue.class);
|
||||
MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, point, distance), context);
|
||||
Query query = creator.createQuery();
|
||||
|
||||
assertThat(query, is(query(where("location").near(point).maxDistance(1.0))));
|
||||
}
|
||||
|
||||
interface PersonRepository extends Repository<Person, Long> {
|
||||
|
||||
List<Person> findByLocationNearAndFirstname(Point location, Distance maxDistance, String firstname);
|
||||
@@ -448,5 +466,7 @@ public class MongoQueryCreatorUnitTests {
|
||||
@Field("foo") String username;
|
||||
|
||||
@DBRef User creator;
|
||||
|
||||
List<String> emailAddresses;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,6 +29,7 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.data.mongodb.core.DBObjectTestUtils;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper;
@@ -42,7 +43,9 @@ import org.springframework.data.mongodb.repository.Query;
import org.springframework.data.repository.core.RepositoryMetadata;

import com.mongodb.BasicDBObject;
import com.mongodb.BasicDBObjectBuilder;
import com.mongodb.DBObject;
import com.mongodb.DBRef;

/**
* Unit tests for {@link StringBasedMongoQuery}.
@@ -255,6 +258,37 @@ public class StringBasedMongoQueryUnitTests {
assertThat(query.getQueryObject(), is(reference.getQueryObject()));
}

/**
* @see DATAMONGO-1070
*/
@Test
public void parsesDbRefDeclarationsCorrectly() throws Exception {

StringBasedMongoQuery mongoQuery = createQueryForMethod("methodWithManuallyDefinedDbRef", String.class);
ConvertingParameterAccessor parameterAccessor = StubParameterAccessor.getAccessor(converter, "myid");

org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(parameterAccessor);

DBRef dbRef = DBObjectTestUtils.getTypedValue(query.getQueryObject(), "reference", DBRef.class);
assertThat(dbRef.getId(), is((Object) "myid"));
assertThat(dbRef.getRef(), is("reference"));
}

/**
* @see DATAMONGO-1072
*/
@Test
public void shouldParseJsonKeyReplacementCorrectly() throws Exception {

StringBasedMongoQuery mongoQuery = createQueryForMethod("methodWithPlaceholderInKeyOfJsonStructure", String.class,
String.class);
ConvertingParameterAccessor parameterAccessor = StubParameterAccessor.getAccessor(converter, "key", "value");

org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(parameterAccessor);

assertThat(query.getQueryObject(), is(new BasicDBObjectBuilder().add("key", "value").get()));
}

private StringBasedMongoQuery createQueryForMethod(String name, Class<?>... parameters) throws Exception {

Method method = SampleRepository.class.getMethod(name, parameters);
@@ -291,7 +325,14 @@ public class StringBasedMongoQueryUnitTests {
@Query("{'title': { $regex : '^?0', $options : 'i'}}")
List<DBObject> findByTitleBeginsWithExplicitQuoting(String title);

@Query(value = "{$where: 'return this.date.getUTCMonth() == ?2 && this.date.getUTCDay() == ?3;'}")
@Query("{$where: 'return this.date.getUTCMonth() == ?2 && this.date.getUTCDay() == ?3;'}")
List<DBObject> findByQueryWithParametersInExpression(int param1, int param2, int param3, int param4);

@Query("{ 'reference' : { $ref : 'reference', $id : ?0 }}")
Object methodWithManuallyDefinedDbRef(String id);

@Query("{ ?0 : ?1}")
Object methodWithPlaceholderInKeyOfJsonStructure(String keyReplacement, String valueReplacement);

}
}

@@ -34,6 +34,7 @@ import org.springframework.data.repository.query.ParameterAccessor;
class StubParameterAccessor implements MongoParameterAccessor {

private final Object[] values;
private Distance distance;

/**
* Creates a new {@link ConvertingParameterAccessor} backed by a {@link StubParameterAccessor} simply returning the
@@ -48,7 +49,14 @@ class StubParameterAccessor implements MongoParameterAccessor {
}

public StubParameterAccessor(Object... values) {

this.values = values;

for (Object value : values) {
if (value instanceof Distance) {
this.distance = (Distance) value;
}
}
}

/*
@@ -88,7 +96,7 @@ class StubParameterAccessor implements MongoParameterAccessor {
* @see org.springframework.data.mongodb.repository.MongoParameterAccessor#getMaxDistance()
*/
public Distance getMaxDistance() {
return null;
return distance;
}

/*

@@ -18,6 +18,8 @@ package org.springframework.data.mongodb.repository.support;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;

import java.util.Arrays;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -40,8 +42,7 @@ import com.mysema.query.mongodb.MongodbQuery;
@ContextConfiguration("classpath:infrastructure.xml")
public class QuerydslRepositorySupportUnitTests {

@Autowired
MongoOperations operations;
@Autowired MongoOperations operations;
Person person;

@Before
@@ -54,9 +55,26 @@ public class QuerydslRepositorySupportUnitTests {
@Test
public void providesMongoQuery() {
QPerson p = QPerson.person;
QuerydslRepositorySupport support = new QuerydslRepositorySupport(operations) {
};
QuerydslRepositorySupport support = new QuerydslRepositorySupport(operations) {};
MongodbQuery<Person> query = support.from(p).where(p.lastname.eq("Matthews"));
assertThat(query.uniqueResult(), is(person));
}

/**
* @see DATAMONGO-1063
*/
@Test
public void shouldAllowAny() {

person.setSkills(Arrays.asList("vocalist", "songwriter", "guitarist"));

operations.save(person);

QPerson p = QPerson.person;
QuerydslRepositorySupport support = new QuerydslRepositorySupport(operations) {};

MongodbQuery<Person> query = support.from(p).where(p.skills.any().in("guitarist"));

assertThat(query.uniqueResult(), is(person));
}
}

@@ -1,5 +1,5 @@
/*
* Copyright 2010-2012 the original author or authors.
* Copyright 2010-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,10 +16,16 @@
package org.springframework.data.mongodb.repository.support;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.*;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

import org.junit.Before;
import org.junit.Test;
@@ -34,13 +40,13 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

/**
* @author <a href="mailto:kowsercse@gmail.com">A. B. M. Kowser</a>
* @author Thomas Darimont
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:infrastructure.xml")
public class SimpleMongoRepositoryTests {

@Autowired
private MongoTemplate template;
@Autowired private MongoTemplate template;

private Person oliver, dave, carter, boyd, stefan, leroi, alicia;
private List<Person> all;
@@ -69,7 +75,7 @@ public class SimpleMongoRepositoryTests {
List<Person> result = repository.findAll();
assertThat(result, hasSize(all.size()));
}


@Test
public void findOneFromCustomCollectionName() {
Person result = repository.findOne(dave.getId());
@@ -94,6 +100,74 @@ public class SimpleMongoRepositoryTests {
assertThat(result, not(hasItem(dave)));
}

/**
* @see DATAMONGO-1054
*/
@Test
public void shouldInsertSingle() {

String randomId = UUID.randomUUID().toString();

Person person1 = new Person("First1" + randomId, "Last2" + randomId, 42);
person1 = repository.insert(person1);

Person saved = repository.findOne(person1.getId());

assertThat(saved, is(equalTo(person1)));
}

/**
* @see DATAMONGO-1054
*/
@Test
public void shouldInsertMutlipleFromList() {

String randomId = UUID.randomUUID().toString();
Map<String, Person> idToPerson = new HashMap<String, Person>();
List<Person> persons = new ArrayList<Person>();

for (int i = 0; i < 10; i++) {
Person person = new Person("First" + i + randomId, "Last" + randomId + i, 42 + i);
idToPerson.put(person.getId(), person);
persons.add(person);
}

List<Person> saved = repository.insert(persons);

assertThat(saved, hasSize(persons.size()));
assertThatAllReferencePersonsWereStoredCorrectly(idToPerson, saved);
}

/**
* @see DATAMONGO-1054
*/
@Test
public void shouldInsertMutlipleFromSet() {

String randomId = UUID.randomUUID().toString();
Map<String, Person> idToPerson = new HashMap<String, Person>();
Set<Person> persons = new HashSet<Person>();

for (int i = 0; i < 10; i++) {
Person person = new Person("First" + i + randomId, "Last" + i + randomId, 42 + i);
idToPerson.put(person.getId(), person);
persons.add(person);
}

List<Person> saved = repository.insert(persons);

assertThat(saved, hasSize(persons.size()));
assertThatAllReferencePersonsWereStoredCorrectly(idToPerson, saved);
}

private void assertThatAllReferencePersonsWereStoredCorrectly(Map<String, Person> references, List<Person> saved) {

for (Person person : saved) {
Person reference = references.get(person.getId());
assertThat(person, is(equalTo(reference)));
}
}

private static class CustomizedPersonInformation implements MongoEntityInformation<Person, String> {

@Override

@@ -147,87 +147,87 @@ The first method shows a query for all people with the given lastname. The query

NOTE: Note that for version 1.0 we currently don't support referring to parameters that are mapped as `DBRef` in the domain class.

[cols="1,2,3", options="header"]
[cols="1,2,3", options="header"]
.Supported keywords for query methods
|===
| Keyword
| Sample
| Sample
| Logical result

| `GreaterThan`
| `findByAgeGreaterThan(int age)`
| `GreaterThan`
| `findByAgeGreaterThan(int age)`
| `{"age" : {"$gt" : age}}`

| `GreaterThanEqual`
| `findByAgeGreaterThanEqual(int age)`
| `GreaterThanEqual`
| `findByAgeGreaterThanEqual(int age)`
| `{"age" : {"$gte" : age}}`

| `LessThan`
| `findByAgeLessThan(int age)`
| `LessThan`
| `findByAgeLessThan(int age)`
| `{"age" : {"$lt" : age}}`

| `LessThanEqual`
| `findByAgeLessThanEqual(int age)`
| `LessThanEqual`
| `findByAgeLessThanEqual(int age)`
| `{"age" : {"$lte" : age}}`

| `Between`
| `findByAgeBetween(int from, int to)`
| `Between`
| `findByAgeBetween(int from, int to)`
| `{"age" : {"$gt" : from, "$lt" : to}}`

| `In`
| `In`
| `findByAgeIn(Collection ages)`
| `{"age" : {"$in" : [ages...]}}`

| `NotIn`
| `findByAgeNotIn(Collection ages)`
| `NotIn`
| `findByAgeNotIn(Collection ages)`
| `{"age" : {"$nin" : [ages...]}}`

| `IsNotNull, NotNull`
| `findByFirstnameNotNull()`
| `{"age" : {"$ne" : null}}`
| `IsNotNull, NotNull`
| `findByFirstnameNotNull()`
| `{"firstname" : {"$ne" : null}}`

| `IsNull, Null`
| `findByFirstnameNull()`
| `{"age" : null}`
| `IsNull, Null`
| `findByFirstnameNull()`
| `{"firstname" : null}`

| `Like`
| `Like`
| `findByFirstnameLike(String name)`
| `{"age" : age} ( age as regex)`
| `{"firstname" : name} ( name as regex)`

| `Regex`
| `findByFirstnameRegex(String firstname)`
| `Regex`
| `findByFirstnameRegex(String firstname)`
| `{"firstname" : {"$regex" : firstname }}`

| `(No keyword)`
| `(No keyword)`
| `findByFirstname(String name)`
| `{"age" : name}`
| `{"firstname" : name}`

| `Not`
| `findByFirstnameNot(String name)`
| `{"age" : {"$ne" : name}}`
| `Not`
| `findByFirstnameNot(String name)`
| `{"firstname" : {"$ne" : name}}`

| `Near`
| `findByLocationNear(Point point)`
| `Near`
| `findByLocationNear(Point point)`
| `{"location" : {"$near" : [x,y]}}`

| `Within`
| `findByLocationWithin(Circle circle)`
| `Within`
| `findByLocationWithin(Circle circle)`
| `{"location" : {"$within" : {"$center" : [ [x, y], distance]}}}`

| `Within`
| `findByLocationWithin(Box box)`
| `Within`
| `findByLocationWithin(Box box)`
| `{"location" : {"$within" : {"$box" : [ [x1, y1], x2, y2]}}}True`

| `IsTrue, True`
| `findByActiveIsTrue()`
| `IsTrue, True`
| `findByActiveIsTrue()`
| `{"active" : true}`

| `IsFalse, False`
| `findByActiveIsFalse()`
| `IsFalse, False`
| `findByActiveIsFalse()`
| `{"active" : false}`

| `Exists`
| `findByLocationExists(boolean exists)`
| `Exists`
| `findByLocationExists(boolean exists)`
| `{"location" : {"$exists" : exists }}`
|===

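Purely for illustration (this interface appears in neither the documentation nor the code base), a hypothetical repository combining a few of the keywords from the table above could look like the following sketch; the `Person` properties `age`, `firstname`, `location` and `active` are assumed:

[source,java]
----
// Illustrative sketch only: derived queries for a hypothetical Person document.
public interface PersonSampleRepository extends MongoRepository<Person, String> {

  // {"age" : {"$lt" : age}}
  List<Person> findByAgeLessThan(int age);

  // {"firstname" : {"$regex" : firstname}}
  List<Person> findByFirstnameRegex(String firstname);

  // {"location" : {"$near" : [x, y]}}
  List<Person> findByLocationNear(Point point);

  // {"active" : true}
  List<Person> findByActiveIsTrue();
}
----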
@@ -463,4 +463,4 @@ class RepositoryClient {
List<Person> people = repository.findAll();
}
}
----
----

@@ -1538,7 +1538,8 @@ Note that the aggregation operations not listed here are currently not supported
[[mongo.aggregation.projection]]
=== Projection Expressions

Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined via the `project` method of the `Aggregate` class.
Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined via the `project` method of the `Aggregate` class either by passing a list of `String`s or an aggregation framework `Fields` object. The projection can be extended with additional fields through a fluent API via the `and(String)` method and aliased via the `as(String)` method.
Note that one can also define fields with aliases via the static factory method `Fields.field` of the aggregation framework that can then be used to construct a new `Fields` instance.

.Projection expression examples
====

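The documentation's own example block is truncated by this hunk. As a minimal illustrative sketch (not the documentation's example), the fluent style described in the paragraph above could look roughly like this, with invented field names such as `name`, `netPrice`, `foo` and `bar`:

[source,java]
----
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.Fields;

class ProjectionSketch {

  // include "name" and "netPrice" as-is, and additionally expose "foo" under the alias "bar"
  Aggregation fluentAlias() {
    return newAggregation(project("name", "netPrice").and("foo").as("bar"));
  }

  // the same alias expressed via the Fields.field factory (alias first, referenced field second)
  Aggregation fieldsAlias() {
    return newAggregation(project(Fields.from(Fields.field("bar", "foo"))));
  }
}
----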
@@ -1,6 +1,112 @@
Spring Data MongoDB Changelog
=============================

Changes in version 1.6.2.RELEASE (2015-01-28)
---------------------------------------------
* DATAMONGO-1148 - Use EclipseLink provided JPA API JAR.
* DATAMONGO-1147 - Remove manual array copy.
* DATAMONGO-1145 - Upgrade MongoDB Java driver to 2.12.5.
* DATAMONGO-1144 - Release 1.6.2.
* DATAMONGO-1139 - MongoQueryCreator must not create $nearSphere query for neutral Distance.
* DATAMONGO-1132 - The sample does not match the logical result in the MongoDB repositories section of the documentation.
* DATAMONGO-1127 - Add support for geoNear queries with distance information.
* DATAMONGO-1126 - Repository keyword query findByInId with pageable not returning correctly.
* DATAMONGO-1123 - geoNear, does not return all matching elements, it returns only a max of 100 documents.
* DATAMONGO-1121 - "Cycle found" false positive.
* DATAMONGO-1120 - Pageable queries timeout or return incorrect counts.
* DATAMONGO-1118 - Custom converters not used for map keys.
* DATAMONGO-1108 - BasicMongoPersistentEntity doesn't need to parse expression on every invocation.
* DATAMONGO-1096 - RuntimeExceptions during debug query printing in MongoTemplate.
* DATAMONGO-1094 - Wrong reference to @DocumentField in error message.
* DATAMONGO-1093 - BasicQuery missing hashCode() and equals(…) methods.
* DATAMONGO-1087 - Incorrect warning for MongoPersistentEntityIndexResolver$CyclicPropertyReferenceException: Found cycle for field...
* DATAMONGO-1085 - Sort can not use the metamodel classes generated by QueryDSL.
* DATAMONGO-1082 - Improve JavaDoc and reference documentation on alias usage in aggregation framework support.
* DATAMONGO-1078 - @Query annotated repository query fails to map complex Id structure.
* DATAMONGO-1075 - Correctly evaluate CONTAINS keyword on collection properties.
* DATAMONGO-1054 - Improve performance of saving entities by using insert(…) if possible.
* DATAMONGO-1043 - SpEL Expressions in @Document annotations are not re-evaluated for query executions.
* DATAMONGO-712 - Another round of potential performance improvements.


Changes in version 1.5.5.RELEASE (2015-01-27)
---------------------------------------------
* DATAMONGO-1148 - Use EclipseLink provided JPA API JAR.
* DATAMONGO-1147 - Remove manual array copy.
* DATAMONGO-1143 - Release 1.5.5.
* DATAMONGO-1139 - MongoQueryCreator must not create $nearSphere query for neutral Distance.
* DATAMONGO-1123 - geoNear, does not return all matching elements, it returns only a max of 100 documents.
* DATAMONGO-1121 - "Cycle found" false positive.
* DATAMONGO-1118 - Custom converters not used for map keys.
* DATAMONGO-1096 - RuntimeExceptions during debug query printing in MongoTemplate.
* DATAMONGO-1094 - Wrong reference to @DocumentField in error message.
* DATAMONGO-1087 - Incorrect warning for MongoPersistentEntityIndexResolver$CyclicPropertyReferenceException: Found cycle for field...
* DATAMONGO-1078 - @Query annotated repository query fails to map complex Id structure.
* DATAMONGO-1075 - Correctly evaluate CONTAINS keyword on collection properties.
* DATAMONGO-1072 - Query placeholders in keys no longer correctly substituted.
* DATAMONGO-1068 - elemMatch of Class Criteria fails to build special cirteria.
* DATAMONGO-1063 - IllegalStateException using any().in().
* DATAMONGO-1062 - Fix failing test in ServerAddressPropertyEditorUnitTests.
* DATAMONGO-1058 - Using @Field("foo") with @Dbref breaking behavior.
* DATAMONGO-1045 - Make sure Spring Data MongoDB can build against Spring 4.1.
* DATAMONGO-1043 - SpEL Expressions in @Document annotations are not re-evaluated for query executions.
* DATAMONGO-1040 - deleteAll repository query don't use EntityMetadata collection name.
* DATAMONGO-1039 - Polish implementation for cleaning up after tests.
* DATAMONGO-712 - Another round of potential performance improvements.


Changes in version 1.7.0.M1 (2014-12-01)
----------------------------------------
* DATAMONGO-1108 - BasicMongoPersistentEntity doesn't need to parse expression on every invocation.
* DATAMONGO-1106 - Release 1.7 M1.
* DATAMONGO-1105 - Add implementation for new QueryDslPredicateExecutor.findAll(OrderSpecifier<?>... orders).
* DATAMONGO-1102 - Auto-register JSR-310 converters to support JDK 8 date/time types.
* DATAMONGO-1101 - Add support for $bit to Update.
* DATAMONGO-1100 - Adapt to new PersistentPropertyAccessor API.
* DATAMONGO-1097 - Add support for $mul to Update.
* DATAMONGO-1096 - RuntimeExceptions during debug query printing in MongoTemplate.
* DATAMONGO-1094 - Wrong reference to @DocumentField in error message.
* DATAMONGO-1093 - BasicQuery missing hashCode() and equals(…) methods.
* DATAMONGO-1092 - Ensure compatibility with MongoDB 2.8.0.rc0 and java driver 2.13.0-rc0.
* DATAMONGO-1087 - Incorrect warning for MongoPersistentEntityIndexResolver$CyclicPropertyReferenceException: Found cycle for field…
* DATAMONGO-1085 - Sort can not use the metamodel classes generated by QueryDSL.
* DATAMONGO-1080 - AbstractMongoQuery must not eagerly post-process results.
* DATAMONGO-1078 - @Query annotated repository query fails to map complex Id structure.
* DATAMONGO-1077 - Update removes positional operator $ in key when used on DBRef property.
* DATAMONGO-1076 - Finalizer hit db on lazy dbrefs.
* DATAMONGO-1075 - Correctly evaluate CONTAINS keyword on collection properties.
* DATAMONGO-1072 - Query placeholders in keys no longer correctly substituted.
* DATAMONGO-1070 - Query annotation with $oid leads to a parse error.
* DATAMONGO-1068 - elemMatch of Class Criteria fails to build special cirteria.
* DATAMONGO-1063 - IllegalStateException using any().in().
* DATAMONGO-1062 - Fix failing test in ServerAddressPropertyEditorUnitTests.
* DATAMONGO-1058 - Using @Field("foo") with @Dbref breaking behavior.
* DATAMONGO-1057 - AbstractMongoQuery.SlicedExecution#execute() skips every nth element.
* DATAMONGO-1054 - Improve performance of saving entities by using insert(…) if possible.
* DATAMONGO-1053 - In 1.6, any field in a mapped object named "language" will fail to map if it is a type other than String.
* DATAMONGO-1050 - SimpleMongoRepository.findById(id, class) don't return ids for nested documents.
* DATAMONGO-1049 - Reserved field name 'language' causes trouble.
* DATAMONGO-1043 - SpEL Expressions in @Document annotations are not re-evaluated for query executions.
* DATAMONGO-943 - Add support for $position to Update $push $each.


Changes in version 1.6.1.RELEASE (2014-10-30)
---------------------------------------------
* DATAMONGO-1080 - AbstractMongoQuery must not eagerly post-process results.
* DATAMONGO-1079 - Release 1.6.1.
* DATAMONGO-1077 - Update removes positional operator $ in key when used on DBRef property.
* DATAMONGO-1076 - Finalizer hit db on lazy dbrefs.
* DATAMONGO-1072 - Query placeholders in keys no longer correctly substituted.
* DATAMONGO-1070 - Query annotation with $oid leads to a parse error.
* DATAMONGO-1068 - elemMatch of Class Criteria fails to build special cirteria.
* DATAMONGO-1063 - IllegalStateException using any().in().
* DATAMONGO-1062 - Fix failing test in ServerAddressPropertyEditorUnitTests.
* DATAMONGO-1058 - Using @Field("foo") with @Dbref breaking behavior.
* DATAMONGO-1057 - AbstractMongoQuery.SlicedExecution#execute() skips every nth element.
* DATAMONGO-1053 - In 1.6, any field in a mapped object named "language" will fail to map if it is a type other than String.
* DATAMONGO-1049 - Reserved field name 'language' causes trouble.


Changes in version 1.6.0.RELEASE (2014-09-05)
---------------------------------------------
* DATAMONGO-1046 - Release 1.6 GA.

@@ -1,8 +1,8 @@
Spring Data MongoDB 1.6 GA
Copyright (c) [2010-2014] Pivotal Software, Inc.
Spring Data MongoDB 1.6.2
Copyright (c) [2010-2015] Pivotal Software, Inc.

This product is licensed to you under the Apache License, Version 2.0 (the "License").
You may not use this product except in compliance with the License.
This product is licensed to you under the Apache License, Version 2.0 (the "License").
You may not use this product except in compliance with the License.

This product may include a number of subcomponents with
separate copyright notices and license terms. Your use of the source