Compare commits
50 Commits
labs/antor
...
2.0.3.RELE
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3399160acf | ||
|
|
32a8ee9b31 | ||
|
|
17cea70abc | ||
|
|
07731c39ba | ||
|
|
c5b580b82b | ||
|
|
9a1385186e | ||
|
|
704524d7f4 | ||
|
|
cc9a3ac8da | ||
|
|
acb68f3ca4 | ||
|
|
3088f0469e | ||
|
|
a1ae04881d | ||
|
|
6f55c66060 | ||
|
|
f86447bd04 | ||
|
|
1bb4324b2e | ||
|
|
856506f121 | ||
|
|
2a81dc75a8 | ||
|
|
58cd4c08ca | ||
|
|
344e019143 | ||
|
|
918b7e96bb | ||
|
|
fce7a5c1cb | ||
|
|
dbd2de8e0f | ||
|
|
0dbe331ab0 | ||
|
|
846ebcd91d | ||
|
|
9e0b5caeac | ||
|
|
cf70f5e5eb | ||
|
|
331dc6df6f | ||
|
|
a51dce2c90 | ||
|
|
c0cf1aa95b | ||
|
|
7104ffa543 | ||
|
|
28d2fb6680 | ||
|
|
140e26946f | ||
|
|
f4e730ce87 | ||
|
|
e3a83ebc42 | ||
|
|
f65c1e324e | ||
|
|
1dd0061f03 | ||
|
|
5ea860700c | ||
|
|
3dd653a702 | ||
|
|
f87847407b | ||
|
|
433a125c9e | ||
|
|
5827cb0971 | ||
|
|
0109bf6858 | ||
|
|
49d1555576 | ||
|
|
fdbb305b8e | ||
|
|
49dd03311a | ||
|
|
a86a3210e1 | ||
|
|
4b655abfb6 | ||
|
|
0963e6cf77 | ||
|
|
3e1b2c4bdb | ||
|
|
03e0e0c431 | ||
|
|
51900021a1 |
@@ -31,6 +31,8 @@ cache:
|
||||
directories:
|
||||
- $HOME/.m2
|
||||
|
||||
install: true
|
||||
install:
|
||||
- |-
|
||||
mongo admin --eval "db.adminCommand({setFeatureCompatibilityVersion: '3.4'});"
|
||||
|
||||
script: "mvn clean dependency:list test -P${PROFILE} -Dsort"
|
||||
|
||||
6
pom.xml
6
pom.xml
@@ -5,7 +5,7 @@
|
||||
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.0.0.RELEASE</version>
|
||||
<version>2.0.3.RELEASE</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<name>Spring Data MongoDB</name>
|
||||
@@ -15,7 +15,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data.build</groupId>
|
||||
<artifactId>spring-data-parent</artifactId>
|
||||
<version>2.0.0.RELEASE</version>
|
||||
<version>2.0.3.RELEASE</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
@@ -27,7 +27,7 @@
|
||||
<properties>
|
||||
<project.type>multi</project.type>
|
||||
<dist.id>spring-data-mongodb</dist.id>
|
||||
<springdata.commons>2.0.0.RELEASE</springdata.commons>
|
||||
<springdata.commons>2.0.3.RELEASE</springdata.commons>
|
||||
<mongo>3.5.0</mongo>
|
||||
<mongo.reactivestreams>1.6.0</mongo.reactivestreams>
|
||||
<jmh.version>1.19</jmh.version>
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.0.0.RELEASE</version>
|
||||
<version>2.0.3.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.0.0.RELEASE</version>
|
||||
<version>2.0.3.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -49,7 +49,7 @@
|
||||
<dependency>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb</artifactId>
|
||||
<version>2.0.0.RELEASE</version>
|
||||
<version>2.0.3.RELEASE</version>
|
||||
</dependency>
|
||||
|
||||
<!-- reactive -->
|
||||
|
||||
@@ -1,214 +1,214 @@
|
||||
/*
|
||||
* Copyright 2011-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import javax.persistence.EntityManagerFactory;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.DataAccessResourceFailureException;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.data.crossstore.ChangeSet;
|
||||
import org.springframework.data.crossstore.ChangeSetBacked;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister;
|
||||
import org.springframework.data.mongodb.core.CollectionCallback;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.model.Filters;
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
|
||||
/**
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @author Alex Vengrovsk
|
||||
* @author Mark Paluch
|
||||
* @deprecated will be removed without replacement.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
|
||||
private static final String ENTITY_CLASS = "_entity_class";
|
||||
private static final String ENTITY_ID = "_entity_id";
|
||||
private static final String ENTITY_FIELD_NAME = "_entity_field_name";
|
||||
private static final String ENTITY_FIELD_CLASS = "_entity_field_class";
|
||||
|
||||
private final Logger log = LoggerFactory.getLogger(getClass());
|
||||
|
||||
private MongoTemplate mongoTemplate;
|
||||
private EntityManagerFactory entityManagerFactory;
|
||||
|
||||
public void setMongoTemplate(MongoTemplate mongoTemplate) {
|
||||
this.mongoTemplate = mongoTemplate;
|
||||
}
|
||||
|
||||
public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) {
|
||||
this.entityManagerFactory = entityManagerFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentState(java.lang.Class, java.lang.Object, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public void getPersistentState(Class<? extends ChangeSetBacked> entityClass, Object id, final ChangeSet changeSet)
|
||||
throws DataAccessException, NotFoundException {
|
||||
|
||||
if (id == null) {
|
||||
log.debug("Unable to load MongoDB data for null id");
|
||||
return;
|
||||
}
|
||||
|
||||
String collName = getCollectionNameForEntity(entityClass);
|
||||
|
||||
final Document dbk = new Document();
|
||||
dbk.put(ENTITY_ID, id);
|
||||
dbk.put(ENTITY_CLASS, entityClass.getName());
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Loading MongoDB data for {}", dbk);
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
|
||||
for (Document dbo : collection.find(dbk)) {
|
||||
String key = (String) dbo.get(ENTITY_FIELD_NAME);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Processing key: {}", key);
|
||||
}
|
||||
if (!changeSet.getValues().containsKey(key)) {
|
||||
String className = (String) dbo.get(ENTITY_FIELD_CLASS);
|
||||
if (className == null) {
|
||||
throw new DataIntegrityViolationException(
|
||||
"Unble to convert property " + key + ": Invalid metadata, " + ENTITY_FIELD_CLASS + " not available");
|
||||
}
|
||||
Class<?> clazz = ClassUtils.resolveClassName(className, ClassUtils.getDefaultClassLoader());
|
||||
Object value = mongoTemplate.getConverter().read(clazz, dbo);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Adding to ChangeSet: {}", key);
|
||||
}
|
||||
changeSet.set(key, value);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("getPersistentId called on {}", entity);
|
||||
}
|
||||
if (entityManagerFactory == null) {
|
||||
throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null");
|
||||
}
|
||||
|
||||
return entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#persistState(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public Object persistState(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
|
||||
if (cs == null) {
|
||||
log.debug("Flush: changeset was null, nothing to flush.");
|
||||
return 0L;
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: changeset: {}", cs.getValues());
|
||||
}
|
||||
|
||||
String collName = getCollectionNameForEntity(entity.getClass());
|
||||
if (mongoTemplate.getCollection(collName) == null) {
|
||||
mongoTemplate.createCollection(collName);
|
||||
}
|
||||
|
||||
for (String key : cs.getValues().keySet()) {
|
||||
if (key != null && !key.startsWith("_") && !key.equals(ChangeSetPersister.ID_KEY)) {
|
||||
Object value = cs.getValues().get(key);
|
||||
final Document dbQuery = new Document();
|
||||
dbQuery.put(ENTITY_ID, getPersistentId(entity, cs));
|
||||
dbQuery.put(ENTITY_CLASS, entity.getClass().getName());
|
||||
dbQuery.put(ENTITY_FIELD_NAME, key);
|
||||
final Document dbId = mongoTemplate.execute(collName, new CollectionCallback<Document>() {
|
||||
public Document doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
Document id = collection.find(dbQuery).first();
|
||||
return id;
|
||||
}
|
||||
});
|
||||
|
||||
if (value == null) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: removing: {}", dbQuery);
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
DeleteResult dr = collection.deleteMany(dbQuery);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
final Document dbDoc = new Document();
|
||||
dbDoc.putAll(dbQuery);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: saving: {}", dbQuery);
|
||||
}
|
||||
mongoTemplate.getConverter().write(value, dbDoc);
|
||||
dbDoc.put(ENTITY_FIELD_CLASS, value.getClass().getName());
|
||||
if (dbId != null) {
|
||||
dbDoc.put("_id", dbId.get("_id"));
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
|
||||
if (dbId != null) {
|
||||
collection.replaceOne(Filters.eq("_id", dbId.get("_id")), dbDoc);
|
||||
} else {
|
||||
|
||||
if (dbDoc.containsKey("_id") && dbDoc.get("_id") == null) {
|
||||
dbDoc.remove("_id");
|
||||
}
|
||||
collection.insertOne(dbDoc);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0L;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the collection the given entity type shall be persisted to.
|
||||
*
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private String getCollectionNameForEntity(Class<? extends ChangeSetBacked> entityClass) {
|
||||
return mongoTemplate.getCollectionName(entityClass);
|
||||
}
|
||||
}
|
||||
/*
|
||||
* Copyright 2011-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import javax.persistence.EntityManagerFactory;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.DataAccessResourceFailureException;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.data.crossstore.ChangeSet;
|
||||
import org.springframework.data.crossstore.ChangeSetBacked;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister;
|
||||
import org.springframework.data.mongodb.core.CollectionCallback;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.model.Filters;
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
|
||||
/**
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @author Alex Vengrovsk
|
||||
* @author Mark Paluch
|
||||
* @deprecated will be removed without replacement.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
|
||||
private static final String ENTITY_CLASS = "_entity_class";
|
||||
private static final String ENTITY_ID = "_entity_id";
|
||||
private static final String ENTITY_FIELD_NAME = "_entity_field_name";
|
||||
private static final String ENTITY_FIELD_CLASS = "_entity_field_class";
|
||||
|
||||
private final Logger log = LoggerFactory.getLogger(getClass());
|
||||
|
||||
private MongoTemplate mongoTemplate;
|
||||
private EntityManagerFactory entityManagerFactory;
|
||||
|
||||
public void setMongoTemplate(MongoTemplate mongoTemplate) {
|
||||
this.mongoTemplate = mongoTemplate;
|
||||
}
|
||||
|
||||
public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) {
|
||||
this.entityManagerFactory = entityManagerFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentState(java.lang.Class, java.lang.Object, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public void getPersistentState(Class<? extends ChangeSetBacked> entityClass, Object id, final ChangeSet changeSet)
|
||||
throws DataAccessException, NotFoundException {
|
||||
|
||||
if (id == null) {
|
||||
log.debug("Unable to load MongoDB data for null id");
|
||||
return;
|
||||
}
|
||||
|
||||
String collName = getCollectionNameForEntity(entityClass);
|
||||
|
||||
final Document dbk = new Document();
|
||||
dbk.put(ENTITY_ID, id);
|
||||
dbk.put(ENTITY_CLASS, entityClass.getName());
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Loading MongoDB data for {}", dbk);
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
|
||||
for (Document dbo : collection.find(dbk)) {
|
||||
String key = (String) dbo.get(ENTITY_FIELD_NAME);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Processing key: {}", key);
|
||||
}
|
||||
if (!changeSet.getValues().containsKey(key)) {
|
||||
String className = (String) dbo.get(ENTITY_FIELD_CLASS);
|
||||
if (className == null) {
|
||||
throw new DataIntegrityViolationException(
|
||||
"Unble to convert property " + key + ": Invalid metadata, " + ENTITY_FIELD_CLASS + " not available");
|
||||
}
|
||||
Class<?> clazz = ClassUtils.resolveClassName(className, ClassUtils.getDefaultClassLoader());
|
||||
Object value = mongoTemplate.getConverter().read(clazz, dbo);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Adding to ChangeSet: {}", key);
|
||||
}
|
||||
changeSet.set(key, value);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("getPersistentId called on {}", entity);
|
||||
}
|
||||
if (entityManagerFactory == null) {
|
||||
throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null");
|
||||
}
|
||||
|
||||
return entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#persistState(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public Object persistState(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
|
||||
if (cs == null) {
|
||||
log.debug("Flush: changeset was null, nothing to flush.");
|
||||
return 0L;
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: changeset: {}", cs.getValues());
|
||||
}
|
||||
|
||||
String collName = getCollectionNameForEntity(entity.getClass());
|
||||
if (mongoTemplate.getCollection(collName) == null) {
|
||||
mongoTemplate.createCollection(collName);
|
||||
}
|
||||
|
||||
for (String key : cs.getValues().keySet()) {
|
||||
if (key != null && !key.startsWith("_") && !key.equals(ChangeSetPersister.ID_KEY)) {
|
||||
Object value = cs.getValues().get(key);
|
||||
final Document dbQuery = new Document();
|
||||
dbQuery.put(ENTITY_ID, getPersistentId(entity, cs));
|
||||
dbQuery.put(ENTITY_CLASS, entity.getClass().getName());
|
||||
dbQuery.put(ENTITY_FIELD_NAME, key);
|
||||
final Document dbId = mongoTemplate.execute(collName, new CollectionCallback<Document>() {
|
||||
public Document doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
Document id = collection.find(dbQuery).first();
|
||||
return id;
|
||||
}
|
||||
});
|
||||
|
||||
if (value == null) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: removing: {}", dbQuery);
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
DeleteResult dr = collection.deleteMany(dbQuery);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
final Document dbDoc = new Document();
|
||||
dbDoc.putAll(dbQuery);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: saving: {}", dbQuery);
|
||||
}
|
||||
mongoTemplate.getConverter().write(value, dbDoc);
|
||||
dbDoc.put(ENTITY_FIELD_CLASS, value.getClass().getName());
|
||||
if (dbId != null) {
|
||||
dbDoc.put("_id", dbId.get("_id"));
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
|
||||
if (dbId != null) {
|
||||
collection.replaceOne(Filters.eq("_id", dbId.get("_id")), dbDoc);
|
||||
} else {
|
||||
|
||||
if (dbDoc.containsKey("_id") && dbDoc.get("_id") == null) {
|
||||
dbDoc.remove("_id");
|
||||
}
|
||||
collection.insertOne(dbDoc);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0L;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the collection the given entity type shall be persisted to.
|
||||
*
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private String getCollectionNameForEntity(Class<? extends ChangeSetBacked> entityClass) {
|
||||
return mongoTemplate.getCollectionName(entityClass);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.0.0.RELEASE</version>
|
||||
<version>2.0.3.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.0.0.RELEASE</version>
|
||||
<version>2.0.3.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -288,74 +288,9 @@
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
|
||||
<plugins>
|
||||
|
||||
<plugin>
|
||||
<artifactId>kotlin-maven-plugin</artifactId>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<version>${kotlin}</version>
|
||||
<configuration>
|
||||
<jvmTarget>${source.level}</jvmTarget>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>compile</id>
|
||||
<phase>compile</phase>
|
||||
<goals>
|
||||
<goal>compile</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<sourceDirs>
|
||||
<sourceDir>${project.basedir}/src/main/kotlin</sourceDir>
|
||||
<sourceDir>${project.basedir}/src/main/java</sourceDir>
|
||||
</sourceDirs>
|
||||
</configuration>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>test-compile</id>
|
||||
<phase>test-compile</phase>
|
||||
<goals>
|
||||
<goal>test-compile</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<sourceDirs>
|
||||
<sourceDir>${project.basedir}/src/test/kotlin</sourceDir>
|
||||
<sourceDir>${project.basedir}/src/test/java</sourceDir>
|
||||
</sourceDirs>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>default-compile</id>
|
||||
<phase>none</phase>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>default-testCompile</id>
|
||||
<phase>none</phase>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>java-compile</id>
|
||||
<phase>compile</phase>
|
||||
<goals>
|
||||
<goal>compile</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>java-test-compile</id>
|
||||
<phase>test-compile</phase>
|
||||
<goals>
|
||||
<goal>testCompile</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>com.mysema.maven</groupId>
|
||||
<artifactId>apt-maven-plugin</artifactId>
|
||||
@@ -384,7 +319,6 @@
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<version>2.12</version>
|
||||
<configuration>
|
||||
<useFile>false</useFile>
|
||||
<includes>
|
||||
@@ -406,6 +340,8 @@
|
||||
</properties>
|
||||
</configuration>
|
||||
</plugin>
|
||||
|
||||
</plugins>
|
||||
|
||||
</build>
|
||||
</project>
|
||||
|
||||
@@ -43,9 +43,10 @@ import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Default implementation of {@link ScriptOperations} capable of saving and executing {@link ServerSideJavaScript}.
|
||||
*
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
*/
|
||||
class DefaultScriptOperations implements ScriptOperations {
|
||||
@@ -141,7 +142,7 @@ class DefaultScriptOperations implements ScriptOperations {
|
||||
|
||||
Assert.hasText(scriptName, "ScriptName must not be null or empty!");
|
||||
|
||||
return mongoOperations.exists(query(where("name").is(scriptName)), NamedMongoScript.class, SCRIPT_COLLECTION_NAME);
|
||||
return mongoOperations.exists(query(where("_id").is(scriptName)), NamedMongoScript.class, SCRIPT_COLLECTION_NAME);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -190,7 +191,7 @@ class DefaultScriptOperations implements ScriptOperations {
|
||||
* Generate a valid name for the {@literal JavaScript}. MongoDB requires an id of type String for scripts. Calling
|
||||
* scripts having {@link ObjectId} as id fails. Therefore we create a random UUID without {@code -} (as this won't
|
||||
* work) an prefix the result with {@link #SCRIPT_NAME_PREFIX}.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private static String generateScriptName() {
|
||||
|
||||
@@ -72,11 +72,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
String getCollectionName(Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Execute the a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the
|
||||
* MongoDB driver to convert the JSON string to a Document. Any errors that result from executing this command will be
|
||||
* Execute the a MongoDB command expressed as a JSON string. Parsing is delegated to {@link Document#parse(String)} to
|
||||
* obtain the {@link Document} holding the actual command. Any errors that result from executing this command will be
|
||||
* converted into Spring's DAO exception hierarchy.
|
||||
*
|
||||
* @param jsonCommand a MongoDB command expressed as a JSON string.
|
||||
* @param jsonCommand a MongoDB command expressed as a JSON string. Must not be {@literal null}.
|
||||
* @return a result object returned by the action.
|
||||
*/
|
||||
Document executeCommand(String jsonCommand);
|
||||
@@ -851,8 +851,8 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" >
|
||||
* Spring's Type Conversion"</a> for more details.
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -908,8 +908,8 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" >
|
||||
* Spring's Type Conversion"</a> for more details.
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
*/
|
||||
@@ -925,8 +925,8 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* http://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
|
||||
@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.SerializationUtils.*;
|
||||
|
||||
import com.mongodb.*;
|
||||
import lombok.AccessLevel;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.NonNull;
|
||||
@@ -127,14 +128,6 @@ import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.ResourceUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.Cursor;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBCursor;
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.client.AggregateIterable;
|
||||
import com.mongodb.client.FindIterable;
|
||||
import com.mongodb.client.MapReduceIterable;
|
||||
@@ -1933,16 +1926,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Assert.notNull(aggregation, "Aggregation pipeline must not be null!");
|
||||
Assert.notNull(outputType, "Output type must not be null!");
|
||||
|
||||
AggregationOperationContext rootContext = context == null ? Aggregation.DEFAULT_CONTEXT : context;
|
||||
Document command = aggregation.toDocument(collectionName, rootContext);
|
||||
Document commandResult = new BatchAggregationLoader(this, readPreference, Integer.MAX_VALUE)
|
||||
.aggregate(collectionName, aggregation, context);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command));
|
||||
}
|
||||
|
||||
Document commandResult = executeCommand(command, this.readPreference);
|
||||
|
||||
return new AggregationResults<O>(returnPotentiallyMappedResults(outputType, commandResult, collectionName),
|
||||
return new AggregationResults<>(returnPotentiallyMappedResults(outputType, commandResult, collectionName),
|
||||
commandResult);
|
||||
}
|
||||
|
||||
@@ -2771,31 +2758,26 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
private class ReadDocumentCallback<T> implements DocumentCallback<T> {
|
||||
|
||||
private final EntityReader<? super T, Bson> reader;
|
||||
private final Class<T> type;
|
||||
private final @NonNull EntityReader<? super T, Bson> reader;
|
||||
private final @NonNull Class<T> type;
|
||||
private final String collectionName;
|
||||
|
||||
public ReadDocumentCallback(EntityReader<? super T, Bson> reader, Class<T> type, String collectionName) {
|
||||
|
||||
Assert.notNull(reader, "EntityReader must not be null!");
|
||||
Assert.notNull(type, "Entity type must not be null!");
|
||||
|
||||
this.reader = reader;
|
||||
this.type = type;
|
||||
this.collectionName = collectionName;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public T doWith(Document object) {
|
||||
public T doWith(@Nullable Document object) {
|
||||
|
||||
if (null != object) {
|
||||
maybeEmitEvent(new AfterLoadEvent<T>(object, type, collectionName));
|
||||
}
|
||||
|
||||
T source = reader.read(type, object);
|
||||
|
||||
if (null != source) {
|
||||
maybeEmitEvent(new AfterConvertEvent<T>(object, source, collectionName));
|
||||
}
|
||||
|
||||
return source;
|
||||
}
|
||||
}
|
||||
@@ -2830,10 +2812,15 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
Class<?> typeToRead = targetType.isInterface() || targetType.isAssignableFrom(entityType) ? entityType
|
||||
: targetType;
|
||||
|
||||
if (null != object) {
|
||||
maybeEmitEvent(new AfterLoadEvent<T>(object, targetType, collectionName));
|
||||
}
|
||||
|
||||
Object source = reader.read(typeToRead, object);
|
||||
Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source;
|
||||
|
||||
if (result == null) {
|
||||
if (null != result) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(object, result, collectionName));
|
||||
}
|
||||
|
||||
@@ -2986,7 +2973,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
T doWith = delegate.doWith(content);
|
||||
|
||||
return new GeoResult<T>(doWith, new Distance(distance, metric));
|
||||
return new GeoResult<>(doWith, new Distance(distance, metric));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3074,4 +3061,148 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
public MongoDbFactory getMongoDbFactory() {
|
||||
return mongoDbFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link BatchAggregationLoader} is a little helper that can process cursor results returned by an aggregation
|
||||
* command execution. On presence of a {@literal nextBatch} indicated by presence of an {@code id} field in the
|
||||
* {@code cursor} another {@code getMore} command gets executed reading the next batch of documents until all results
|
||||
* are loaded.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.10
|
||||
*/
|
||||
static class BatchAggregationLoader {
|
||||
|
||||
private static final String CURSOR_FIELD = "cursor";
|
||||
private static final String RESULT_FIELD = "result";
|
||||
private static final String BATCH_SIZE_FIELD = "batchSize";
|
||||
private static final String FIRST_BATCH = "firstBatch";
|
||||
private static final String NEXT_BATCH = "nextBatch";
|
||||
private static final String SERVER_USED = "serverUsed";
|
||||
private static final String OK = "ok";
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final ReadPreference readPreference;
|
||||
private final int batchSize;
|
||||
|
||||
BatchAggregationLoader(MongoTemplate template, ReadPreference readPreference, int batchSize) {
|
||||
|
||||
this.template = template;
|
||||
this.readPreference = readPreference;
|
||||
this.batchSize = batchSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* Run aggregation command and fetch all results.
|
||||
*/
|
||||
Document aggregate(String collectionName, Aggregation aggregation, AggregationOperationContext context) {
|
||||
|
||||
Document command = prepareAggregationCommand(collectionName, aggregation,
|
||||
context, batchSize);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command));
|
||||
}
|
||||
|
||||
return mergeAggregationResults(aggregateBatched(command, collectionName, batchSize));
|
||||
}
|
||||
|
||||
/**
|
||||
* Pre process the aggregation command sent to the server by adding {@code cursor} options to match execution on
|
||||
* different server versions.
|
||||
*/
|
||||
private static Document prepareAggregationCommand(String collectionName, Aggregation aggregation,
|
||||
@Nullable AggregationOperationContext context, int batchSize) {
|
||||
|
||||
AggregationOperationContext rootContext = context == null ? Aggregation.DEFAULT_CONTEXT : context;
|
||||
Document command = aggregation.toDocument(collectionName, rootContext);
|
||||
|
||||
if (!aggregation.getOptions().isExplain()) {
|
||||
command.put(CURSOR_FIELD, new Document(BATCH_SIZE_FIELD, batchSize));
|
||||
}
|
||||
|
||||
return command;
|
||||
}
|
||||
|
||||
private List<Document> aggregateBatched(Document command, String collectionName, int batchSize) {
|
||||
|
||||
List<Document> results = new ArrayList<>();
|
||||
|
||||
Document commandResult = template.executeCommand(command, readPreference);
|
||||
results.add(postProcessResult(commandResult));
|
||||
|
||||
while (hasNext(commandResult)) {
|
||||
|
||||
Document getMore = new Document("getMore", getNextBatchId(commandResult)) //
|
||||
.append("collection", collectionName) //
|
||||
.append(BATCH_SIZE_FIELD, batchSize);
|
||||
|
||||
commandResult = template.executeCommand(getMore, this.readPreference);
|
||||
results.add(postProcessResult(commandResult));
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
private static Document postProcessResult(Document commandResult) {
|
||||
|
||||
if (!commandResult.containsKey(CURSOR_FIELD)) {
|
||||
return commandResult;
|
||||
}
|
||||
|
||||
Document resultObject = new Document(SERVER_USED, commandResult.get(SERVER_USED));
|
||||
resultObject.put(OK, commandResult.get(OK));
|
||||
|
||||
Document cursor = (Document) commandResult.get(CURSOR_FIELD);
|
||||
if (cursor.containsKey(FIRST_BATCH)) {
|
||||
resultObject.put(RESULT_FIELD, cursor.get(FIRST_BATCH));
|
||||
} else {
|
||||
resultObject.put(RESULT_FIELD, cursor.get(NEXT_BATCH));
|
||||
}
|
||||
|
||||
return resultObject;
|
||||
}
|
||||
|
||||
private static Document mergeAggregationResults(List<Document> batchResults) {
|
||||
|
||||
if (batchResults.size() == 1) {
|
||||
return batchResults.iterator().next();
|
||||
}
|
||||
|
||||
Document commandResult = new Document();
|
||||
List<Object> allResults = new ArrayList<>();
|
||||
|
||||
for (Document batchResult : batchResults) {
|
||||
|
||||
Collection documents = (Collection<?>) batchResult.get(RESULT_FIELD);
|
||||
if (!CollectionUtils.isEmpty(documents)) {
|
||||
allResults.addAll(documents);
|
||||
}
|
||||
}
|
||||
|
||||
// take general info from first batch
|
||||
commandResult.put(SERVER_USED, batchResults.iterator().next().get(SERVER_USED));
|
||||
commandResult.put(OK, batchResults.iterator().next().get(OK));
|
||||
|
||||
// and append the merged batchResults
|
||||
commandResult.put(RESULT_FIELD, allResults);
|
||||
|
||||
return commandResult;
|
||||
}
|
||||
|
||||
private static boolean hasNext(Document commandResult) {
|
||||
|
||||
if (!commandResult.containsKey(CURSOR_FIELD)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Object next = getNextBatchId(commandResult);
|
||||
return next != null && ((Number) next).longValue() != 0L;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private static Object getNextBatchId(Document commandResult) {
|
||||
return ((Document) commandResult.get(CURSOR_FIELD)).get("id");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -601,6 +601,16 @@ public class Aggregation {
|
||||
return SerializationUtils.serializeToJsonSafely(toDocument("__collection__", DEFAULT_CONTEXT));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get {@link AggregationOptions} to apply.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 2.0.3
|
||||
*/
|
||||
public AggregationOptions getOptions() {
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* Describes the system variables available in MongoDB aggregation framework pipeline expressions.
|
||||
*
|
||||
|
||||
@@ -32,6 +32,7 @@ import org.springframework.util.Assert;
|
||||
* Gateway to {@literal array} aggregation operations.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.0
|
||||
*/
|
||||
public class ArrayOperators {
|
||||
@@ -224,15 +225,10 @@ public class ArrayOperators {
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public ArrayOperatorFactory.ReduceInitialValueBuilder reduce(final AggregationExpression expression) {
|
||||
return new ArrayOperatorFactory.ReduceInitialValueBuilder() {
|
||||
public ArrayOperatorFactory.ReduceInitialValueBuilder reduce(AggregationExpression expression) {
|
||||
|
||||
@Override
|
||||
public Reduce startingWith(Object initialValue) {
|
||||
return (usesFieldRef() ? Reduce.arrayOf(fieldReference) : Reduce.arrayOf(expression))
|
||||
.withInitialValue(initialValue).reduce(expression);
|
||||
}
|
||||
};
|
||||
return initialValue -> (usesFieldRef() ? Reduce.arrayOf(fieldReference)
|
||||
: Reduce.arrayOf(ArrayOperatorFactory.this.expression)).withInitialValue(initialValue).reduce(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -242,16 +238,10 @@ public class ArrayOperators {
|
||||
* @param expressions
|
||||
* @return
|
||||
*/
|
||||
public ArrayOperatorFactory.ReduceInitialValueBuilder reduce(final PropertyExpression... expressions) {
|
||||
public ArrayOperatorFactory.ReduceInitialValueBuilder reduce(PropertyExpression... expressions) {
|
||||
|
||||
return new ArrayOperatorFactory.ReduceInitialValueBuilder() {
|
||||
|
||||
@Override
|
||||
public Reduce startingWith(Object initialValue) {
|
||||
return (usesFieldRef() ? Reduce.arrayOf(fieldReference) : Reduce.arrayOf(expression))
|
||||
.withInitialValue(initialValue).reduce(expressions);
|
||||
}
|
||||
};
|
||||
return initialValue -> (usesFieldRef() ? Reduce.arrayOf(fieldReference) : Reduce.arrayOf(expression))
|
||||
.withInitialValue(initialValue).reduce(expressions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1104,12 +1094,10 @@ public class ArrayOperators {
|
||||
/**
|
||||
* Start creating new {@link Reduce}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param arrayValueExpression must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static InitialValueBuilder arrayOf(final AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "AggregationExpression must not be null");
|
||||
public static InitialValueBuilder arrayOf(final AggregationExpression arrayValueExpression) {
|
||||
|
||||
return new InitialValueBuilder() {
|
||||
|
||||
@@ -1124,14 +1112,14 @@ public class ArrayOperators {
|
||||
public Reduce reduce(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "AggregationExpression must not be null");
|
||||
return new Reduce(expression, initialValue, Collections.singletonList(expression));
|
||||
return new Reduce(arrayValueExpression, initialValue, Collections.singletonList(expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Reduce reduce(PropertyExpression... expressions) {
|
||||
|
||||
Assert.notNull(expressions, "PropertyExpressions must not be null");
|
||||
return new Reduce(expression, initialValue, Arrays.<AggregationExpression> asList(expressions));
|
||||
return new Reduce(arrayValueExpression, initialValue, Arrays.asList(expressions));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -954,7 +954,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
: CollectionFactory.createCollection(collectionType, rawComponentType, sourceValue.size());
|
||||
|
||||
if (sourceValue.isEmpty()) {
|
||||
return getPotentiallyConvertedSimpleRead(items, collectionType);
|
||||
return getPotentiallyConvertedSimpleRead(items, targetType.getType());
|
||||
}
|
||||
|
||||
if (!DBRef.class.equals(rawComponentType) && isCollectionOfDbRefWhereBulkFetchIsPossible(sourceValue)) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
* Copyright 2011-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,8 +15,6 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
import java.net.MalformedURLException;
|
||||
@@ -29,9 +27,9 @@ import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.types.Binary;
|
||||
import org.bson.types.Code;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.reactivestreams.Publisher;
|
||||
import org.springframework.core.convert.ConversionFailedException;
|
||||
import org.springframework.core.convert.TypeDescriptor;
|
||||
import org.springframework.core.convert.converter.ConditionalConverter;
|
||||
@@ -41,6 +39,7 @@ import org.springframework.data.convert.ReadingConverter;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mongodb.core.query.Term;
|
||||
import org.springframework.data.mongodb.core.script.NamedMongoScript;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.NumberUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
@@ -66,9 +65,9 @@ abstract class MongoConverters {
|
||||
* @return
|
||||
* @since 1.9
|
||||
*/
|
||||
public static Collection<Object> getConvertersToRegister() {
|
||||
static Collection<Object> getConvertersToRegister() {
|
||||
|
||||
List<Object> converters = new ArrayList<Object>();
|
||||
List<Object> converters = new ArrayList<>();
|
||||
|
||||
converters.add(BigDecimalToStringConverter.INSTANCE);
|
||||
converters.add(StringToBigDecimalConverter.INSTANCE);
|
||||
@@ -86,6 +85,7 @@ abstract class MongoConverters {
|
||||
converters.add(AtomicLongToLongConverter.INSTANCE);
|
||||
converters.add(LongToAtomicLongConverter.INSTANCE);
|
||||
converters.add(IntegerToAtomicIntegerConverter.INSTANCE);
|
||||
converters.add(BinaryToByteArrayConverter.INSTANCE);
|
||||
|
||||
return converters;
|
||||
}
|
||||
@@ -95,7 +95,7 @@ abstract class MongoConverters {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public static enum ObjectIdToStringConverter implements Converter<ObjectId, String> {
|
||||
enum ObjectIdToStringConverter implements Converter<ObjectId, String> {
|
||||
INSTANCE;
|
||||
|
||||
public String convert(ObjectId id) {
|
||||
@@ -108,7 +108,7 @@ abstract class MongoConverters {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public static enum StringToObjectIdConverter implements Converter<String, ObjectId> {
|
||||
enum StringToObjectIdConverter implements Converter<String, ObjectId> {
|
||||
INSTANCE;
|
||||
|
||||
public ObjectId convert(String source) {
|
||||
@@ -121,7 +121,7 @@ abstract class MongoConverters {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public static enum ObjectIdToBigIntegerConverter implements Converter<ObjectId, BigInteger> {
|
||||
enum ObjectIdToBigIntegerConverter implements Converter<ObjectId, BigInteger> {
|
||||
INSTANCE;
|
||||
|
||||
public BigInteger convert(ObjectId source) {
|
||||
@@ -134,7 +134,7 @@ abstract class MongoConverters {
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public static enum BigIntegerToObjectIdConverter implements Converter<BigInteger, ObjectId> {
|
||||
enum BigIntegerToObjectIdConverter implements Converter<BigInteger, ObjectId> {
|
||||
INSTANCE;
|
||||
|
||||
public ObjectId convert(BigInteger source) {
|
||||
@@ -142,7 +142,7 @@ abstract class MongoConverters {
|
||||
}
|
||||
}
|
||||
|
||||
public static enum BigDecimalToStringConverter implements Converter<BigDecimal, String> {
|
||||
enum BigDecimalToStringConverter implements Converter<BigDecimal, String> {
|
||||
INSTANCE;
|
||||
|
||||
public String convert(BigDecimal source) {
|
||||
@@ -150,7 +150,7 @@ abstract class MongoConverters {
|
||||
}
|
||||
}
|
||||
|
||||
public static enum StringToBigDecimalConverter implements Converter<String, BigDecimal> {
|
||||
enum StringToBigDecimalConverter implements Converter<String, BigDecimal> {
|
||||
INSTANCE;
|
||||
|
||||
public BigDecimal convert(String source) {
|
||||
@@ -158,7 +158,7 @@ abstract class MongoConverters {
|
||||
}
|
||||
}
|
||||
|
||||
public static enum BigIntegerToStringConverter implements Converter<BigInteger, String> {
|
||||
enum BigIntegerToStringConverter implements Converter<BigInteger, String> {
|
||||
INSTANCE;
|
||||
|
||||
public String convert(BigInteger source) {
|
||||
@@ -166,7 +166,7 @@ abstract class MongoConverters {
|
||||
}
|
||||
}
|
||||
|
||||
public static enum StringToBigIntegerConverter implements Converter<String, BigInteger> {
|
||||
enum StringToBigIntegerConverter implements Converter<String, BigInteger> {
|
||||
INSTANCE;
|
||||
|
||||
public BigInteger convert(String source) {
|
||||
@@ -174,7 +174,7 @@ abstract class MongoConverters {
|
||||
}
|
||||
}
|
||||
|
||||
public static enum URLToStringConverter implements Converter<URL, String> {
|
||||
enum URLToStringConverter implements Converter<URL, String> {
|
||||
INSTANCE;
|
||||
|
||||
public String convert(URL source) {
|
||||
@@ -182,7 +182,7 @@ abstract class MongoConverters {
|
||||
}
|
||||
}
|
||||
|
||||
public static enum StringToURLConverter implements Converter<String, URL> {
|
||||
enum StringToURLConverter implements Converter<String, URL> {
|
||||
INSTANCE;
|
||||
|
||||
private static final TypeDescriptor SOURCE = TypeDescriptor.valueOf(String.class);
|
||||
@@ -199,7 +199,7 @@ abstract class MongoConverters {
|
||||
}
|
||||
|
||||
@ReadingConverter
|
||||
public static enum DocumentToStringConverter implements Converter<Document, String> {
|
||||
enum DocumentToStringConverter implements Converter<Document, String> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@@ -219,7 +219,7 @@ abstract class MongoConverters {
|
||||
* @since 1.6
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum TermToStringConverter implements Converter<Term, String> {
|
||||
enum TermToStringConverter implements Converter<Term, String> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@@ -233,7 +233,7 @@ abstract class MongoConverters {
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
*/
|
||||
public static enum DocumentToNamedMongoScriptConverter implements Converter<Document, NamedMongoScript> {
|
||||
enum DocumentToNamedMongoScriptConverter implements Converter<Document, NamedMongoScript> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@@ -255,7 +255,7 @@ abstract class MongoConverters {
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
*/
|
||||
public static enum NamedMongoScriptToDocumentConverter implements Converter<NamedMongoScript, Document> {
|
||||
enum NamedMongoScriptToDocumentConverter implements Converter<NamedMongoScript, Document> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@@ -282,7 +282,7 @@ abstract class MongoConverters {
|
||||
* @since 1.9
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum CurrencyToStringConverter implements Converter<Currency, String> {
|
||||
enum CurrencyToStringConverter implements Converter<Currency, String> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@@ -303,7 +303,7 @@ abstract class MongoConverters {
|
||||
* @since 1.9
|
||||
*/
|
||||
@ReadingConverter
|
||||
public static enum StringToCurrencyConverter implements Converter<String, Currency> {
|
||||
enum StringToCurrencyConverter implements Converter<String, Currency> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@@ -326,7 +326,7 @@ abstract class MongoConverters {
|
||||
* @since 1.9
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum NumberToNumberConverterFactory implements ConverterFactory<Number, Number>, ConditionalConverter {
|
||||
enum NumberToNumberConverterFactory implements ConverterFactory<Number, Number>, ConditionalConverter {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@@ -391,7 +391,7 @@ abstract class MongoConverters {
|
||||
* @since 1.10
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum AtomicLongToLongConverter implements Converter<AtomicLong, Long> {
|
||||
enum AtomicLongToLongConverter implements Converter<AtomicLong, Long> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
@@ -407,7 +407,7 @@ abstract class MongoConverters {
|
||||
* @since 1.10
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum AtomicIntegerToIntegerConverter implements Converter<AtomicInteger, Integer> {
|
||||
enum AtomicIntegerToIntegerConverter implements Converter<AtomicInteger, Integer> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
@@ -423,7 +423,7 @@ abstract class MongoConverters {
|
||||
* @since 1.10
|
||||
*/
|
||||
@ReadingConverter
|
||||
public static enum LongToAtomicLongConverter implements Converter<Long, AtomicLong> {
|
||||
enum LongToAtomicLongConverter implements Converter<Long, AtomicLong> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
@@ -439,7 +439,7 @@ abstract class MongoConverters {
|
||||
* @since 1.10
|
||||
*/
|
||||
@ReadingConverter
|
||||
public static enum IntegerToAtomicIntegerConverter implements Converter<Integer, AtomicInteger> {
|
||||
enum IntegerToAtomicIntegerConverter implements Converter<Integer, AtomicInteger> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
@@ -447,4 +447,22 @@ abstract class MongoConverters {
|
||||
return source != null ? new AtomicInteger(source) : null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Converter} implementation converting {@link Binary} into {@code byte[]}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0.1
|
||||
*/
|
||||
@ReadingConverter
|
||||
enum BinaryToByteArrayConverter implements Converter<Binary, byte[]> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public byte[] convert(Binary source) {
|
||||
return source.getData();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -930,7 +930,7 @@ public class QueryMapper {
|
||||
|
||||
try {
|
||||
|
||||
PropertyPath path = PropertyPath.from(pathExpression.replaceAll("\\.\\d", ""), entity.getTypeInformation());
|
||||
PropertyPath path = PropertyPath.from(pathExpression.replaceAll("\\.\\d+", ""), entity.getTypeInformation());
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(path);
|
||||
|
||||
Iterator<MongoPersistentProperty> iterator = propertyPath.iterator();
|
||||
|
||||
@@ -149,6 +149,8 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
@Override
|
||||
public void verify() {
|
||||
|
||||
super.verify();
|
||||
|
||||
verifyFieldUniqueness();
|
||||
verifyFieldTypes();
|
||||
}
|
||||
|
||||
@@ -25,11 +25,10 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.client.gridfs.GridFSFindIterable;
|
||||
import com.mongodb.gridfs.GridFSFile;
|
||||
|
||||
/**
|
||||
* Collection of operations to store and read files from MongoDB GridFS.
|
||||
*
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Philipp Schneider
|
||||
* @author Thomas Darimont
|
||||
@@ -40,98 +39,102 @@ public interface GridFsOperations extends ResourcePatternResolver {
|
||||
|
||||
/**
|
||||
* Stores the given content into a file with the given name.
|
||||
*
|
||||
*
|
||||
* @param content must not be {@literal null}.
|
||||
* @param filename must not be {@literal null} or empty.
|
||||
* @return the {@link GridFSFile} just created
|
||||
* @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
|
||||
*/
|
||||
ObjectId store(InputStream content, String filename);
|
||||
|
||||
/**
|
||||
* Stores the given content into a file with the given name.
|
||||
*
|
||||
*
|
||||
* @param content must not be {@literal null}.
|
||||
* @param metadata can be {@literal null}.
|
||||
* @return the {@link GridFSFile} just created
|
||||
* @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
|
||||
*/
|
||||
ObjectId store(InputStream content, @Nullable Object metadata);
|
||||
|
||||
/**
|
||||
* Stores the given content into a file with the given name.
|
||||
*
|
||||
*
|
||||
* @param content must not be {@literal null}.
|
||||
* @param metadata can be {@literal null}.
|
||||
* @return the {@link GridFSFile} just created
|
||||
* @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
|
||||
*/
|
||||
ObjectId store(InputStream content, @Nullable Document metadata);
|
||||
|
||||
/**
|
||||
* Stores the given content into a file with the given name and content type.
|
||||
*
|
||||
*
|
||||
* @param content must not be {@literal null}.
|
||||
* @param filename must not be {@literal null} or empty.
|
||||
* @param contentType can be {@literal null}.
|
||||
* @return the {@link GridFSFile} just created
|
||||
* @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
|
||||
*/
|
||||
ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType);
|
||||
|
||||
/**
|
||||
* Stores the given content into a file with the given name using the given metadata. The metadata object will be
|
||||
* marshalled before writing.
|
||||
*
|
||||
*
|
||||
* @param content must not be {@literal null}.
|
||||
* @param filename can be {@literal null} or empty.
|
||||
* @param metadata can be {@literal null}.
|
||||
* @return the {@link GridFSFile} just created
|
||||
* @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
|
||||
*/
|
||||
ObjectId store(InputStream content, @Nullable String filename, @Nullable Object metadata);
|
||||
|
||||
/**
|
||||
* Stores the given content into a file with the given name and content type using the given metadata. The metadata
|
||||
* object will be marshalled before writing.
|
||||
*
|
||||
*
|
||||
* @param content must not be {@literal null}.
|
||||
* @param filename must not be {@literal null} or empty.
|
||||
* @param contentType can be {@literal null}.
|
||||
* @param metadata can be {@literal null}
|
||||
* @return the {@link GridFSFile} just created
|
||||
* @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
|
||||
*/
|
||||
ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType, @Nullable Object metadata);
|
||||
ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType,
|
||||
@Nullable Object metadata);
|
||||
|
||||
/**
|
||||
* Stores the given content into a file with the given name using the given metadata.
|
||||
*
|
||||
*
|
||||
* @param content must not be {@literal null}.
|
||||
* @param filename must not be {@literal null} or empty.
|
||||
* @param metadata can be {@literal null}.
|
||||
* @return the {@link GridFSFile} just created
|
||||
* @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
|
||||
*/
|
||||
ObjectId store(InputStream content, @Nullable String filename, @Nullable Document metadata);
|
||||
|
||||
/**
|
||||
* Stores the given content into a file with the given name and content type using the given metadata.
|
||||
*
|
||||
*
|
||||
* @param content must not be {@literal null}.
|
||||
* @param filename must not be {@literal null} or empty.
|
||||
* @param contentType can be {@literal null}.
|
||||
* @param metadata can be {@literal null}.
|
||||
* @return the {@link GridFSFile} just created
|
||||
* @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created.
|
||||
*/
|
||||
ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType, @Nullable Document metadata);
|
||||
ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType,
|
||||
@Nullable Document metadata);
|
||||
|
||||
/**
|
||||
* Returns all files matching the given query. Note, that currently {@link Sort} criterias defined at the
|
||||
* {@link Query} will not be regarded as MongoDB does not support ordering for GridFS file access.
|
||||
*
|
||||
*
|
||||
* @see <a href="https://jira.mongodb.org/browse/JAVA-431">MongoDB Jira: JAVA-431</a>
|
||||
* @param query must not be {@literal null}.
|
||||
* @return
|
||||
* @return {@link GridFSFindIterable} to obtain results from. Eg. by calling
|
||||
* {@link GridFSFindIterable#into(java.util.Collection)}.
|
||||
*/
|
||||
GridFSFindIterable find(Query query);
|
||||
|
||||
/**
|
||||
* Returns a single file matching the given query or {@literal null} in case no file matches.
|
||||
*
|
||||
* Returns a single {@link com.mongodb.client.gridfs.model.GridFSFile} matching the given query or {@literal null} in
|
||||
* case no file matches.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@@ -140,14 +143,14 @@ public interface GridFsOperations extends ResourcePatternResolver {
|
||||
|
||||
/**
|
||||
* Deletes all files matching the given {@link Query}.
|
||||
*
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
*/
|
||||
void delete(Query query);
|
||||
|
||||
/**
|
||||
* Returns all {@link GridFsResource} with the given file name.
|
||||
*
|
||||
* Returns the {@link GridFsResource} with the given file name.
|
||||
*
|
||||
* @param filename must not be {@literal null}.
|
||||
* @return the resource if it exists or {@literal null}.
|
||||
* @see ResourcePatternResolver#getResource(String)
|
||||
@@ -156,7 +159,7 @@ public interface GridFsOperations extends ResourcePatternResolver {
|
||||
|
||||
/**
|
||||
* Returns all {@link GridFsResource}s matching the given file name pattern.
|
||||
*
|
||||
*
|
||||
* @param filenamePattern must not be {@literal null}.
|
||||
* @return
|
||||
* @see ResourcePatternResolver#getResources(String)
|
||||
|
||||
@@ -18,18 +18,22 @@ package org.springframework.data.mongodb.gridfs;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.core.io.InputStreamResource;
|
||||
import org.springframework.core.io.Resource;
|
||||
import org.springframework.data.util.Optionals;
|
||||
|
||||
import com.mongodb.MongoGridFSException;
|
||||
import com.mongodb.client.gridfs.model.GridFSFile;
|
||||
import com.mongodb.gridfs.GridFSDBFile;
|
||||
|
||||
/**
|
||||
* {@link GridFSDBFile} based {@link Resource} implementation.
|
||||
*
|
||||
* {@link GridFSFile} based {@link Resource} implementation.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Hartmut Lang
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class GridFsResource extends InputStreamResource {
|
||||
|
||||
@@ -38,8 +42,8 @@ public class GridFsResource extends InputStreamResource {
|
||||
private final GridFSFile file;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GridFsResource} from the given {@link GridFSDBFile}.
|
||||
*
|
||||
* Creates a new {@link GridFsResource} from the given {@link GridFSFile}.
|
||||
*
|
||||
* @param file must not be {@literal null}.
|
||||
*/
|
||||
public GridFsResource(GridFSFile file) {
|
||||
@@ -47,8 +51,8 @@ public class GridFsResource extends InputStreamResource {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GridFsResource} from the given {@link GridFSDBFile} and {@link InputStream}.
|
||||
*
|
||||
* Creates a new {@link GridFsResource} from the given {@link GridFSFile} and {@link InputStream}.
|
||||
*
|
||||
* @param file must not be {@literal null}.
|
||||
* @param inputStream must not be {@literal null}.
|
||||
*/
|
||||
@@ -87,8 +91,8 @@ public class GridFsResource extends InputStreamResource {
|
||||
|
||||
/**
|
||||
* Returns the {@link Resource}'s id.
|
||||
*
|
||||
* @return
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public Object getId() {
|
||||
return file.getId();
|
||||
@@ -96,14 +100,18 @@ public class GridFsResource extends InputStreamResource {
|
||||
|
||||
/**
|
||||
* Returns the {@link Resource}'s content type.
|
||||
*
|
||||
* @return
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @throws com.mongodb.MongoGridFSException in case no content type declared on {@link GridFSFile#getMetadata()} nor
|
||||
* provided via {@link GridFSFile#getContentType()}.
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public String getContentType() {
|
||||
|
||||
String contentType = file.getMetadata().get(CONTENT_TYPE_FIELD, String.class);
|
||||
|
||||
return contentType != null ? contentType : file.getContentType();
|
||||
return Optionals
|
||||
.firstNonEmpty(
|
||||
() -> Optional.ofNullable(file.getMetadata()).map(it -> it.get(CONTENT_TYPE_FIELD, String.class)),
|
||||
() -> Optional.ofNullable(file.getContentType()))
|
||||
.orElseThrow(() -> new MongoGridFSException("No contentType data for this GridFS file"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -59,7 +59,7 @@ inline fun <reified T : Any> MongoOperations.getCollectionName(): String =
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.execute(action: CollectionCallback<T>): T =
|
||||
inline fun <reified T : Any> MongoOperations.execute(action: CollectionCallback<T>): T? =
|
||||
execute(T::class.java, action)
|
||||
|
||||
/**
|
||||
@@ -278,7 +278,7 @@ inline fun <reified T : Any> MongoOperations.geoNear(near: NearQuery, collection
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.findOne(query: Query, collectionName: String? = null): T =
|
||||
inline fun <reified T : Any> MongoOperations.findOne(query: Query, collectionName: String? = null): T? =
|
||||
if (collectionName != null) findOne(query, T::class.java, collectionName) else findOne(query, T::class.java)
|
||||
|
||||
/**
|
||||
@@ -318,7 +318,7 @@ inline fun <reified T : Any> MongoOperations.find(query: Query, collectionName:
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.findById(id: Any, collectionName: String? = null): T =
|
||||
inline fun <reified T : Any> MongoOperations.findById(id: Any, collectionName: String? = null): T? =
|
||||
if (collectionName != null) findById(id, T::class.java, collectionName)
|
||||
else findById(id, T::class.java)
|
||||
|
||||
@@ -328,7 +328,7 @@ inline fun <reified T : Any> MongoOperations.findById(id: Any, collectionName: S
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.findAndModify(query: Query, update: Update, options: FindAndModifyOptions, collectionName: String? = null): T =
|
||||
inline fun <reified T : Any> MongoOperations.findAndModify(query: Query, update: Update, options: FindAndModifyOptions, collectionName: String? = null): T? =
|
||||
if (collectionName != null) findAndModify(query, update, options, T::class.java, collectionName)
|
||||
else findAndModify(query, update, options, T::class.java)
|
||||
|
||||
@@ -338,7 +338,7 @@ inline fun <reified T : Any> MongoOperations.findAndModify(query: Query, update:
|
||||
* @author Sebastien Deleuze
|
||||
* @since 2.0
|
||||
*/
|
||||
inline fun <reified T : Any> MongoOperations.findAndRemove(query: Query, collectionName: String? = null): T =
|
||||
inline fun <reified T : Any> MongoOperations.findAndRemove(query: Query, collectionName: String? = null): T? =
|
||||
if (collectionName != null) findAndRemove(query, T::class.java, collectionName)
|
||||
else findAndRemove(query, T::class.java)
|
||||
|
||||
@@ -414,7 +414,6 @@ inline fun <reified T : Any> MongoOperations.updateFirst(query: Query, update: U
|
||||
if (collectionName != null) updateFirst(query, update, T::class.java, collectionName)
|
||||
else updateFirst(query, update, T::class.java)
|
||||
|
||||
|
||||
/**
|
||||
* Extension for [MongoOperations.updateMulti] providing a [KClass] based variant.
|
||||
*
|
||||
|
||||
@@ -1,105 +1,105 @@
|
||||
/*
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.net.InetAddress;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
import org.bson.Document;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.data.mongodb.core.MongoClientFactoryBean;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.ServerAddress;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration
|
||||
public class MongoNamespaceReplicaSetTests {
|
||||
|
||||
@Autowired private ApplicationContext ctx;
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testParsingMongoWithReplicaSets() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("replicaSetMongo"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&replicaSetMongo");
|
||||
|
||||
List<ServerAddress> replicaSetSeeds = (List<ServerAddress>) ReflectionTestUtils.getField(mfb, "replicaSetSeeds");
|
||||
|
||||
assertThat(replicaSetSeeds, is(notNullValue()));
|
||||
assertThat(replicaSetSeeds, hasItems(new ServerAddress(InetAddress.getByName("127.0.0.1"), 10001),
|
||||
new ServerAddress(InetAddress.getByName("localhost"), 10002)));
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testParsingWithPropertyPlaceHolder() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("manyReplicaSetMongo"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&manyReplicaSetMongo");
|
||||
|
||||
List<ServerAddress> replicaSetSeeds = (List<ServerAddress>) ReflectionTestUtils.getField(mfb, "replicaSetSeeds");
|
||||
|
||||
assertThat(replicaSetSeeds, is(notNullValue()));
|
||||
assertThat(replicaSetSeeds, hasSize(3));
|
||||
|
||||
List<Integer> ports = new ArrayList<Integer>();
|
||||
for (ServerAddress replicaSetSeed : replicaSetSeeds) {
|
||||
ports.add(replicaSetSeed.getPort());
|
||||
}
|
||||
|
||||
assertThat(ports, hasItems(27017, 27018, 27019));
|
||||
}
|
||||
|
||||
@Test
|
||||
@Ignore("CI infrastructure does not yet support replica sets")
|
||||
public void testMongoWithReplicaSets() {
|
||||
|
||||
MongoClient mongo = ctx.getBean(MongoClient.class);
|
||||
assertEquals(2, mongo.getAllAddress().size());
|
||||
List<ServerAddress> servers = mongo.getAllAddress();
|
||||
assertEquals("127.0.0.1", servers.get(0).getHost());
|
||||
assertEquals("localhost", servers.get(1).getHost());
|
||||
assertEquals(10001, servers.get(0).getPort());
|
||||
assertEquals(10002, servers.get(1).getPort());
|
||||
|
||||
MongoTemplate template = new MongoTemplate(mongo, "admin");
|
||||
Document result = template.executeCommand("{replSetGetStatus : 1}");
|
||||
assertEquals("blort", result.get("set").toString());
|
||||
}
|
||||
}
|
||||
/*
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.net.InetAddress;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
import org.bson.Document;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.data.mongodb.core.MongoClientFactoryBean;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.ServerAddress;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration
|
||||
public class MongoNamespaceReplicaSetTests {
|
||||
|
||||
@Autowired private ApplicationContext ctx;
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testParsingMongoWithReplicaSets() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("replicaSetMongo"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&replicaSetMongo");
|
||||
|
||||
List<ServerAddress> replicaSetSeeds = (List<ServerAddress>) ReflectionTestUtils.getField(mfb, "replicaSetSeeds");
|
||||
|
||||
assertThat(replicaSetSeeds, is(notNullValue()));
|
||||
assertThat(replicaSetSeeds, hasItems(new ServerAddress(InetAddress.getByName("127.0.0.1"), 10001),
|
||||
new ServerAddress(InetAddress.getByName("localhost"), 10002)));
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testParsingWithPropertyPlaceHolder() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("manyReplicaSetMongo"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&manyReplicaSetMongo");
|
||||
|
||||
List<ServerAddress> replicaSetSeeds = (List<ServerAddress>) ReflectionTestUtils.getField(mfb, "replicaSetSeeds");
|
||||
|
||||
assertThat(replicaSetSeeds, is(notNullValue()));
|
||||
assertThat(replicaSetSeeds, hasSize(3));
|
||||
|
||||
List<Integer> ports = new ArrayList<Integer>();
|
||||
for (ServerAddress replicaSetSeed : replicaSetSeeds) {
|
||||
ports.add(replicaSetSeed.getPort());
|
||||
}
|
||||
|
||||
assertThat(ports, hasItems(27017, 27018, 27019));
|
||||
}
|
||||
|
||||
@Test
|
||||
@Ignore("CI infrastructure does not yet support replica sets")
|
||||
public void testMongoWithReplicaSets() {
|
||||
|
||||
MongoClient mongo = ctx.getBean(MongoClient.class);
|
||||
assertEquals(2, mongo.getAllAddress().size());
|
||||
List<ServerAddress> servers = mongo.getAllAddress();
|
||||
assertEquals("127.0.0.1", servers.get(0).getHost());
|
||||
assertEquals("localhost", servers.get(1).getHost());
|
||||
assertEquals(10001, servers.get(0).getPort());
|
||||
assertEquals(10002, servers.get(1).getPort());
|
||||
|
||||
MongoTemplate template = new MongoTemplate(mongo, "admin");
|
||||
Document result = template.executeCommand("{replSetGetStatus : 1}");
|
||||
assertEquals("blort", result.get("set").toString());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,239 +1,239 @@
|
||||
/*
|
||||
* Copyright 2010-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
import static org.junit.Assume.*;
|
||||
import static org.springframework.data.mongodb.util.MongoClientVersion.*;
|
||||
import static org.springframework.test.util.ReflectionTestUtils.*;
|
||||
|
||||
import javax.net.ssl.SSLSocketFactory;
|
||||
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.MongoClientFactoryBean;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.gridfs.GridFsOperations;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientOptions;
|
||||
import com.mongodb.MongoOptions;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
/**
|
||||
* Integration tests for the MongoDB namespace.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Martin Baumgartner
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration
|
||||
public class MongoNamespaceTests {
|
||||
|
||||
@Autowired ApplicationContext ctx;
|
||||
|
||||
@Test
|
||||
public void testMongoSingleton() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("noAttrMongo"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&noAttrMongo");
|
||||
|
||||
assertNull(getField(mfb, "host"));
|
||||
assertNull(getField(mfb, "port"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMongoSingletonWithAttributes() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("defaultMongo"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&defaultMongo");
|
||||
|
||||
String host = (String) getField(mfb, "host");
|
||||
Integer port = (Integer) getField(mfb, "port");
|
||||
|
||||
assertEquals("localhost", host);
|
||||
assertEquals(new Integer(27017), port);
|
||||
|
||||
MongoClientOptions options = (MongoClientOptions) getField(mfb, "mongoClientOptions");
|
||||
assertFalse("By default socketFactory should not be a SSLSocketFactory",
|
||||
options.getSocketFactory() instanceof SSLSocketFactory);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-764
|
||||
public void testMongoSingletonWithSslEnabled() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("mongoSsl"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoSsl");
|
||||
|
||||
MongoClientOptions options = (MongoClientOptions) getField(mfb, "mongoClientOptions");
|
||||
assertTrue("socketFactory should be a SSLSocketFactory", options.getSocketFactory() instanceof SSLSocketFactory);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1490
|
||||
public void testMongoClientSingletonWithSslEnabled() {
|
||||
|
||||
assertTrue(ctx.containsBean("mongoClientSsl"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoClientSsl");
|
||||
|
||||
MongoClientOptions options = (MongoClientOptions) getField(mfb, "mongoClientOptions");
|
||||
assertTrue("socketFactory should be a SSLSocketFactory", options.getSocketFactory() instanceof SSLSocketFactory);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-764
|
||||
public void testMongoSingletonWithSslEnabledAndCustomSslSocketFactory() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("mongoSslWithCustomSslFactory"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoSslWithCustomSslFactory");
|
||||
|
||||
SSLSocketFactory customSslSocketFactory = ctx.getBean("customSslSocketFactory", SSLSocketFactory.class);
|
||||
MongoClientOptions options = (MongoClientOptions) getField(mfb, "mongoClientOptions");
|
||||
|
||||
assertTrue("socketFactory should be a SSLSocketFactory", options.getSocketFactory() instanceof SSLSocketFactory);
|
||||
assertSame(customSslSocketFactory, options.getSocketFactory());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSecondMongoDbFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("secondMongoDbFactory"));
|
||||
MongoDbFactory dbf = (MongoDbFactory) ctx.getBean("secondMongoDbFactory");
|
||||
|
||||
MongoClient mongo = (MongoClient) getField(dbf, "mongoClient");
|
||||
assertEquals("127.0.0.1", mongo.getAddress().getHost());
|
||||
assertEquals(27017, mongo.getAddress().getPort());
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-789
|
||||
public void testThirdMongoDbFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("thirdMongoDbFactory"));
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) ctx.getBean("thirdMongoDbFactory");
|
||||
MongoClient mongo = (MongoClient) getField(dbf, "mongoClient");
|
||||
|
||||
assertEquals("127.0.0.1", mongo.getAddress().getHost());
|
||||
assertEquals(27017, mongo.getAddress().getPort());
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-140
|
||||
public void testMongoTemplateFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("mongoTemplate"));
|
||||
MongoOperations operations = (MongoOperations) ctx.getBean("mongoTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "mongoDbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "mongoConverter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-140
|
||||
public void testSecondMongoTemplateFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("anotherMongoTemplate"));
|
||||
MongoOperations operations = (MongoOperations) ctx.getBean("anotherMongoTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "mongoDbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
|
||||
WriteConcern writeConcern = (WriteConcern) getField(operations, "writeConcern");
|
||||
assertEquals(WriteConcern.SAFE, writeConcern);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-628
|
||||
public void testGridFsTemplateFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("gridFsTemplate"));
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("gridFsTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "converter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-628
|
||||
public void testSecondGridFsTemplateFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("secondGridFsTemplate"));
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("secondGridFsTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
assertEquals(null, getField(operations, "bucket"));
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "converter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-823
|
||||
public void testThirdGridFsTemplateFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("thirdGridFsTemplate"));
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("thirdGridFsTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
assertEquals("bucketString", getField(operations, "bucket"));
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "converter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("deprecation")
|
||||
public void testMongoSingletonWithPropertyPlaceHolders() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("mongoClient"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoClient");
|
||||
|
||||
String host = (String) getField(mfb, "host");
|
||||
Integer port = (Integer) getField(mfb, "port");
|
||||
|
||||
assertEquals("127.0.0.1", host);
|
||||
assertEquals(new Integer(27017), port);
|
||||
|
||||
MongoClient mongo = mfb.getObject();
|
||||
MongoClientOptions mongoOpts = mongo.getMongoClientOptions();
|
||||
|
||||
assertEquals(8, mongoOpts.getConnectionsPerHost());
|
||||
assertEquals(1000, mongoOpts.getConnectTimeout());
|
||||
assertEquals(1500, mongoOpts.getMaxWaitTime());
|
||||
|
||||
assertEquals(1500, mongoOpts.getSocketTimeout());
|
||||
assertEquals(4, mongoOpts.getThreadsAllowedToBlockForConnectionMultiplier());
|
||||
|
||||
// TODO: check the damned defaults
|
||||
// assertEquals("w", mongoOpts.getWriteConcern().getW());
|
||||
// assertEquals(0, mongoOpts.getWriteConcern().getWtimeout());
|
||||
// assertEquals(true, mongoOpts.getWriteConcern().fsync());
|
||||
}
|
||||
}
|
||||
/*
|
||||
* Copyright 2010-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
import static org.junit.Assume.*;
|
||||
import static org.springframework.data.mongodb.util.MongoClientVersion.*;
|
||||
import static org.springframework.test.util.ReflectionTestUtils.*;
|
||||
|
||||
import javax.net.ssl.SSLSocketFactory;
|
||||
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.MongoClientFactoryBean;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.gridfs.GridFsOperations;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientOptions;
|
||||
import com.mongodb.MongoOptions;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
/**
|
||||
* Integration tests for the MongoDB namespace.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Martin Baumgartner
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration
|
||||
public class MongoNamespaceTests {
|
||||
|
||||
@Autowired ApplicationContext ctx;
|
||||
|
||||
@Test
|
||||
public void testMongoSingleton() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("noAttrMongo"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&noAttrMongo");
|
||||
|
||||
assertNull(getField(mfb, "host"));
|
||||
assertNull(getField(mfb, "port"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMongoSingletonWithAttributes() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("defaultMongo"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&defaultMongo");
|
||||
|
||||
String host = (String) getField(mfb, "host");
|
||||
Integer port = (Integer) getField(mfb, "port");
|
||||
|
||||
assertEquals("localhost", host);
|
||||
assertEquals(new Integer(27017), port);
|
||||
|
||||
MongoClientOptions options = (MongoClientOptions) getField(mfb, "mongoClientOptions");
|
||||
assertFalse("By default socketFactory should not be a SSLSocketFactory",
|
||||
options.getSocketFactory() instanceof SSLSocketFactory);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-764
|
||||
public void testMongoSingletonWithSslEnabled() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("mongoSsl"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoSsl");
|
||||
|
||||
MongoClientOptions options = (MongoClientOptions) getField(mfb, "mongoClientOptions");
|
||||
assertTrue("socketFactory should be a SSLSocketFactory", options.getSocketFactory() instanceof SSLSocketFactory);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1490
|
||||
public void testMongoClientSingletonWithSslEnabled() {
|
||||
|
||||
assertTrue(ctx.containsBean("mongoClientSsl"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoClientSsl");
|
||||
|
||||
MongoClientOptions options = (MongoClientOptions) getField(mfb, "mongoClientOptions");
|
||||
assertTrue("socketFactory should be a SSLSocketFactory", options.getSocketFactory() instanceof SSLSocketFactory);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-764
|
||||
public void testMongoSingletonWithSslEnabledAndCustomSslSocketFactory() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("mongoSslWithCustomSslFactory"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoSslWithCustomSslFactory");
|
||||
|
||||
SSLSocketFactory customSslSocketFactory = ctx.getBean("customSslSocketFactory", SSLSocketFactory.class);
|
||||
MongoClientOptions options = (MongoClientOptions) getField(mfb, "mongoClientOptions");
|
||||
|
||||
assertTrue("socketFactory should be a SSLSocketFactory", options.getSocketFactory() instanceof SSLSocketFactory);
|
||||
assertSame(customSslSocketFactory, options.getSocketFactory());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSecondMongoDbFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("secondMongoDbFactory"));
|
||||
MongoDbFactory dbf = (MongoDbFactory) ctx.getBean("secondMongoDbFactory");
|
||||
|
||||
MongoClient mongo = (MongoClient) getField(dbf, "mongoClient");
|
||||
assertEquals("127.0.0.1", mongo.getAddress().getHost());
|
||||
assertEquals(27017, mongo.getAddress().getPort());
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-789
|
||||
public void testThirdMongoDbFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("thirdMongoDbFactory"));
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) ctx.getBean("thirdMongoDbFactory");
|
||||
MongoClient mongo = (MongoClient) getField(dbf, "mongoClient");
|
||||
|
||||
assertEquals("127.0.0.1", mongo.getAddress().getHost());
|
||||
assertEquals(27017, mongo.getAddress().getPort());
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-140
|
||||
public void testMongoTemplateFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("mongoTemplate"));
|
||||
MongoOperations operations = (MongoOperations) ctx.getBean("mongoTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "mongoDbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "mongoConverter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-140
|
||||
public void testSecondMongoTemplateFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("anotherMongoTemplate"));
|
||||
MongoOperations operations = (MongoOperations) ctx.getBean("anotherMongoTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "mongoDbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
|
||||
WriteConcern writeConcern = (WriteConcern) getField(operations, "writeConcern");
|
||||
assertEquals(WriteConcern.SAFE, writeConcern);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-628
|
||||
public void testGridFsTemplateFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("gridFsTemplate"));
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("gridFsTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "converter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-628
|
||||
public void testSecondGridFsTemplateFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("secondGridFsTemplate"));
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("secondGridFsTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
assertEquals(null, getField(operations, "bucket"));
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "converter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-823
|
||||
public void testThirdGridFsTemplateFactory() {
|
||||
|
||||
assertTrue(ctx.containsBean("thirdGridFsTemplate"));
|
||||
GridFsOperations operations = (GridFsOperations) ctx.getBean("thirdGridFsTemplate");
|
||||
|
||||
MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory");
|
||||
assertEquals("database", getField(dbf, "databaseName"));
|
||||
assertEquals("bucketString", getField(operations, "bucket"));
|
||||
|
||||
MongoConverter converter = (MongoConverter) getField(operations, "converter");
|
||||
assertNotNull(converter);
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("deprecation")
|
||||
public void testMongoSingletonWithPropertyPlaceHolders() throws Exception {
|
||||
|
||||
assertTrue(ctx.containsBean("mongoClient"));
|
||||
MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoClient");
|
||||
|
||||
String host = (String) getField(mfb, "host");
|
||||
Integer port = (Integer) getField(mfb, "port");
|
||||
|
||||
assertEquals("127.0.0.1", host);
|
||||
assertEquals(new Integer(27017), port);
|
||||
|
||||
MongoClient mongo = mfb.getObject();
|
||||
MongoClientOptions mongoOpts = mongo.getMongoClientOptions();
|
||||
|
||||
assertEquals(8, mongoOpts.getConnectionsPerHost());
|
||||
assertEquals(1000, mongoOpts.getConnectTimeout());
|
||||
assertEquals(1500, mongoOpts.getMaxWaitTime());
|
||||
|
||||
assertEquals(1500, mongoOpts.getSocketTimeout());
|
||||
assertEquals(4, mongoOpts.getThreadsAllowedToBlockForConnectionMultiplier());
|
||||
|
||||
// TODO: check the damned defaults
|
||||
// assertEquals("w", mongoOpts.getWriteConcern().getW());
|
||||
// assertEquals(0, mongoOpts.getWriteConcern().getWtimeout());
|
||||
// assertEquals(true, mongoOpts.getWriteConcern().fsync());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,110 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static java.util.Collections.*;
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.MockitoJUnitRunner;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate.BatchAggregationLoader;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
|
||||
import com.mongodb.ReadPreference;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link BatchAggregationLoader}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class BatchAggregationLoaderUnitTests {
|
||||
|
||||
static final TypedAggregation<Person> AGGREGATION = newAggregation(Person.class,
|
||||
project().and("firstName").as("name"));
|
||||
|
||||
@Mock MongoTemplate template;
|
||||
@Mock Document aggregationResult;
|
||||
@Mock Document getMoreResult;
|
||||
|
||||
BatchAggregationLoader loader;
|
||||
|
||||
Document luke = new Document("name", "luke");
|
||||
Document han = new Document("name", "han");
|
||||
Document cursorWithoutMore = new Document("firstBatch", singletonList(luke));
|
||||
Document cursorWithMore = new Document("id", 123).append("firstBatch", singletonList(luke));
|
||||
Document cursorWithNoMore = new Document("id", 0).append("nextBatch", singletonList(han));
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
loader = new BatchAggregationLoader(template, ReadPreference.primary(), 10);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1824
|
||||
public void shouldLoadWithoutCursor() {
|
||||
|
||||
when(template.executeCommand(any(Document.class), any(ReadPreference.class))).thenReturn(aggregationResult);
|
||||
when(aggregationResult.get("result")).thenReturn(singletonList(luke));
|
||||
|
||||
Document result = loader.aggregate("person", AGGREGATION, Aggregation.DEFAULT_CONTEXT);
|
||||
assertThat((List) result.get("result")).contains(luke);
|
||||
|
||||
verify(template).executeCommand(any(Document.class), any(ReadPreference.class));
|
||||
verifyNoMoreInteractions(template);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1824
|
||||
public void shouldLoadJustOneBatchWhenAlreadyDoneWithFirst() {
|
||||
|
||||
when(template.executeCommand(any(Document.class), any(ReadPreference.class))).thenReturn(aggregationResult);
|
||||
when(aggregationResult.containsKey("cursor")).thenReturn(true);
|
||||
when(aggregationResult.get("cursor")).thenReturn(cursorWithoutMore);
|
||||
|
||||
Document result = loader.aggregate("person", AGGREGATION, Aggregation.DEFAULT_CONTEXT);
|
||||
|
||||
assertThat((List) result.get("result")).contains(luke);
|
||||
|
||||
verify(template).executeCommand(any(Document.class), any(ReadPreference.class));
|
||||
verifyNoMoreInteractions(template);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1824
|
||||
public void shouldBatchLoadWhenRequired() {
|
||||
|
||||
when(template.executeCommand(any(Document.class), any(ReadPreference.class))).thenReturn(aggregationResult)
|
||||
.thenReturn(getMoreResult);
|
||||
when(aggregationResult.containsKey("cursor")).thenReturn(true);
|
||||
when(aggregationResult.get("cursor")).thenReturn(cursorWithMore);
|
||||
when(getMoreResult.containsKey("cursor")).thenReturn(true);
|
||||
when(getMoreResult.get("cursor")).thenReturn(cursorWithNoMore);
|
||||
|
||||
Document result = loader.aggregate("person", AGGREGATION, Aggregation.DEFAULT_CONTEXT);
|
||||
assertThat((List) result.get("result")).containsSequence(luke, han);
|
||||
|
||||
verify(template, times(2)).executeCommand(any(Document.class), any(ReadPreference.class));
|
||||
verifyNoMoreInteractions(template);
|
||||
}
|
||||
}
|
||||
@@ -15,8 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
@@ -66,9 +65,9 @@ public abstract class DocumentTestUtils {
|
||||
*/
|
||||
public static Document getAsDocument(List<?> source, int index) {
|
||||
|
||||
assertThat(source.size(), greaterThanOrEqualTo(index + 1));
|
||||
assertThat(source.size()).isGreaterThanOrEqualTo(index + 1);
|
||||
Object value = source.get(index);
|
||||
assertThat(value, is(instanceOf(Document.class)));
|
||||
assertThat(value).isInstanceOf(Document.class);
|
||||
return (Document) value;
|
||||
}
|
||||
|
||||
@@ -76,8 +75,8 @@ public abstract class DocumentTestUtils {
|
||||
public static <T> T getTypedValue(Document source, String key, Class<T> type) {
|
||||
|
||||
Object value = source.get(key);
|
||||
assertThat(value, is(notNullValue()));
|
||||
assertThat(value, is(instanceOf(type)));
|
||||
assertThat(value).isNotNull();
|
||||
assertThat(value).isInstanceOf(type);
|
||||
|
||||
return (T) value;
|
||||
}
|
||||
@@ -92,8 +91,8 @@ public abstract class DocumentTestUtils {
|
||||
while (keyIterator.hasNext()) {
|
||||
String key = keyIterator.next();
|
||||
if (key.equals("_class")) {
|
||||
assertThat((String) document.get(key), is(equalTo(expectedTypeString)));
|
||||
assertThat(keyIterator.hasNext(), is(false));
|
||||
assertThat(document.get(key)).isEqualTo(expectedTypeString);
|
||||
assertThat(keyIterator.hasNext()).isFalse();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,61 +1,61 @@
|
||||
/*
|
||||
* Copyright 2002-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* This test class assumes that you are already running the MongoDB server.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:infrastructure.xml")
public class MongoAdminIntegrationTests {

	private static final Log logger = LogFactory.getLog(MongoAdminIntegrationTests.class);

	@SuppressWarnings("unused") private DB testAdminDb;

	// Client injected from the infrastructure.xml context.
	@Autowired MongoClient mongoClient;

	MongoAdmin mongoAdmin;

	@Before
	public void setUp() {
		// Fresh admin wrapper per test, bound to the injected client.
		mongoAdmin = new MongoAdmin(mongoClient);
	}

	@Test
	public void serverStats() {
		// Smoke test only: logs the server status, no assertions.
		logger.info("stats = " + mongoAdmin.getServerStatus());
	}

	@Test
	public void databaseStats() {
		// Smoke test only: logs the stats of the "testAdminDb" database.
		logger.info(mongoAdmin.getDatabaseStats("testAdminDb"));
	}
}
|
||||
/*
|
||||
* Copyright 2002-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* This test class assumes that you are already running the MongoDB server.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:infrastructure.xml")
public class MongoAdminIntegrationTests {

	private static final Log logger = LogFactory.getLog(MongoAdminIntegrationTests.class);

	@SuppressWarnings("unused") private DB testAdminDb;

	// Client injected from the infrastructure.xml context.
	@Autowired MongoClient mongoClient;

	MongoAdmin mongoAdmin;

	@Before
	public void setUp() {
		// Fresh admin wrapper per test, bound to the injected client.
		mongoAdmin = new MongoAdmin(mongoClient);
	}

	@Test
	public void serverStats() {
		// Smoke test only: logs the server status, no assertions.
		logger.info("stats = " + mongoAdmin.getServerStatus());
	}

	@Test
	public void databaseStats() {
		// Smoke test only: logs the stats of the "testAdminDb" database.
		logger.info(mongoAdmin.getDatabaseStats("testAdminDb"));
	}
}
|
||||
|
||||
@@ -131,6 +131,8 @@ public class MongoTemplateTests {
|
||||
.parse("2.4");
|
||||
private static final org.springframework.data.util.Version THREE_DOT_FOUR = org.springframework.data.util.Version
|
||||
.parse("3.4");
|
||||
private static final org.springframework.data.util.Version THREE_DOT_SIX = org.springframework.data.util.Version
|
||||
.parse("3.6");
|
||||
|
||||
@Autowired MongoTemplate template;
|
||||
@Autowired MongoDbFactory factory;
|
||||
@@ -2394,6 +2396,8 @@ public class MongoTemplateTests {
|
||||
@Test // DATAMONGO-354
|
||||
public void testUpdateShouldAllowMultiplePushAll() {
|
||||
|
||||
assumeThat(mongoVersion.isLessThan(THREE_DOT_SIX), is(true));
|
||||
|
||||
DocumentWithMultipleCollections doc = new DocumentWithMultipleCollections();
|
||||
doc.id = "1234";
|
||||
doc.string1 = Arrays.asList("spring");
|
||||
|
||||
@@ -129,7 +129,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
|
||||
when(db.runCommand(Mockito.any(), Mockito.any(Class.class))).thenReturn(commandResultDocument);
|
||||
when(collection.find(Mockito.any(org.bson.Document.class))).thenReturn(findIterable);
|
||||
when(collection.mapReduce(Mockito.any(), Mockito.any())).thenReturn(mapReduceIterable);
|
||||
when(collection.count(any(), any())).thenReturn(1L);
|
||||
when(collection.count(any(Bson.class), any(CountOptions.class))).thenReturn(1L);
|
||||
when(findIterable.projection(Mockito.any())).thenReturn(findIterable);
|
||||
when(findIterable.sort(Mockito.any(org.bson.Document.class))).thenReturn(findIterable);
|
||||
when(findIterable.modifiers(Mockito.any(org.bson.Document.class))).thenReturn(findIterable);
|
||||
|
||||
@@ -1,96 +1,96 @@
|
||||
/*
|
||||
* Copyright 2002-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
|
||||
import org.springframework.context.support.AbstractApplicationContext;
|
||||
|
||||
/**
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class PersonExample {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(PersonExample.class);
|
||||
|
||||
@Autowired private MongoOperations mongoOps;
|
||||
|
||||
public static void main(String[] args) {
|
||||
AbstractApplicationContext applicationContext = new AnnotationConfigApplicationContext(PersonExampleAppConfig.class);
|
||||
PersonExample example = applicationContext.getBean(PersonExample.class);
|
||||
example.doWork();
|
||||
applicationContext.close();
|
||||
}
|
||||
|
||||
public void doWork() {
|
||||
mongoOps.dropCollection("personexample");
|
||||
|
||||
PersonWithIdPropertyOfTypeString p = new PersonWithIdPropertyOfTypeString();
|
||||
p.setFirstName("Sven");
|
||||
p.setAge(22);
|
||||
|
||||
mongoOps.save(p);
|
||||
|
||||
PersonWithIdPropertyOfTypeString p2 = new PersonWithIdPropertyOfTypeString();
|
||||
p2.setFirstName("Jon");
|
||||
p2.setAge(23);
|
||||
|
||||
mongoOps.save(p2);
|
||||
|
||||
LOGGER.debug("Saved: " + p);
|
||||
|
||||
p = mongoOps.findById(p.getId(), PersonWithIdPropertyOfTypeString.class);
|
||||
|
||||
LOGGER.debug("Found: " + p);
|
||||
|
||||
// mongoOps.updateFirst(new Query(where("firstName").is("Sven")), new Update().set("age", 24));
|
||||
|
||||
// mongoOps.updateFirst(new Query(where("firstName").is("Sven")), update("age", 24));
|
||||
|
||||
p = mongoOps.findById(p.getId(), PersonWithIdPropertyOfTypeString.class);
|
||||
LOGGER.debug("Updated: " + p);
|
||||
|
||||
List<PersonWithIdPropertyOfTypeString> folks = mongoOps.findAll(PersonWithIdPropertyOfTypeString.class);
|
||||
LOGGER.debug("Querying for all people...");
|
||||
for (PersonWithIdPropertyOfTypeString element : folks) {
|
||||
LOGGER.debug(element.toString());
|
||||
}
|
||||
|
||||
// mongoOps.remove( query(whereId().is(p.getId())), p.getClass());
|
||||
|
||||
mongoOps.remove(p);
|
||||
|
||||
List<PersonWithIdPropertyOfTypeString> people = mongoOps.findAll(PersonWithIdPropertyOfTypeString.class);
|
||||
|
||||
LOGGER.debug("Number of people = : " + people.size());
|
||||
|
||||
}
|
||||
|
||||
public void doWork2() {
|
||||
mongoOps.dropCollection("personexample");
|
||||
|
||||
PersonWithIdPropertyOfTypeString p = new PersonWithIdPropertyOfTypeString();
|
||||
p.setFirstName("Sven");
|
||||
p.setAge(22);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
/*
|
||||
* Copyright 2002-2013 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
|
||||
import org.springframework.context.support.AbstractApplicationContext;
|
||||
|
||||
/**
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public class PersonExample {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(PersonExample.class);
|
||||
|
||||
@Autowired private MongoOperations mongoOps;
|
||||
|
||||
public static void main(String[] args) {
|
||||
AbstractApplicationContext applicationContext = new AnnotationConfigApplicationContext(PersonExampleAppConfig.class);
|
||||
PersonExample example = applicationContext.getBean(PersonExample.class);
|
||||
example.doWork();
|
||||
applicationContext.close();
|
||||
}
|
||||
|
||||
public void doWork() {
|
||||
mongoOps.dropCollection("personexample");
|
||||
|
||||
PersonWithIdPropertyOfTypeString p = new PersonWithIdPropertyOfTypeString();
|
||||
p.setFirstName("Sven");
|
||||
p.setAge(22);
|
||||
|
||||
mongoOps.save(p);
|
||||
|
||||
PersonWithIdPropertyOfTypeString p2 = new PersonWithIdPropertyOfTypeString();
|
||||
p2.setFirstName("Jon");
|
||||
p2.setAge(23);
|
||||
|
||||
mongoOps.save(p2);
|
||||
|
||||
LOGGER.debug("Saved: " + p);
|
||||
|
||||
p = mongoOps.findById(p.getId(), PersonWithIdPropertyOfTypeString.class);
|
||||
|
||||
LOGGER.debug("Found: " + p);
|
||||
|
||||
// mongoOps.updateFirst(new Query(where("firstName").is("Sven")), new Update().set("age", 24));
|
||||
|
||||
// mongoOps.updateFirst(new Query(where("firstName").is("Sven")), update("age", 24));
|
||||
|
||||
p = mongoOps.findById(p.getId(), PersonWithIdPropertyOfTypeString.class);
|
||||
LOGGER.debug("Updated: " + p);
|
||||
|
||||
List<PersonWithIdPropertyOfTypeString> folks = mongoOps.findAll(PersonWithIdPropertyOfTypeString.class);
|
||||
LOGGER.debug("Querying for all people...");
|
||||
for (PersonWithIdPropertyOfTypeString element : folks) {
|
||||
LOGGER.debug(element.toString());
|
||||
}
|
||||
|
||||
// mongoOps.remove( query(whereId().is(p.getId())), p.getClass());
|
||||
|
||||
mongoOps.remove(p);
|
||||
|
||||
List<PersonWithIdPropertyOfTypeString> people = mongoOps.findAll(PersonWithIdPropertyOfTypeString.class);
|
||||
|
||||
LOGGER.debug("Number of people = : " + people.size());
|
||||
|
||||
}
|
||||
|
||||
public void doWork2() {
|
||||
mongoOps.dropCollection("personexample");
|
||||
|
||||
PersonWithIdPropertyOfTypeString p = new PersonWithIdPropertyOfTypeString();
|
||||
p.setFirstName("Sven");
|
||||
p.setAge(22);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,40 +1,40 @@
|
||||
/*
|
||||
* Copyright 2002-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
@Configuration
public class PersonExampleAppConfig {

	// Client connecting to a MongoDB instance on localhost (default port).
	@Bean
	public MongoClient mongoClient() {
		return new MongoClient("localhost");
	}

	// Template bound to the "database" database of the client above.
	@Bean
	public MongoTemplate mongoTemplate() throws Exception {
		return new MongoTemplate(mongoClient(), "database");
	}

	// The example bean; its MongoOperations dependency is autowired.
	@Bean
	public PersonExample personExample() {
		return new PersonExample();
	}
}
|
||||
/*
|
||||
* Copyright 2002-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
@Configuration
public class PersonExampleAppConfig {

	// Client connecting to a MongoDB instance on localhost (default port).
	@Bean
	public MongoClient mongoClient() {
		return new MongoClient("localhost");
	}

	// Template bound to the "database" database of the client above.
	@Bean
	public MongoTemplate mongoTemplate() throws Exception {
		return new MongoTemplate(mongoClient(), "database");
	}

	// The example bean; its MongoOperations dependency is autowired.
	@Bean
	public PersonExample personExample() {
		return new PersonExample();
	}
}
|
||||
|
||||
@@ -1,73 +1,73 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class Portfolio {
|
||||
|
||||
private String portfolioName;
|
||||
private User user;
|
||||
private List<Trade> trades;
|
||||
private Map<String, Integer> positions;
|
||||
private Map<String, Person> portfolioManagers;
|
||||
|
||||
public Map<String, Person> getPortfolioManagers() {
|
||||
return portfolioManagers;
|
||||
}
|
||||
|
||||
public void setPortfolioManagers(Map<String, Person> portfolioManagers) {
|
||||
this.portfolioManagers = portfolioManagers;
|
||||
}
|
||||
|
||||
public Map<String, Integer> getPositions() {
|
||||
return positions;
|
||||
}
|
||||
|
||||
public void setPositions(Map<String, Integer> positions) {
|
||||
this.positions = positions;
|
||||
}
|
||||
|
||||
public Portfolio() {
|
||||
trades = new ArrayList<Trade>();
|
||||
}
|
||||
|
||||
public String getPortfolioName() {
|
||||
return portfolioName;
|
||||
}
|
||||
|
||||
public void setPortfolioName(String portfolioName) {
|
||||
this.portfolioName = portfolioName;
|
||||
}
|
||||
|
||||
public List<Trade> getTrades() {
|
||||
return trades;
|
||||
}
|
||||
|
||||
public void setTrades(List<Trade> trades) {
|
||||
this.trades = trades;
|
||||
}
|
||||
|
||||
public User getUser() {
|
||||
return user;
|
||||
}
|
||||
|
||||
public void setUser(User user) {
|
||||
this.user = user;
|
||||
}
|
||||
}
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class Portfolio {
|
||||
|
||||
private String portfolioName;
|
||||
private User user;
|
||||
private List<Trade> trades;
|
||||
private Map<String, Integer> positions;
|
||||
private Map<String, Person> portfolioManagers;
|
||||
|
||||
public Map<String, Person> getPortfolioManagers() {
|
||||
return portfolioManagers;
|
||||
}
|
||||
|
||||
public void setPortfolioManagers(Map<String, Person> portfolioManagers) {
|
||||
this.portfolioManagers = portfolioManagers;
|
||||
}
|
||||
|
||||
public Map<String, Integer> getPositions() {
|
||||
return positions;
|
||||
}
|
||||
|
||||
public void setPositions(Map<String, Integer> positions) {
|
||||
this.positions = positions;
|
||||
}
|
||||
|
||||
public Portfolio() {
|
||||
trades = new ArrayList<Trade>();
|
||||
}
|
||||
|
||||
public String getPortfolioName() {
|
||||
return portfolioName;
|
||||
}
|
||||
|
||||
public void setPortfolioName(String portfolioName) {
|
||||
this.portfolioName = portfolioName;
|
||||
}
|
||||
|
||||
public List<Trade> getTrades() {
|
||||
return trades;
|
||||
}
|
||||
|
||||
public void setTrades(List<Trade> trades) {
|
||||
this.trades = trades;
|
||||
}
|
||||
|
||||
public User getUser() {
|
||||
return user;
|
||||
}
|
||||
|
||||
public void setUser(User user) {
|
||||
this.user = user;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,60 +1,60 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
/**
 * Test fixture representing a single trade order: ticker, quantity, per-unit price
 * and an order type string.
 */
public class Trade {

	private String ticker;
	private long quantity;
	private double price;
	private String orderType;

	public String getTicker() {
		return ticker;
	}

	public void setTicker(String ticker) {
		this.ticker = ticker;
	}

	public long getQuantity() {
		return quantity;
	}

	public void setQuantity(long quantity) {
		this.quantity = quantity;
	}

	public double getPrice() {
		return price;
	}

	public void setPrice(double price) {
		this.price = price;
	}

	public String getOrderType() {
		return orderType;
	}

	public void setOrderType(String orderType) {
		this.orderType = orderType;
	}
}
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
/**
 * Test fixture representing a single trade order: ticker, quantity, per-unit price
 * and an order type string.
 */
public class Trade {

	private String ticker;
	private long quantity;
	private double price;
	private String orderType;

	public String getTicker() {
		return ticker;
	}

	public void setTicker(String ticker) {
		this.ticker = ticker;
	}

	public long getQuantity() {
		return quantity;
	}

	public void setQuantity(long quantity) {
		this.quantity = quantity;
	}

	public double getPrice() {
		return price;
	}

	public void setPrice(double price) {
		this.price = price;
	}

	public String getOrderType() {
		return orderType;
	}

	public void setOrderType(String orderType) {
		this.orderType = orderType;
	}
}
|
||||
|
||||
@@ -1,71 +1,71 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
public class User {
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + ((accountName == null) ? 0 : accountName.hashCode());
|
||||
result = prime * result + ((userName == null) ? 0 : userName.hashCode());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj)
|
||||
return true;
|
||||
if (obj == null)
|
||||
return false;
|
||||
if (getClass() != obj.getClass())
|
||||
return false;
|
||||
User other = (User) obj;
|
||||
if (accountName == null) {
|
||||
if (other.accountName != null)
|
||||
return false;
|
||||
} else if (!accountName.equals(other.accountName))
|
||||
return false;
|
||||
if (userName == null) {
|
||||
if (other.userName != null)
|
||||
return false;
|
||||
} else if (!userName.equals(other.userName))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
private String accountName;
|
||||
|
||||
private String userName;
|
||||
|
||||
public String getAccountName() {
|
||||
return accountName;
|
||||
}
|
||||
|
||||
public void setAccountName(String accountName) {
|
||||
this.accountName = accountName;
|
||||
}
|
||||
|
||||
public String getUserName() {
|
||||
return userName;
|
||||
}
|
||||
|
||||
public void setUserName(String userName) {
|
||||
this.userName = userName;
|
||||
}
|
||||
|
||||
}
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
public class User {
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + ((accountName == null) ? 0 : accountName.hashCode());
|
||||
result = prime * result + ((userName == null) ? 0 : userName.hashCode());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj)
|
||||
return true;
|
||||
if (obj == null)
|
||||
return false;
|
||||
if (getClass() != obj.getClass())
|
||||
return false;
|
||||
User other = (User) obj;
|
||||
if (accountName == null) {
|
||||
if (other.accountName != null)
|
||||
return false;
|
||||
} else if (!accountName.equals(other.accountName))
|
||||
return false;
|
||||
if (userName == null) {
|
||||
if (other.userName != null)
|
||||
return false;
|
||||
} else if (!userName.equals(other.userName))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
private String accountName;
|
||||
|
||||
private String userName;
|
||||
|
||||
public String getAccountName() {
|
||||
return accountName;
|
||||
}
|
||||
|
||||
public void setAccountName(String accountName) {
|
||||
this.accountName = accountName;
|
||||
}
|
||||
|
||||
public String getUserName() {
|
||||
return userName;
|
||||
}
|
||||
|
||||
public void setUserName(String userName) {
|
||||
this.userName = userName;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,66 +1,66 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.joda.time.LocalDate;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.annotation.PersistenceConstructor;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
|
||||
@Document(collection = "newyork")
|
||||
public class Venue {
|
||||
|
||||
@Id private String id;
|
||||
private String name;
|
||||
private double[] location;
|
||||
private LocalDate openingDate;
|
||||
|
||||
@PersistenceConstructor
|
||||
Venue(String name, double[] location) {
|
||||
super();
|
||||
this.name = name;
|
||||
this.location = location;
|
||||
}
|
||||
|
||||
public Venue(String name, double x, double y) {
|
||||
super();
|
||||
this.name = name;
|
||||
this.location = new double[] { x, y };
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public double[] getLocation() {
|
||||
return location;
|
||||
}
|
||||
|
||||
public LocalDate getOpeningDate() {
|
||||
return openingDate;
|
||||
}
|
||||
|
||||
public void setOpeningDate(LocalDate openingDate) {
|
||||
this.openingDate = openingDate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Venue [id=" + id + ", name=" + name + ", location=" + Arrays.toString(location) + "]";
|
||||
}
|
||||
}
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.joda.time.LocalDate;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.annotation.PersistenceConstructor;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
|
||||
@Document(collection = "newyork")
|
||||
public class Venue {
|
||||
|
||||
@Id private String id;
|
||||
private String name;
|
||||
private double[] location;
|
||||
private LocalDate openingDate;
|
||||
|
||||
@PersistenceConstructor
|
||||
Venue(String name, double[] location) {
|
||||
super();
|
||||
this.name = name;
|
||||
this.location = location;
|
||||
}
|
||||
|
||||
public Venue(String name, double x, double y) {
|
||||
super();
|
||||
this.name = name;
|
||||
this.location = new double[] { x, y };
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public double[] getLocation() {
|
||||
return location;
|
||||
}
|
||||
|
||||
public LocalDate getOpeningDate() {
|
||||
return openingDate;
|
||||
}
|
||||
|
||||
public void setOpeningDate(LocalDate openingDate) {
|
||||
this.openingDate = openingDate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Venue [id=" + id + ", name=" + name + ", location=" + Arrays.toString(location) + "]";
|
||||
}
|
||||
}
|
||||
|
||||
536
spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java
Normal file → Executable file
536
spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java
Normal file → Executable file
File diff suppressed because it is too large
Load Diff
@@ -21,6 +21,8 @@ import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.springframework.data.mongodb.core.DocumentTestUtils.*;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
import java.net.URL;
|
||||
@@ -1803,6 +1805,22 @@ public class MappingMongoConverterUnitTests {
|
||||
converter.read(TypeWithMapOfLongValues.class, source);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1831
|
||||
public void shouldConvertArrayInConstructorCorrectly() {
|
||||
|
||||
org.bson.Document source = new org.bson.Document("array", Collections.emptyList());
|
||||
|
||||
assertThat(converter.read(WithArrayInConstructor.class, source).array, is(emptyArray()));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1831
|
||||
public void shouldConvertNullForArrayInConstructorCorrectly() {
|
||||
|
||||
org.bson.Document source = new org.bson.Document();
|
||||
|
||||
assertThat(converter.read(WithArrayInConstructor.class, source).array, is(nullValue()));
|
||||
}
|
||||
|
||||
static class GenericType<T> {
|
||||
T content;
|
||||
}
|
||||
@@ -2157,4 +2175,11 @@ public class MappingMongoConverterUnitTests {
|
||||
static class TypeWithMapOfLongValues {
|
||||
Map<String, Long> map;
|
||||
}
|
||||
|
||||
@RequiredArgsConstructor
|
||||
static class WithArrayInConstructor {
|
||||
|
||||
final String[] array;
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012 the original author or authors.
|
||||
* Copyright 2012-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,16 +15,22 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
import org.bson.types.Binary;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
@@ -34,37 +40,88 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
* Integration tests for {@link MongoConverters}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:infrastructure.xml")
|
||||
public class MongoConvertersIntegrationTests {
|
||||
|
||||
static final String COLLECTION = "_sample";
|
||||
static final String COLLECTION = "converter-tests";
|
||||
|
||||
@Autowired
|
||||
MongoOperations template;
|
||||
@Autowired MongoOperations template;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
template.dropCollection(COLLECTION);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Test // DATAMONGO-422
|
||||
public void writesUUIDBinaryCorrectly() {
|
||||
|
||||
Wrapper wrapper = new Wrapper();
|
||||
wrapper.uuid = UUID.randomUUID();
|
||||
template.save(wrapper);
|
||||
|
||||
assertThat(wrapper.id, is(notNullValue()));
|
||||
assertThat(wrapper.id).isNotNull();
|
||||
|
||||
Wrapper result = template.findOne(Query.query(Criteria.where("id").is(wrapper.id)), Wrapper.class);
|
||||
assertThat(result.uuid, is(wrapper.uuid));
|
||||
assertThat(result.uuid).isEqualTo(wrapper.uuid);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1802
|
||||
public void shouldConvertBinaryDataOnRead() {
|
||||
|
||||
WithBinaryDataInArray wbd = new WithBinaryDataInArray();
|
||||
wbd.data = "calliope-mini".getBytes();
|
||||
|
||||
template.save(wbd);
|
||||
|
||||
assertThat(template.findOne(query(where("id").is(wbd.id)), WithBinaryDataInArray.class)).isEqualTo(wbd);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1802
|
||||
public void shouldConvertEmptyBinaryDataOnRead() {
|
||||
|
||||
WithBinaryDataInArray wbd = new WithBinaryDataInArray();
|
||||
wbd.data = new byte[0];
|
||||
|
||||
template.save(wbd);
|
||||
|
||||
assertThat(template.findOne(query(where("id").is(wbd.id)), WithBinaryDataInArray.class)).isEqualTo(wbd);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1802
|
||||
public void shouldReadBinaryType() {
|
||||
|
||||
WithBinaryDataType wbd = new WithBinaryDataType();
|
||||
wbd.data = new Binary("calliope-mini".getBytes());
|
||||
|
||||
template.save(wbd);
|
||||
|
||||
assertThat(template.findOne(query(where("id").is(wbd.id)), WithBinaryDataType.class)).isEqualTo(wbd);
|
||||
}
|
||||
|
||||
@Document(collection = COLLECTION)
|
||||
static class Wrapper {
|
||||
|
||||
String id;
|
||||
UUID uuid;
|
||||
}
|
||||
|
||||
@Data
|
||||
@Document(collection = COLLECTION)
|
||||
static class WithBinaryDataInArray {
|
||||
|
||||
@Id String id;
|
||||
byte[] data;
|
||||
}
|
||||
|
||||
@Data
|
||||
@Document(collection = COLLECTION)
|
||||
static class WithBinaryDataType {
|
||||
|
||||
@Id String id;
|
||||
Binary data;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,11 +15,12 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.springframework.data.mongodb.core.DocumentTestUtils.*;
|
||||
import static org.springframework.data.mongodb.test.util.IsBsonObject.*;
|
||||
import static org.springframework.data.mongodb.test.util.Assertions.*;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.util.Arrays;
|
||||
@@ -29,9 +30,6 @@ import java.util.Map;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.hamcrest.collection.IsIterableContainingInOrder;
|
||||
import org.hamcrest.core.Is;
|
||||
import org.hamcrest.core.IsEqual;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
@@ -132,7 +130,7 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(ModelWrapper.class));
|
||||
|
||||
Document set = getAsDocument(mappedObject, "$set");
|
||||
assertThat(set.get("_class"), nullValue());
|
||||
assertThat(set.get("_class")).isNull();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-807
|
||||
@@ -145,7 +143,7 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(ModelWrapper.class));
|
||||
|
||||
Document set = getAsDocument(mappedObject, "$set");
|
||||
assertThat(set.get("_class"), nullValue());
|
||||
assertThat(set.get("_class")).isNull();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-407
|
||||
@@ -172,8 +170,8 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(ParentClass.class));
|
||||
|
||||
Document set = getAsDocument(mappedObject, "$set");
|
||||
assertThat(set.get("aliased.$.value"), is("foo"));
|
||||
assertThat(set.get("aliased.$.otherValue"), is("bar"));
|
||||
assertThat(set.get("aliased.$.value")).isEqualTo("foo");
|
||||
assertThat(set.get("aliased.$.otherValue")).isEqualTo("bar");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-407
|
||||
@@ -186,11 +184,10 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(ParentClass.class));
|
||||
|
||||
Document document = getAsDocument(mappedObject, "$set");
|
||||
assertThat(document.get("aliased.$.value"), is("foo"));
|
||||
assertThat(document.get("aliased.$.value")).isEqualTo("foo");
|
||||
|
||||
Document someObject = getAsDocument(document, "aliased.$.someObject");
|
||||
assertThat(someObject, is(notNullValue()));
|
||||
assertThat(someObject.get("value"), is("bubu"));
|
||||
assertThat(someObject).isNotNull().containsEntry("value", "bubu");
|
||||
assertTypeHint(someObject, ConcreteChildClass.class);
|
||||
}
|
||||
|
||||
@@ -205,10 +202,10 @@ public class UpdateMapperUnitTests {
|
||||
Document values = getAsDocument(push, "values");
|
||||
List<Object> each = getAsDBList(values, "$each");
|
||||
|
||||
assertThat(push.get("_class"), nullValue());
|
||||
assertThat(values.get("_class"), nullValue());
|
||||
assertThat(push.get("_class")).isNull();
|
||||
assertThat(values.get("_class")).isNull();
|
||||
|
||||
assertThat(each, IsIterableContainingInOrder.contains("spring", "data", "mongodb"));
|
||||
assertThat(each).containsExactly("spring", "data", "mongodb");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-812
|
||||
@@ -220,8 +217,8 @@ public class UpdateMapperUnitTests {
|
||||
Document push = getAsDocument(mappedObject, "$push");
|
||||
Document values = getAsDocument(push, "values");
|
||||
|
||||
assertThat(push.get("_class"), nullValue());
|
||||
assertThat(values.get("_class"), nullValue());
|
||||
assertThat(push.get("_class")).isNull();
|
||||
assertThat(values.get("_class")).isNull();
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
@@ -237,7 +234,7 @@ public class UpdateMapperUnitTests {
|
||||
List<Object> each = getAsDBList(model, "$each");
|
||||
List<Object> values = getAsDBList((Document) each.get(0), "values");
|
||||
|
||||
assertThat(values, IsIterableContainingInOrder.contains("spring", "data", "mongodb"));
|
||||
assertThat(values).containsExactly("spring", "data", "mongodb");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-812
|
||||
@@ -261,8 +258,8 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class));
|
||||
|
||||
Document push = getAsDocument(mappedObject, "$push");
|
||||
assertThat(getAsDocument(push, "category").containsKey("$each"), is(true));
|
||||
assertThat(getAsDocument(push, "type").containsKey("$each"), is(true));
|
||||
assertThat(getAsDocument(push, "category")).containsKey("$each");
|
||||
assertThat(getAsDocument(push, "type")).containsKey("$each");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-943
|
||||
@@ -275,9 +272,9 @@ public class UpdateMapperUnitTests {
|
||||
Document push = getAsDocument(mappedObject, "$push");
|
||||
Document key = getAsDocument(push, "key");
|
||||
|
||||
assertThat(key.containsKey("$position"), is(true));
|
||||
assertThat(key.get("$position"), is(2));
|
||||
assertThat(getAsDocument(push, "key").containsKey("$each"), is(true));
|
||||
assertThat(key.containsKey("$position")).isTrue();
|
||||
assertThat(key.get("$position")).isEqualTo(2);
|
||||
assertThat(getAsDocument(push, "key")).containsKey("$each");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-943
|
||||
@@ -290,9 +287,9 @@ public class UpdateMapperUnitTests {
|
||||
Document push = getAsDocument(mappedObject, "$push");
|
||||
Document key = getAsDocument(push, "key");
|
||||
|
||||
assertThat(key.containsKey("$position"), is(true));
|
||||
assertThat(key.get("$position"), is(0));
|
||||
assertThat(getAsDocument(push, "key").containsKey("$each"), is(true));
|
||||
assertThat(key.containsKey("$position")).isTrue();
|
||||
assertThat(key.get("$position")).isEqualTo(0);
|
||||
assertThat(getAsDocument(push, "key")).containsKey("$each");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-943
|
||||
@@ -305,8 +302,8 @@ public class UpdateMapperUnitTests {
|
||||
Document push = getAsDocument(mappedObject, "$push");
|
||||
Document key = getAsDocument(push, "key");
|
||||
|
||||
assertThat(key.containsKey("$position"), is(false));
|
||||
assertThat(getAsDocument(push, "key").containsKey("$each"), is(true));
|
||||
assertThat(key).doesNotContainKey("$position");
|
||||
assertThat(getAsDocument(push, "key")).containsKey("$each");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-943
|
||||
@@ -319,8 +316,8 @@ public class UpdateMapperUnitTests {
|
||||
Document push = getAsDocument(mappedObject, "$push");
|
||||
Document key = getAsDocument(push, "key");
|
||||
|
||||
assertThat(key.containsKey("$position"), is(false));
|
||||
assertThat(getAsDocument(push, "key").containsKey("$each"), is(true));
|
||||
assertThat(key).doesNotContainKey("$position");
|
||||
assertThat(getAsDocument(push, "key")).containsKey("$each");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-832
|
||||
@@ -333,9 +330,8 @@ public class UpdateMapperUnitTests {
|
||||
Document push = getAsDocument(mappedObject, "$push");
|
||||
Document key = getAsDocument(push, "key");
|
||||
|
||||
assertThat(key.containsKey("$slice"), is(true));
|
||||
assertThat(key.get("$slice"), is(5));
|
||||
assertThat(key.containsKey("$each"), is(true));
|
||||
assertThat(key).containsKey("$slice").containsEntry("$slice", 5);
|
||||
assertThat(key).containsKey("$each");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-832
|
||||
@@ -349,15 +345,13 @@ public class UpdateMapperUnitTests {
|
||||
Document push = getAsDocument(mappedObject, "$push");
|
||||
Document key = getAsDocument(push, "key");
|
||||
|
||||
assertThat(key.containsKey("$slice"), is(true));
|
||||
assertThat((Integer) key.get("$slice"), is(5));
|
||||
assertThat(key.containsKey("$each"), is(true));
|
||||
assertThat(key).containsKey("$slice").containsEntry("$slice", 5);
|
||||
assertThat(key.containsKey("$each")).isTrue();
|
||||
|
||||
Document key2 = getAsDocument(push, "key-2");
|
||||
|
||||
assertThat(key2.containsKey("$slice"), is(true));
|
||||
assertThat((Integer) key2.get("$slice"), is(-2));
|
||||
assertThat(key2.containsKey("$each"), is(true));
|
||||
assertThat(key2).containsKey("$slice").containsEntry("$slice", -2);
|
||||
assertThat(key2).containsKey("$each");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1141
|
||||
@@ -371,9 +365,9 @@ public class UpdateMapperUnitTests {
|
||||
Document push = getAsDocument(mappedObject, "$push");
|
||||
Document key = getAsDocument(push, "scores");
|
||||
|
||||
assertThat(key.containsKey("$sort"), is(true));
|
||||
assertThat((Integer) key.get("$sort"), is(-1));
|
||||
assertThat(key.containsKey("$each"), is(true));
|
||||
assertThat(key).containsKey("$sort");
|
||||
assertThat(key).containsEntry("$sort", -1);
|
||||
assertThat(key).containsKey("$each");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1141
|
||||
@@ -389,9 +383,9 @@ public class UpdateMapperUnitTests {
|
||||
Document push = getAsDocument(mappedObject, "$push");
|
||||
Document key = getAsDocument(push, "list");
|
||||
|
||||
assertThat(key.containsKey("$sort"), is(true));
|
||||
assertThat((Document) key.get("$sort"), equalTo(new Document("renamed-value", 1).append("field", 1)));
|
||||
assertThat(key.containsKey("$each"), is(true));
|
||||
assertThat(key).containsKey("$sort");
|
||||
assertThat(key.get("$sort")).isEqualTo(new Document("renamed-value", 1).append("field", 1));
|
||||
assertThat(key).containsKey("$each");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1141
|
||||
@@ -405,15 +399,15 @@ public class UpdateMapperUnitTests {
|
||||
Document push = getAsDocument(mappedObject, "$push");
|
||||
Document key1 = getAsDocument(push, "authors");
|
||||
|
||||
assertThat(key1.containsKey("$sort"), is(true));
|
||||
assertThat((Integer) key1.get("$sort"), is(1));
|
||||
assertThat(key1.containsKey("$each"), is(true));
|
||||
assertThat(key1).containsKey("$sort");
|
||||
assertThat(key1).containsEntry("$sort", 1);
|
||||
assertThat(key1).containsKey("$each");
|
||||
|
||||
Document key2 = getAsDocument(push, "chapters");
|
||||
|
||||
assertThat(key2.containsKey("$sort"), is(true));
|
||||
assertThat((Document) key2.get("$sort"), equalTo(new Document("order", 1)));
|
||||
assertThat(key2.containsKey("$each"), is(true));
|
||||
assertThat(key2).containsKey("$sort");
|
||||
assertThat(key2.get("$sort")).isEqualTo(new Document("order", 1));
|
||||
assertThat(key2.containsKey("$each")).isTrue();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-410
|
||||
@@ -438,7 +432,7 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(DocumentWithDBRefCollection.class));
|
||||
|
||||
Document pullClause = getAsDocument(mappedObject, "$pull");
|
||||
assertThat(pullClause.get("dbRefAnnotatedList"), is(new DBRef("entity", "2")));
|
||||
assertThat(pullClause.get("dbRefAnnotatedList")).isEqualTo(new DBRef("entity", "2"));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-404
|
||||
@@ -452,7 +446,7 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(DocumentWithDBRefCollection.class));
|
||||
|
||||
Document pullClause = getAsDocument(mappedObject, "$pull");
|
||||
assertThat(pullClause.get("dbRefAnnotatedList"), is(new DBRef("entity", entity.id)));
|
||||
assertThat(pullClause.get("dbRefAnnotatedList")).isEqualTo(new DBRef("entity", entity.id));
|
||||
}
|
||||
|
||||
@Test(expected = MappingException.class) // DATAMONGO-404
|
||||
@@ -470,7 +464,7 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(Wrapper.class));
|
||||
|
||||
Document pullClause = getAsDocument(mappedObject, "$pull");
|
||||
assertThat(pullClause.containsKey("mapped.dbRefAnnotatedList"), is(true));
|
||||
assertThat(pullClause.containsKey("mapped.dbRefAnnotatedList")).isTrue();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-468
|
||||
@@ -484,7 +478,7 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(DocumentWithDBRefCollection.class));
|
||||
|
||||
Document setClause = getAsDocument(mappedObject, "$set");
|
||||
assertThat(setClause.get("dbRefProperty"), is(new DBRef("entity", entity.id)));
|
||||
assertThat(setClause.get("dbRefProperty")).isEqualTo(new DBRef("entity", entity.id));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-862
|
||||
@@ -495,7 +489,7 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(ParentClass.class));
|
||||
|
||||
Document setClause = getAsDocument(mappedObject, "$set");
|
||||
assertThat(setClause.containsKey("listOfInterface.$.value"), is(true));
|
||||
assertThat(setClause.containsKey("listOfInterface.$.value")).isTrue();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-863
|
||||
@@ -513,7 +507,7 @@ public class UpdateMapperUnitTests {
|
||||
Document idClause = getAsDocument(options, "_id");
|
||||
List<Object> inClause = getAsDBList(idClause, "$in");
|
||||
|
||||
assertThat(inClause, IsIterableContainingInOrder.contains(1L, 2L));
|
||||
assertThat(inClause).containsExactly(1L, 2L);
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
@@ -528,7 +522,7 @@ public class UpdateMapperUnitTests {
|
||||
Document values = getAsDocument(addToSet, "values");
|
||||
List<Object> each = getAsDBList(values, "$each");
|
||||
|
||||
assertThat(each, IsIterableContainingInOrder.contains("spring", "data", "mongodb"));
|
||||
assertThat(each).containsExactly("spring", "data", "mongodb");
|
||||
}
|
||||
|
||||
@Test // DATAMONG0-471
|
||||
@@ -559,7 +553,7 @@ public class UpdateMapperUnitTests {
|
||||
Object model = $set.get("referencedDocument");
|
||||
|
||||
DBRef expectedDBRef = new DBRef("interfaceDocumentDefinitionImpl", "1");
|
||||
assertThat(model, allOf(instanceOf(DBRef.class), IsEqual.equalTo(expectedDBRef)));
|
||||
assertThat(model).isInstanceOf(DBRef.class).isEqualTo(expectedDBRef);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-847
|
||||
@@ -574,7 +568,7 @@ public class UpdateMapperUnitTests {
|
||||
Document value = DocumentTestUtils.getAsDocument(list, "value");
|
||||
List<Object> $in = DocumentTestUtils.getAsDBList(value, "$in");
|
||||
|
||||
assertThat($in, IsIterableContainingInOrder.contains("foo", "bar"));
|
||||
assertThat($in).containsExactly("foo", "bar");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-847
|
||||
@@ -587,7 +581,7 @@ public class UpdateMapperUnitTests {
|
||||
Document $pull = DocumentTestUtils.getAsDocument(mappedUpdate, "$pull");
|
||||
Document list = DocumentTestUtils.getAsDocument($pull, "dbRefAnnotatedList");
|
||||
|
||||
assertThat(list, equalTo(new org.bson.Document().append("_id", "1")));
|
||||
assertThat(list).isEqualTo(new org.bson.Document().append("_id", "1"));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1077
|
||||
@@ -601,7 +595,7 @@ public class UpdateMapperUnitTests {
|
||||
|
||||
Document $unset = DocumentTestUtils.getAsDocument(mappedUpdate, "$unset");
|
||||
|
||||
assertThat($unset, equalTo(new org.bson.Document().append("dbRefAnnotatedList.$", 1)));
|
||||
assertThat($unset).isEqualTo(new org.bson.Document().append("dbRefAnnotatedList.$", 1));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1210
|
||||
@@ -613,8 +607,8 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(DocumentWithNestedCollection.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().notContaining("$addToSet.nestedDocs.$each.[0]._class"));
|
||||
assertThat(mappedUpdate, isBsonObject().notContaining("$addToSet.nestedDocs.$each.[1]._class"));
|
||||
assertThat(mappedUpdate).doesNotContainKey("$addToSet.nestedDocs.$each.[0]._class")
|
||||
.doesNotContainKey("$addToSet.nestedDocs.$each.[1]._class");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1210
|
||||
@@ -625,8 +619,8 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ListModelWrapper.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$addToSet.models.$each.[0]._class", ModelImpl.class.getName()));
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$addToSet.models.$each.[1]._class", ModelImpl.class.getName()));
|
||||
assertThat(mappedUpdate).containsEntry("$addToSet.models.$each.[0]._class", ModelImpl.class.getName());
|
||||
assertThat(mappedUpdate).containsEntry("$addToSet.models.$each.[1]._class", ModelImpl.class.getName());
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1210
|
||||
@@ -638,10 +632,8 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(ParentClass.class));
|
||||
|
||||
assertThat(mappedUpdate,
|
||||
isBsonObject().containing("$addToSet.aliased.$each.[0]._class", ConcreteChildClass.class.getName()));
|
||||
assertThat(mappedUpdate,
|
||||
isBsonObject().containing("$addToSet.aliased.$each.[1]._class", ConcreteChildClass.class.getName()));
|
||||
assertThat(mappedUpdate).containsEntry("$addToSet.aliased.$each.[0]._class", ConcreteChildClass.class.getName());
|
||||
assertThat(mappedUpdate).containsEntry("$addToSet.aliased.$each.[1]._class", ConcreteChildClass.class.getName());
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1210
|
||||
@@ -654,12 +646,11 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(DomainTypeWithListOfConcreteTypesHavingSingleInterfaceTypeAttribute.class));
|
||||
|
||||
assertThat(mappedUpdate,
|
||||
isBsonObject().notContaining("$addToSet.listHoldingConcretyTypeWithInterfaceTypeAttribute.$each.[0]._class"));
|
||||
assertThat(mappedUpdate,
|
||||
isBsonObject().containing(
|
||||
"$addToSet.listHoldingConcretyTypeWithInterfaceTypeAttribute.$each.[0].interfaceType._class",
|
||||
ModelImpl.class.getName()));
|
||||
assertThat(mappedUpdate)
|
||||
.doesNotContainKey("$addToSet.listHoldingConcretyTypeWithInterfaceTypeAttribute.$each.[0]._class");
|
||||
assertThat(mappedUpdate).containsEntry(
|
||||
"$addToSet.listHoldingConcretyTypeWithInterfaceTypeAttribute.$each.[0].interfaceType._class",
|
||||
ModelImpl.class.getName());
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1210
|
||||
@@ -672,9 +663,26 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().notContaining("$set.concreteTypeWithListAttributeOfInterfaceType._class"));
|
||||
assertThat(mappedUpdate, isBsonObject()
|
||||
.containing("$set.concreteTypeWithListAttributeOfInterfaceType.models.[0]._class", ModelImpl.class.getName()));
|
||||
assertThat(mappedUpdate).doesNotContainKey("$set.concreteTypeWithListAttributeOfInterfaceType._class");
|
||||
assertThat(mappedUpdate).containsEntry("$set.concreteTypeWithListAttributeOfInterfaceType.models.[0]._class",
|
||||
ModelImpl.class.getName());
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1809
|
||||
public void pathShouldIdentifyPositionalParameterWithMoreThanOneDigit() {
|
||||
|
||||
Document at2digitPosition = mapper.getMappedObject(new Update()
|
||||
.addToSet("concreteInnerList.10.concreteTypeList", new SomeInterfaceImpl("szeth")).getUpdateObject(),
|
||||
context.getPersistentEntity(Outer.class));
|
||||
|
||||
Document at3digitPosition = mapper.getMappedObject(new Update()
|
||||
.addToSet("concreteInnerList.123.concreteTypeList", new SomeInterfaceImpl("lopen")).getUpdateObject(),
|
||||
context.getPersistentEntity(Outer.class));
|
||||
|
||||
assertThat(at2digitPosition).isEqualTo(new Document("$addToSet",
|
||||
new Document("concreteInnerList.10.concreteTypeList", new Document("value", "szeth"))));
|
||||
assertThat(at3digitPosition).isEqualTo(new Document("$addToSet",
|
||||
new Document("concreteInnerList.123.concreteTypeList", new Document("value", "lopen"))));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1236
|
||||
@@ -684,8 +692,8 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithObject.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.value.name", "kaladin"));
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.value._class", NestedDocument.class.getName()));
|
||||
assertThat(mappedUpdate).containsEntry("$set.value.name", "kaladin");
|
||||
assertThat(mappedUpdate).containsEntry("$set.value._class", NestedDocument.class.getName());
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1236
|
||||
@@ -695,8 +703,8 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithObject.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.concreteValue.name", "shallan"));
|
||||
assertThat(mappedUpdate, isBsonObject().notContaining("$set.concreteValue._class"));
|
||||
assertThat(mappedUpdate).containsEntry("$set.concreteValue.name", "shallan");
|
||||
assertThat(mappedUpdate).doesNotContainKey("$set.concreteValue._class");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1236
|
||||
@@ -706,8 +714,8 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithAliasedObject.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.renamed-value.name", "adolin"));
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.renamed-value._class", NestedDocument.class.getName()));
|
||||
assertThat(mappedUpdate).containsEntry("$set.renamed-value.name", "adolin");
|
||||
assertThat(mappedUpdate).containsEntry("$set.renamed-value._class", NestedDocument.class.getName());
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1236
|
||||
@@ -719,8 +727,8 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithObjectMap.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.map.szeth.name", "son-son-vallano"));
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.map.szeth._class", NestedDocument.class.getName()));
|
||||
assertThat(mappedUpdate).containsEntry("$set.map.szeth.name", "son-son-vallano");
|
||||
assertThat(mappedUpdate).containsEntry("$set.map.szeth._class", NestedDocument.class.getName());
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1236
|
||||
@@ -732,8 +740,8 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(EntityWithObjectMap.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.concreteMap.jasnah.name", "kholin"));
|
||||
assertThat(mappedUpdate, isBsonObject().notContaining("$set.concreteMap.jasnah._class"));
|
||||
assertThat(mappedUpdate).containsEntry("$set.concreteMap.jasnah.name", "kholin");
|
||||
assertThat(mappedUpdate).doesNotContainKey("$set.concreteMap.jasnah._class");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1250
|
||||
@@ -757,7 +765,7 @@ public class UpdateMapperUnitTests {
|
||||
Document result = mapper.getMappedObject(update.getUpdateObject(),
|
||||
mappingContext.getPersistentEntity(ClassWithEnum.class));
|
||||
|
||||
assertThat(result, isBsonObject().containing("$set.allocation", ClassWithEnum.Allocation.AVAILABLE.code));
|
||||
assertThat(result).containsEntry("$set.allocation", ClassWithEnum.Allocation.AVAILABLE.code);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1251
|
||||
@@ -769,8 +777,7 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(ConcreteChildClass.class));
|
||||
|
||||
Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set");
|
||||
assertThat($set.containsKey("value"), is(true));
|
||||
assertThat($set.get("value"), nullValue());
|
||||
assertThat($set).containsKey("value").containsEntry("value", null);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1251
|
||||
@@ -782,8 +789,7 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(ClassWithJava8Date.class));
|
||||
|
||||
Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set");
|
||||
assertThat($set.containsKey("date"), is(true));
|
||||
assertThat($set.get("value"), nullValue());
|
||||
assertThat($set).containsKey("date").doesNotContainKey("value");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1251
|
||||
@@ -795,8 +801,7 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(ListModel.class));
|
||||
|
||||
Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set");
|
||||
assertThat($set.containsKey("values"), is(true));
|
||||
assertThat($set.get("value"), nullValue());
|
||||
assertThat($set).containsKey("values").doesNotContainKey("value");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1251
|
||||
@@ -808,8 +813,8 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(EntityWithObject.class));
|
||||
|
||||
Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set");
|
||||
assertThat($set.containsKey("concreteValue.name"), is(true));
|
||||
assertThat($set.get("concreteValue.name"), nullValue());
|
||||
assertThat($set).containsKey("concreteValue.name");
|
||||
assertThat($set).containsEntry("concreteValue.name", null);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1288
|
||||
@@ -820,7 +825,7 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(SimpleValueHolder.class));
|
||||
|
||||
Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set");
|
||||
assertThat($set.get("intValue"), Is.is(10));
|
||||
assertThat($set.get("intValue")).isEqualTo(10);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1288
|
||||
@@ -831,7 +836,7 @@ public class UpdateMapperUnitTests {
|
||||
context.getPersistentEntity(SimpleValueHolder.class));
|
||||
|
||||
Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set");
|
||||
assertThat($set.get("primIntValue"), Is.is(10));
|
||||
assertThat($set.get("primIntValue")).isEqualTo(10);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1404
|
||||
@@ -841,7 +846,7 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(SimpleValueHolder.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$min", new Document("minfield", 10)));
|
||||
assertThat(mappedUpdate).containsEntry("$min", new Document("minfield", 10));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1404
|
||||
@@ -851,7 +856,7 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(),
|
||||
context.getPersistentEntity(SimpleValueHolder.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$max", new Document("maxfield", 999)));
|
||||
assertThat(mappedUpdate).containsEntry("$max", new Document("maxfield", 999));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1423
|
||||
@@ -876,10 +881,10 @@ public class UpdateMapperUnitTests {
|
||||
mappingContext.getPersistentEntity(ClassWithEnum.class));
|
||||
|
||||
Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set");
|
||||
assertThat($set.containsKey("enumAsMapKey"), is(true));
|
||||
assertThat($set.containsKey("enumAsMapKey")).isTrue();
|
||||
|
||||
Document enumAsMapKey = $set.get("enumAsMapKey", Document.class);
|
||||
assertThat(enumAsMapKey.get("AVAILABLE"), is(100));
|
||||
assertThat(enumAsMapKey.get("AVAILABLE")).isEqualTo(100);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1176
|
||||
@@ -889,8 +894,8 @@ public class UpdateMapperUnitTests {
|
||||
|
||||
Document mappedObject = mapper.getMappedObject(document, context.getPersistentEntity(SimpleValueHolder.class));
|
||||
|
||||
assertThat(mappedObject.get("$set"), is(equalTo(new Document("a", "b").append("x", "y").append("key", "value"))));
|
||||
assertThat(mappedObject.size(), is(1));
|
||||
assertThat(mappedObject.get("$set")).isEqualTo(new Document("a", "b").append("x", "y").append("key", "value"));
|
||||
assertThat(mappedObject).hasSize(1);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1176
|
||||
@@ -900,10 +905,10 @@ public class UpdateMapperUnitTests {
|
||||
|
||||
Document mappedObject = mapper.getMappedObject(document, context.getPersistentEntity(SimpleValueHolder.class));
|
||||
|
||||
assertThat(mappedObject.get("key"), is(equalTo("value")));
|
||||
assertThat(mappedObject.get("a"), is(equalTo("b")));
|
||||
assertThat(mappedObject.get("x"), is(equalTo("y")));
|
||||
assertThat(mappedObject.size(), is(3));
|
||||
assertThat(mappedObject).containsEntry("key", "value");
|
||||
assertThat(mappedObject).containsEntry("a", "b");
|
||||
assertThat(mappedObject).containsEntry("x", "y");
|
||||
assertThat(mappedObject).hasSize(3);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1176
|
||||
@@ -913,9 +918,9 @@ public class UpdateMapperUnitTests {
|
||||
|
||||
Document mappedObject = mapper.getMappedObject(document, context.getPersistentEntity(SimpleValueHolder.class));
|
||||
|
||||
assertThat(mappedObject.get("$push"), is(equalTo(new Document("x", "y"))));
|
||||
assertThat(mappedObject.get("$set"), is(equalTo(new Document("a", "b"))));
|
||||
assertThat(mappedObject.size(), is(2));
|
||||
assertThat(mappedObject).containsEntry("$push", new Document("x", "y"));
|
||||
assertThat(mappedObject).containsEntry("$set", new Document("a", "b"));
|
||||
assertThat(mappedObject).hasSize(2);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1486
|
||||
@@ -928,7 +933,7 @@ public class UpdateMapperUnitTests {
|
||||
Document mapToSet = getAsDocument(getAsDocument(mappedUpdate, "$set"), "map");
|
||||
|
||||
for (Object key : mapToSet.keySet()) {
|
||||
assertThat(key, is(instanceOf(String.class)));
|
||||
assertThat(key).isInstanceOf(String.class);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -942,8 +947,8 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedUpdate = mapper.getMappedObject(new Update().set("concreteInnerList", list).getUpdateObject(),
|
||||
context.getPersistentEntity(Outer.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.concreteInnerList.[0].interfaceTypeList.[0]._class")
|
||||
.notContaining("$set.concreteInnerList.[0]._class"));
|
||||
assertThat(mappedUpdate).containsKey("$set.concreteInnerList.[0].interfaceTypeList.[0]._class")
|
||||
.doesNotContainKey("$set.concreteInnerList.[0]._class");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1772
|
||||
@@ -956,8 +961,8 @@ public class UpdateMapperUnitTests {
|
||||
Document mappedUpdate = mapper.getMappedObject(new Update().set("concreteInnerList", list).getUpdateObject(),
|
||||
context.getPersistentEntity(Outer.class));
|
||||
|
||||
assertThat(mappedUpdate, isBsonObject().containing("$set.concreteInnerList.[0].abstractTypeList.[0]._class")
|
||||
.notContaining("$set.concreteInnerList.[0]._class"));
|
||||
assertThat(mappedUpdate).containsKey("$set.concreteInnerList.[0].abstractTypeList.[0]._class")
|
||||
.doesNotContainKey("$set.concreteInnerList.[0]._class");
|
||||
}
|
||||
|
||||
static class DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes {
|
||||
@@ -1254,6 +1259,7 @@ public class UpdateMapperUnitTests {
|
||||
static class ConcreteInner {
|
||||
List<SomeInterfaceType> interfaceTypeList;
|
||||
List<SomeAbstractType> abstractTypeList;
|
||||
List<SomeInterfaceImpl> concreteTypeList;
|
||||
}
|
||||
|
||||
interface SomeInterfaceType {
|
||||
@@ -1264,8 +1270,11 @@ public class UpdateMapperUnitTests {
|
||||
|
||||
}
|
||||
|
||||
@AllArgsConstructor
|
||||
@NoArgsConstructor
|
||||
static class SomeInterfaceImpl extends SomeAbstractType implements SomeInterfaceType {
|
||||
|
||||
String value;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,89 +1,89 @@
|
||||
/*
|
||||
* Copyright 2010-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.geo.Metric;
|
||||
import org.springframework.data.geo.Metrics;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.core.index.IndexOperations;
|
||||
import org.springframework.data.mongodb.core.Venue;
|
||||
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
|
||||
import org.springframework.data.mongodb.core.index.GeospatialIndex;
|
||||
import org.springframework.data.mongodb.core.index.IndexField;
|
||||
import org.springframework.data.mongodb.core.index.IndexInfo;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class GeoSpatial2DSphereTests extends AbstractGeoSpatialTests {
|
||||
|
||||
@Test // DATAMONGO-360
|
||||
public void indexInfoIsCorrect() {
|
||||
|
||||
IndexOperations operations = template.indexOps(Venue.class);
|
||||
List<IndexInfo> indexInfo = operations.getIndexInfo();
|
||||
|
||||
assertThat(indexInfo.size(), is(2));
|
||||
|
||||
List<IndexField> fields = indexInfo.get(0).getIndexFields();
|
||||
assertThat(fields.size(), is(1));
|
||||
assertThat(fields, hasItem(IndexField.create("_id", Direction.ASC)));
|
||||
|
||||
fields = indexInfo.get(1).getIndexFields();
|
||||
assertThat(fields.size(), is(1));
|
||||
assertThat(fields, hasItem(IndexField.geo("location")));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1110
|
||||
public void geoNearWithMinDistance() {
|
||||
|
||||
NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).num(10).minDistance(1);
|
||||
|
||||
GeoResults<Venue> result = template.geoNear(geoNear, Venue.class);
|
||||
|
||||
assertThat(result.getContent().size(), is(not(0)));
|
||||
assertThat(result.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1110
|
||||
public void nearSphereWithMinDistance() {
|
||||
Point point = new Point(-73.99171, 40.738868);
|
||||
List<Venue> venues = template.find(query(where("location").nearSphere(point).minDistance(0.01)), Venue.class);
|
||||
assertThat(venues.size(), is(1));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void createIndex() {
|
||||
template.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void dropIndex() {
|
||||
template.indexOps(Venue.class).dropIndex("location_2dsphere");
|
||||
}
|
||||
}
|
||||
/*
|
||||
* Copyright 2010-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.geo.Metric;
|
||||
import org.springframework.data.geo.Metrics;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.core.index.IndexOperations;
|
||||
import org.springframework.data.mongodb.core.Venue;
|
||||
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
|
||||
import org.springframework.data.mongodb.core.index.GeospatialIndex;
|
||||
import org.springframework.data.mongodb.core.index.IndexField;
|
||||
import org.springframework.data.mongodb.core.index.IndexInfo;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class GeoSpatial2DSphereTests extends AbstractGeoSpatialTests {
|
||||
|
||||
@Test // DATAMONGO-360
|
||||
public void indexInfoIsCorrect() {
|
||||
|
||||
IndexOperations operations = template.indexOps(Venue.class);
|
||||
List<IndexInfo> indexInfo = operations.getIndexInfo();
|
||||
|
||||
assertThat(indexInfo.size(), is(2));
|
||||
|
||||
List<IndexField> fields = indexInfo.get(0).getIndexFields();
|
||||
assertThat(fields.size(), is(1));
|
||||
assertThat(fields, hasItem(IndexField.create("_id", Direction.ASC)));
|
||||
|
||||
fields = indexInfo.get(1).getIndexFields();
|
||||
assertThat(fields.size(), is(1));
|
||||
assertThat(fields, hasItem(IndexField.geo("location")));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1110
|
||||
public void geoNearWithMinDistance() {
|
||||
|
||||
NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).num(10).minDistance(1);
|
||||
|
||||
GeoResults<Venue> result = template.geoNear(geoNear, Venue.class);
|
||||
|
||||
assertThat(result.getContent().size(), is(not(0)));
|
||||
assertThat(result.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1110
|
||||
public void nearSphereWithMinDistance() {
|
||||
Point point = new Point(-73.99171, 40.738868);
|
||||
List<Venue> venues = template.find(query(where("location").nearSphere(point).minDistance(0.01)), Venue.class);
|
||||
assertThat(venues.size(), is(1));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void createIndex() {
|
||||
template.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void dropIndex() {
|
||||
template.indexOps(Venue.class).dropIndex("location_2dsphere");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,79 +1,79 @@
|
||||
/*
|
||||
* Copyright 2010-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.core.index.IndexOperations;
|
||||
import org.springframework.data.mongodb.core.Venue;
|
||||
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
|
||||
import org.springframework.data.mongodb.core.index.GeospatialIndex;
|
||||
import org.springframework.data.mongodb.core.index.IndexField;
|
||||
import org.springframework.data.mongodb.core.index.IndexInfo;
|
||||
|
||||
/**
|
||||
* Modified from https://github.com/deftlabs/mongo-java-geospatial-example
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class GeoSpatial2DTests extends AbstractGeoSpatialTests {
|
||||
|
||||
@Test
|
||||
public void nearPoint() {
|
||||
Point point = new Point(-73.99171, 40.738868);
|
||||
List<Venue> venues = template.find(query(where("location").near(point).maxDistance(0.01)), Venue.class);
|
||||
assertThat(venues.size(), is(7));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-360
|
||||
public void indexInfoIsCorrect() {
|
||||
|
||||
IndexOperations operations = template.indexOps(Venue.class);
|
||||
List<IndexInfo> indexInfo = operations.getIndexInfo();
|
||||
|
||||
assertThat(indexInfo.size(), is(2));
|
||||
|
||||
List<IndexField> fields = indexInfo.get(0).getIndexFields();
|
||||
assertThat(fields.size(), is(1));
|
||||
assertThat(fields, hasItem(IndexField.create("_id", Direction.ASC)));
|
||||
|
||||
fields = indexInfo.get(1).getIndexFields();
|
||||
assertThat(fields.size(), is(1));
|
||||
assertThat(fields, hasItem(IndexField.geo("location")));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void createIndex() {
|
||||
template.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2D));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void dropIndex() {
|
||||
template.indexOps(Venue.class).dropIndex("location_2d");
|
||||
}
|
||||
}
|
||||
/*
|
||||
* Copyright 2010-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.core.index.IndexOperations;
|
||||
import org.springframework.data.mongodb.core.Venue;
|
||||
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
|
||||
import org.springframework.data.mongodb.core.index.GeospatialIndex;
|
||||
import org.springframework.data.mongodb.core.index.IndexField;
|
||||
import org.springframework.data.mongodb.core.index.IndexInfo;
|
||||
|
||||
/**
|
||||
* Modified from https://github.com/deftlabs/mongo-java-geospatial-example
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class GeoSpatial2DTests extends AbstractGeoSpatialTests {
|
||||
|
||||
@Test
|
||||
public void nearPoint() {
|
||||
Point point = new Point(-73.99171, 40.738868);
|
||||
List<Venue> venues = template.find(query(where("location").near(point).maxDistance(0.01)), Venue.class);
|
||||
assertThat(venues.size(), is(7));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-360
|
||||
public void indexInfoIsCorrect() {
|
||||
|
||||
IndexOperations operations = template.indexOps(Venue.class);
|
||||
List<IndexInfo> indexInfo = operations.getIndexInfo();
|
||||
|
||||
assertThat(indexInfo.size(), is(2));
|
||||
|
||||
List<IndexField> fields = indexInfo.get(0).getIndexFields();
|
||||
assertThat(fields.size(), is(1));
|
||||
assertThat(fields, hasItem(IndexField.create("_id", Direction.ASC)));
|
||||
|
||||
fields = indexInfo.get(1).getIndexFields();
|
||||
assertThat(fields.size(), is(1));
|
||||
assertThat(fields, hasItem(IndexField.geo("location")));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void createIndex() {
|
||||
template.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2D));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void dropIndex() {
|
||||
template.indexOps(Venue.class).dropIndex("location_2d");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,14 +15,17 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.junit.Assert.assertThat;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
import java.lang.reflect.Field;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
import org.junit.Before;
|
||||
@@ -31,9 +34,9 @@ import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.springframework.core.annotation.AliasFor;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.model.Property;
|
||||
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
@@ -98,15 +101,16 @@ public class BasicMongoPersistentPropertyUnitTests {
|
||||
@Test // DATAMONGO-607
|
||||
public void usesCustomFieldNamingStrategyByDefault() throws Exception {
|
||||
|
||||
ClassTypeInformation<Person> type = ClassTypeInformation.from(Person.class);
|
||||
Field field = ReflectionUtils.findField(Person.class, "lastname");
|
||||
|
||||
MongoPersistentProperty property = new BasicMongoPersistentProperty(Property.of(field), entity,
|
||||
MongoPersistentProperty property = new BasicMongoPersistentProperty(Property.of(type, field), entity,
|
||||
SimpleTypeHolder.DEFAULT, UppercaseFieldNamingStrategy.INSTANCE);
|
||||
assertThat(property.getFieldName(), is("LASTNAME"));
|
||||
|
||||
field = ReflectionUtils.findField(Person.class, "firstname");
|
||||
|
||||
property = new BasicMongoPersistentProperty(Property.of(field), entity, SimpleTypeHolder.DEFAULT,
|
||||
property = new BasicMongoPersistentProperty(Property.of(type, field), entity, SimpleTypeHolder.DEFAULT,
|
||||
UppercaseFieldNamingStrategy.INSTANCE);
|
||||
assertThat(property.getFieldName(), is("foo"));
|
||||
}
|
||||
@@ -114,8 +118,10 @@ public class BasicMongoPersistentPropertyUnitTests {
|
||||
@Test // DATAMONGO-607
|
||||
public void rejectsInvalidValueReturnedByFieldNamingStrategy() {
|
||||
|
||||
ClassTypeInformation<Person> type = ClassTypeInformation.from(Person.class);
|
||||
Field field = ReflectionUtils.findField(Person.class, "lastname");
|
||||
MongoPersistentProperty property = new BasicMongoPersistentProperty(Property.of(field), entity,
|
||||
|
||||
MongoPersistentProperty property = new BasicMongoPersistentProperty(Property.of(type, field), entity,
|
||||
SimpleTypeHolder.DEFAULT, InvalidFieldNamingStrategy.INSTANCE);
|
||||
|
||||
exception.expect(MappingException.class);
|
||||
@@ -183,21 +189,34 @@ public class BasicMongoPersistentPropertyUnitTests {
|
||||
assertThat(property.getFieldName(), is("myField"));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1737
|
||||
public void honorsFieldOrderWhenIteratingOverProperties() {
|
||||
|
||||
MongoMappingContext context = new MongoMappingContext();
|
||||
BasicMongoPersistentEntity<?> entity = context.getPersistentEntity(Sample.class);
|
||||
|
||||
List<String> properties = new ArrayList<>();
|
||||
|
||||
entity.doWithProperties((MongoPersistentProperty property) -> properties.add(property.getName()));
|
||||
|
||||
assertThat(properties).containsExactly("first", "second", "third");
|
||||
}
|
||||
|
||||
private MongoPersistentProperty getPropertyFor(Field field) {
|
||||
return getPropertyFor(entity, field);
|
||||
}
|
||||
|
||||
private <T> MongoPersistentProperty getPropertyFor(Class<T> type, String fieldname) {
|
||||
private static <T> MongoPersistentProperty getPropertyFor(Class<T> type, String fieldname) {
|
||||
return getPropertyFor(new BasicMongoPersistentEntity<T>(ClassTypeInformation.from(type)), fieldname);
|
||||
}
|
||||
|
||||
private MongoPersistentProperty getPropertyFor(MongoPersistentEntity<?> persistentEntity, String fieldname) {
|
||||
return getPropertyFor(persistentEntity, ReflectionUtils.findField(persistentEntity.getType(), fieldname));
|
||||
private static MongoPersistentProperty getPropertyFor(MongoPersistentEntity<?> entity, String fieldname) {
|
||||
return getPropertyFor(entity, ReflectionUtils.findField(entity.getType(), fieldname));
|
||||
}
|
||||
|
||||
private MongoPersistentProperty getPropertyFor(MongoPersistentEntity<?> persistentEntity, Field field) {
|
||||
return new BasicMongoPersistentProperty(Property.of(field), persistentEntity, SimpleTypeHolder.DEFAULT,
|
||||
PropertyNameFieldNamingStrategy.INSTANCE);
|
||||
private static MongoPersistentProperty getPropertyFor(MongoPersistentEntity<?> entity, Field field) {
|
||||
return new BasicMongoPersistentProperty(Property.of(entity.getTypeInformation(), field), entity,
|
||||
SimpleTypeHolder.DEFAULT, PropertyNameFieldNamingStrategy.INSTANCE);
|
||||
}
|
||||
|
||||
class Person {
|
||||
@@ -210,6 +229,13 @@ public class BasicMongoPersistentPropertyUnitTests {
|
||||
@org.springframework.data.mongodb.core.mapping.Field(order = -20) String ssn;
|
||||
}
|
||||
|
||||
class Sample {
|
||||
|
||||
@org.springframework.data.mongodb.core.mapping.Field(order = 2) String second;
|
||||
@org.springframework.data.mongodb.core.mapping.Field(order = 3) String third;
|
||||
@org.springframework.data.mongodb.core.mapping.Field(order = 1) String first;
|
||||
}
|
||||
|
||||
enum UppercaseFieldNamingStrategy implements FieldNamingStrategy {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@@ -1,50 +1,50 @@
|
||||
/*
|
||||
* Copyright 2011-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
|
||||
import org.springframework.data.mongodb.core.mapping.event.LoggingEventListener;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
public class GeoIndexedAppConfig extends AbstractMongoConfiguration {
|
||||
|
||||
public static String GEO_DB = "database";
|
||||
public static String GEO_COLLECTION = "geolocation";
|
||||
|
||||
@Override
|
||||
public String getDatabaseName() {
|
||||
return GEO_DB;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Bean
|
||||
public MongoClient mongoClient() {
|
||||
return new MongoClient("127.0.0.1");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getMappingBasePackage() {
|
||||
return "org.springframework.data.mongodb.core.core.mapping";
|
||||
}
|
||||
|
||||
@Bean
|
||||
public LoggingEventListener mappingEventsListener() {
|
||||
return new LoggingEventListener();
|
||||
}
|
||||
}
|
||||
/*
|
||||
* Copyright 2011-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
|
||||
import org.springframework.data.mongodb.core.mapping.event.LoggingEventListener;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
public class GeoIndexedAppConfig extends AbstractMongoConfiguration {
|
||||
|
||||
public static String GEO_DB = "database";
|
||||
public static String GEO_COLLECTION = "geolocation";
|
||||
|
||||
@Override
|
||||
public String getDatabaseName() {
|
||||
return GEO_DB;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Bean
|
||||
public MongoClient mongoClient() {
|
||||
return new MongoClient("127.0.0.1");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getMappingBasePackage() {
|
||||
return "org.springframework.data.mongodb.core.core.mapping";
|
||||
}
|
||||
|
||||
@Bean
|
||||
public LoggingEventListener mappingEventsListener() {
|
||||
return new LoggingEventListener();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
/*
|
||||
* Copyright (c) 2011-2017 by the original author(s).
|
||||
* Copyright 2011-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -15,23 +15,23 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import static org.hamcrest.collection.IsCollectionWithSize.*;
|
||||
import static org.hamcrest.core.Is.*;
|
||||
import static org.hamcrest.core.IsEqual.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.core.DocumentTestUtils.assertTypeHint;
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
import static org.springframework.data.mongodb.core.DocumentTestUtils.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
|
||||
import java.net.UnknownHostException;
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.junit.After;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
|
||||
@@ -41,19 +41,17 @@ import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.mapping.DBRef;
|
||||
import org.springframework.data.mongodb.core.mapping.PersonPojoStringId;
|
||||
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
import lombok.Data;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Integration test for Mapping Events.
|
||||
*
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Christoph Strobl
|
||||
* @author Jordi Llach
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class ApplicationContextEventTests {
|
||||
|
||||
@@ -64,28 +62,40 @@ public class ApplicationContextEventTests {
|
||||
private final String[] collectionsToDrop = new String[] { COLLECTION_NAME, ROOT_COLLECTION_NAME,
|
||||
RELATED_COLLECTION_NAME };
|
||||
|
||||
private static MongoClient mongo;
|
||||
private ApplicationContext applicationContext;
|
||||
private MongoTemplate template;
|
||||
private SimpleMappingEventListener simpleMappingEventListener;
|
||||
private SimpleMappingEventListener listener;
|
||||
|
||||
@BeforeClass
|
||||
public static void beforeClass() {
|
||||
mongo = new MongoClient();
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void afterClass() {
|
||||
mongo.close();
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setUp() throws Exception {
|
||||
public void setUp() {
|
||||
|
||||
cleanDb();
|
||||
|
||||
applicationContext = new AnnotationConfigApplicationContext(ApplicationContextEventTestsAppConfig.class);
|
||||
template = applicationContext.getBean(MongoTemplate.class);
|
||||
template.setWriteConcern(WriteConcern.FSYNC_SAFE);
|
||||
simpleMappingEventListener = applicationContext.getBean(SimpleMappingEventListener.class);
|
||||
listener = applicationContext.getBean(SimpleMappingEventListener.class);
|
||||
}
|
||||
|
||||
@After
|
||||
public void cleanUp() throws Exception {
|
||||
public void cleanUp() {
|
||||
cleanDb();
|
||||
}
|
||||
|
||||
private void cleanDb() throws UnknownHostException {
|
||||
private void cleanDb() {
|
||||
|
||||
Mongo mongo = new MongoClient();
|
||||
DB db = mongo.getDB("database");
|
||||
MongoDatabase db = mongo.getDatabase("database");
|
||||
for (String coll : collectionsToDrop) {
|
||||
db.getCollection(coll).drop();
|
||||
}
|
||||
@@ -98,23 +108,23 @@ public class ApplicationContextEventTests {
|
||||
PersonBeforeSaveListener personBeforeSaveListener = applicationContext.getBean(PersonBeforeSaveListener.class);
|
||||
AfterSaveListener afterSaveListener = applicationContext.getBean(AfterSaveListener.class);
|
||||
|
||||
assertEquals(0, personBeforeSaveListener.seenEvents.size());
|
||||
assertEquals(0, afterSaveListener.seenEvents.size());
|
||||
assertThat(personBeforeSaveListener.seenEvents).isEmpty();
|
||||
assertThat(afterSaveListener.seenEvents).isEmpty();
|
||||
|
||||
assertEquals(0, simpleMappingEventListener.onBeforeSaveEvents.size());
|
||||
assertEquals(0, simpleMappingEventListener.onAfterSaveEvents.size());
|
||||
assertThat(listener.onBeforeSaveEvents).isEmpty();
|
||||
assertThat(listener.onAfterSaveEvents).isEmpty();
|
||||
|
||||
PersonPojoStringId p = new PersonPojoStringId("1", "Text");
|
||||
template.insert(p);
|
||||
|
||||
assertEquals(1, personBeforeSaveListener.seenEvents.size());
|
||||
assertEquals(1, afterSaveListener.seenEvents.size());
|
||||
assertThat(personBeforeSaveListener.seenEvents).hasSize(1);
|
||||
assertThat(afterSaveListener.seenEvents).hasSize(1);
|
||||
|
||||
assertEquals(1, simpleMappingEventListener.onBeforeSaveEvents.size());
|
||||
assertEquals(1, simpleMappingEventListener.onAfterSaveEvents.size());
|
||||
assertThat(listener.onBeforeSaveEvents).hasSize(1);
|
||||
assertThat(listener.onAfterSaveEvents).hasSize(1);
|
||||
|
||||
assertEquals(COLLECTION_NAME, simpleMappingEventListener.onBeforeSaveEvents.get(0).getCollectionName());
|
||||
assertEquals(COLLECTION_NAME, simpleMappingEventListener.onAfterSaveEvents.get(0).getCollectionName());
|
||||
assertThat(listener.onBeforeSaveEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
|
||||
assertThat(listener.onAfterSaveEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
|
||||
|
||||
Assert.assertTrue(personBeforeSaveListener.seenEvents.get(0) instanceof BeforeSaveEvent<?>);
|
||||
Assert.assertTrue(afterSaveListener.seenEvents.get(0) instanceof AfterSaveEvent<?>);
|
||||
@@ -142,14 +152,14 @@ public class ApplicationContextEventTests {
|
||||
|
||||
template.findOne(query(where("id").is(entity.getId())), PersonPojoStringId.class);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
assertThat(listener.onAfterLoadEvents).hasSize(1);
|
||||
assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
|
||||
|
||||
assertThat(simpleMappingEventListener.onBeforeConvertEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onBeforeConvertEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
assertThat(listener.onBeforeConvertEvents).hasSize(1);
|
||||
assertThat(listener.onBeforeConvertEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
assertThat(listener.onAfterConvertEvents).hasSize(1);
|
||||
assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1256
|
||||
@@ -160,14 +170,14 @@ public class ApplicationContextEventTests {
|
||||
template.aggregate(Aggregation.newAggregation(Aggregation.project("text")), PersonPojoStringId.class,
|
||||
PersonPojoStringId.class);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
assertThat(listener.onAfterLoadEvents).hasSize(1);
|
||||
assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
|
||||
|
||||
assertThat(simpleMappingEventListener.onBeforeConvertEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onBeforeConvertEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
assertThat(listener.onBeforeConvertEvents).hasSize(1);
|
||||
assertThat(listener.onBeforeConvertEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
assertThat(listener.onAfterConvertEvents).hasSize(1);
|
||||
assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1256
|
||||
@@ -178,15 +188,15 @@ public class ApplicationContextEventTests {
|
||||
|
||||
template.remove(entity);
|
||||
|
||||
assertThat(simpleMappingEventListener.onBeforeDeleteEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onBeforeDeleteEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
assertThat(listener.onBeforeDeleteEvents).hasSize(1);
|
||||
assertThat(listener.onBeforeDeleteEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterDeleteEvents.size(), is(1));
|
||||
assertThat(simpleMappingEventListener.onAfterDeleteEvents.get(0).getCollectionName(), is(COLLECTION_NAME));
|
||||
assertThat(listener.onAfterDeleteEvents).hasSize(1);
|
||||
assertThat(listener.onAfterDeleteEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1271
|
||||
public void publishesAfterLoadAndAfterConvertEventsForDBRef() throws Exception {
|
||||
public void publishesAfterLoadAndAfterConvertEventsForDBRef() {
|
||||
|
||||
Related ref1 = new Related(2L, "related desc1");
|
||||
|
||||
@@ -200,21 +210,17 @@ public class ApplicationContextEventTests {
|
||||
|
||||
template.findOne(query(where("id").is(source.getId())), Root.class);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents, hasSize(2));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(0).getCollectionName(),
|
||||
is(equalTo(ROOT_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(1).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterLoadEvents).hasSize(2);
|
||||
assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterLoadEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents, hasSize(2));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(0).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(1).getCollectionName(),
|
||||
is(equalTo(ROOT_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterConvertEvents).hasSize(2);
|
||||
assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterConvertEvents.get(1).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1271
|
||||
public void publishesAfterLoadAndAfterConvertEventsForLazyLoadingDBRef() throws Exception {
|
||||
public void publishesAfterLoadAndAfterConvertEventsForLazyLoadingDBRef() {
|
||||
|
||||
Related ref1 = new Related(2L, "related desc1");
|
||||
|
||||
@@ -228,27 +234,23 @@ public class ApplicationContextEventTests {
|
||||
|
||||
Root target = template.findOne(query(where("id").is(source.getId())), Root.class);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents, hasSize(1));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(0).getCollectionName(),
|
||||
is(equalTo(ROOT_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterLoadEvents).hasSize(1);
|
||||
assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents, hasSize(1));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(0).getCollectionName(),
|
||||
is(equalTo(ROOT_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterConvertEvents).hasSize(1);
|
||||
assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
|
||||
|
||||
target.getLazyReference().getDescription();
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents, hasSize(2));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(1).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterLoadEvents).hasSize(2);
|
||||
assertThat(listener.onAfterLoadEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents, hasSize(2));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(1).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterConvertEvents).hasSize(2);
|
||||
assertThat(listener.onAfterConvertEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1271
|
||||
public void publishesAfterLoadAndAfterConvertEventsForListOfDBRef() throws Exception {
|
||||
public void publishesAfterLoadAndAfterConvertEventsForListOfDBRef() {
|
||||
|
||||
List<Related> references = Arrays.asList(new Related(20L, "ref 1"), new Related(30L, "ref 2"));
|
||||
|
||||
@@ -262,25 +264,19 @@ public class ApplicationContextEventTests {
|
||||
|
||||
template.findOne(query(where("id").is(source.getId())), Root.class);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents, hasSize(3));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(0).getCollectionName(),
|
||||
is(equalTo(ROOT_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(1).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(2).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterLoadEvents).hasSize(3);
|
||||
assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterLoadEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterLoadEvents.get(2).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents, hasSize(3));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(0).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(1).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(2).getCollectionName(),
|
||||
is(equalTo(ROOT_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterConvertEvents).hasSize(3);
|
||||
assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterConvertEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterConvertEvents.get(2).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1271
|
||||
public void publishesAfterLoadAndAfterConvertEventsForLazyLoadingListOfDBRef() throws Exception {
|
||||
public void publishesAfterLoadAndAfterConvertEventsForLazyLoadingListOfDBRef() {
|
||||
|
||||
List<Related> references = Arrays.asList(new Related(20L, "ref 1"), new Related(30L, "ref 2"));
|
||||
|
||||
@@ -294,30 +290,24 @@ public class ApplicationContextEventTests {
|
||||
|
||||
Root target = template.findOne(query(where("id").is(source.getId())), Root.class);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents, hasSize(1));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(0).getCollectionName(),
|
||||
is(equalTo(ROOT_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents, hasSize(1));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(0).getCollectionName(),
|
||||
is(equalTo(ROOT_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterLoadEvents).hasSize(1);
|
||||
assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterConvertEvents).hasSize(1);
|
||||
assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
|
||||
|
||||
target.getLazyListOfReferences().size();
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents, hasSize(3));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents, hasSize(3));
|
||||
assertThat(listener.onAfterLoadEvents).hasSize(3);
|
||||
assertThat(listener.onAfterConvertEvents).hasSize(3);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(1).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(2).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(1).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(2).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterLoadEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterLoadEvents.get(2).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterConvertEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterConvertEvents.get(2).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1271
|
||||
public void publishesAfterLoadAndAfterConvertEventsForMapOfDBRef() throws Exception {
|
||||
public void publishesAfterLoadAndAfterConvertEventsForMapOfDBRef() {
|
||||
|
||||
Map<String, Related> references = new LinkedHashMap<String, Related>();
|
||||
references.put("ref-1", new Related(20L, "ref 1"));
|
||||
@@ -333,25 +323,19 @@ public class ApplicationContextEventTests {
|
||||
|
||||
template.findOne(query(where("id").is(source.getId())), Root.class);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents, hasSize(3));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(0).getCollectionName(),
|
||||
is(equalTo(ROOT_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(1).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(2).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterLoadEvents).hasSize(3);
|
||||
assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterLoadEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterLoadEvents.get(2).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents, hasSize(3));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(0).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(1).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(2).getCollectionName(),
|
||||
is(equalTo(ROOT_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterConvertEvents).hasSize(3);
|
||||
assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterConvertEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterConvertEvents.get(2).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1271
|
||||
public void publishesAfterLoadAndAfterConvertEventsForLazyLoadingMapOfDBRef() throws Exception {
|
||||
public void publishesAfterLoadAndAfterConvertEventsForLazyLoadingMapOfDBRef() {
|
||||
|
||||
Map<String, Related> references = new LinkedHashMap<String, Related>();
|
||||
references.put("ref-1", new Related(20L, "ref 1"));
|
||||
@@ -367,36 +351,48 @@ public class ApplicationContextEventTests {
|
||||
|
||||
Root target = template.findOne(query(where("id").is(source.getId())), Root.class);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents, hasSize(1));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(0).getCollectionName(),
|
||||
is(equalTo(ROOT_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterLoadEvents).hasSize(1);
|
||||
assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents, hasSize(1));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(0).getCollectionName(),
|
||||
is(equalTo(ROOT_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterConvertEvents).hasSize(1);
|
||||
assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(ROOT_COLLECTION_NAME);
|
||||
|
||||
target.getLazyMapOfReferences().size();
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents, hasSize(3));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(1).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterLoadEvents.get(2).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterLoadEvents).hasSize(3);
|
||||
assertThat(listener.onAfterLoadEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterLoadEvents.get(2).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents, hasSize(3));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(1).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(simpleMappingEventListener.onAfterConvertEvents.get(2).getCollectionName(),
|
||||
is(equalTo(RELATED_COLLECTION_NAME)));
|
||||
assertThat(listener.onAfterConvertEvents).hasSize(3);
|
||||
assertThat(listener.onAfterConvertEvents.get(1).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
assertThat(listener.onAfterConvertEvents.get(2).getCollectionName()).isEqualTo(RELATED_COLLECTION_NAME);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1823
|
||||
public void publishesAfterConvertEventForFindQueriesUsingProjections() {
|
||||
|
||||
PersonPojoStringId entity = new PersonPojoStringId("1", "Text");
|
||||
template.insert(entity);
|
||||
|
||||
template.query(PersonPojoStringId.class).matching(query(where("id").is(entity.getId()))).all();
|
||||
|
||||
assertThat(listener.onAfterLoadEvents).hasSize(1);
|
||||
assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
|
||||
|
||||
assertThat(listener.onBeforeConvertEvents).hasSize(1);
|
||||
assertThat(listener.onBeforeConvertEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
|
||||
|
||||
assertThat(listener.onAfterConvertEvents).hasSize(1);
|
||||
assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME);
|
||||
}
|
||||
|
||||
private void comparePersonAndDocument(PersonPojoStringId p, PersonPojoStringId p2, org.bson.Document document) {
|
||||
|
||||
assertEquals(p.getId(), p2.getId());
|
||||
assertEquals(p.getText(), p2.getText());
|
||||
assertThat(p2.getId()).isEqualTo(p.getId());
|
||||
assertThat(p2.getText()).isEqualTo(p.getText());
|
||||
|
||||
assertEquals("1", document.get("_id"));
|
||||
assertEquals("Text", document.get("text"));
|
||||
assertThat(document.get("_id")).isEqualTo("1");
|
||||
assertThat(document.get("text")).isEqualTo("Text");
|
||||
assertTypeHint(document, PersonPojoStringId.class);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,216 +1,216 @@
|
||||
/*
|
||||
* Copyright 2010-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.test.util.IsBsonObject.*;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
|
||||
import org.springframework.data.mongodb.core.geo.GeoJsonLineString;
|
||||
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
|
||||
|
||||
/**
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class CriteriaTests {
|
||||
|
||||
@Test
|
||||
public void testSimpleCriteria() {
|
||||
Criteria c = new Criteria("name").is("Bubba");
|
||||
assertEquals(Document.parse("{ \"name\" : \"Bubba\"}"), c.getCriteriaObject());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNotEqualCriteria() {
|
||||
Criteria c = new Criteria("name").ne("Bubba");
|
||||
assertEquals(Document.parse("{ \"name\" : { \"$ne\" : \"Bubba\"}}"), c.getCriteriaObject());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void buildsIsNullCriteriaCorrectly() {
|
||||
|
||||
Document reference = new Document("name", null);
|
||||
|
||||
Criteria criteria = new Criteria("name").is(null);
|
||||
assertThat(criteria.getCriteriaObject(), is(reference));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testChainedCriteria() {
|
||||
Criteria c = new Criteria("name").is("Bubba").and("age").lt(21);
|
||||
assertEquals(Document.parse("{ \"name\" : \"Bubba\" , \"age\" : { \"$lt\" : 21}}"), c.getCriteriaObject());
|
||||
}
|
||||
|
||||
@Test(expected = InvalidMongoDbApiUsageException.class)
|
||||
public void testCriteriaWithMultipleConditionsForSameKey() {
|
||||
Criteria c = new Criteria("name").gte("M").and("name").ne("A");
|
||||
c.getCriteriaObject();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void equalIfCriteriaMatches() {
|
||||
|
||||
Criteria left = new Criteria("name").is("Foo").and("lastname").is("Bar");
|
||||
Criteria right = new Criteria("name").is("Bar").and("lastname").is("Bar");
|
||||
|
||||
assertThat(left, is(not(right)));
|
||||
assertThat(right, is(not(left)));
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-507
|
||||
public void shouldThrowExceptionWhenTryingToNegateAndOperation() {
|
||||
|
||||
new Criteria() //
|
||||
.not() //
|
||||
.andOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-507
|
||||
public void shouldThrowExceptionWhenTryingToNegateOrOperation() {
|
||||
|
||||
new Criteria() //
|
||||
.not() //
|
||||
.orOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-507
|
||||
public void shouldThrowExceptionWhenTryingToNegateNorOperation() {
|
||||
|
||||
new Criteria() //
|
||||
.not() //
|
||||
.norOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-507
|
||||
public void shouldNegateFollowingSimpleExpression() {
|
||||
|
||||
Criteria c = Criteria.where("age").not().gt(18).and("status").is("student");
|
||||
Document co = c.getCriteriaObject();
|
||||
|
||||
assertThat(co, is(notNullValue()));
|
||||
assertThat(co, is(Document.parse("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}")));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1068
|
||||
public void getCriteriaObjectShouldReturnEmptyDocumentWhenNoCriteriaSpecified() {
|
||||
|
||||
Document document = new Criteria().getCriteriaObject();
|
||||
|
||||
assertThat(document, equalTo(new Document()));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1068
|
||||
public void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresent() {
|
||||
|
||||
Document document = new Criteria().lt("foo").getCriteriaObject();
|
||||
|
||||
assertThat(document, equalTo(new Document().append("$lt", "foo")));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1068
|
||||
public void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresentButMultipleCriteriasPresent() {
|
||||
|
||||
Document document = new Criteria().lt("foo").gt("bar").getCriteriaObject();
|
||||
|
||||
assertThat(document, equalTo(new Document().append("$lt", "foo").append("$gt", "bar")));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1068
|
||||
public void getCriteriaObjectShouldRespectNotWhenNoKeyPresent() {
|
||||
|
||||
Document document = new Criteria().lt("foo").not().getCriteriaObject();
|
||||
|
||||
assertThat(document, equalTo(new Document().append("$not", new Document("$lt", "foo"))));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1135
|
||||
public void geoJsonTypesShouldBeWrappedInGeometry() {
|
||||
|
||||
Document document = new Criteria("foo").near(new GeoJsonPoint(100, 200)).getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().containing("foo.$near.$geometry", new GeoJsonPoint(100, 200)));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1135
|
||||
public void legacyCoordinateTypesShouldNotBeWrappedInGeometry() {
|
||||
|
||||
Document document = new Criteria("foo").near(new Point(100, 200)).getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().notContaining("foo.$near.$geometry"));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1135
|
||||
public void maxDistanceShouldBeMappedInsideNearWhenUsedAlongWithGeoJsonType() {
|
||||
|
||||
Document document = new Criteria("foo").near(new GeoJsonPoint(100, 200)).maxDistance(50D).getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().containing("foo.$near.$maxDistance", 50D));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1135
|
||||
public void maxDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() {
|
||||
|
||||
Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).maxDistance(50D).getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().containing("foo.$nearSphere.$maxDistance", 50D));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1110
|
||||
public void minDistanceShouldBeMappedInsideNearWhenUsedAlongWithGeoJsonType() {
|
||||
|
||||
Document document = new Criteria("foo").near(new GeoJsonPoint(100, 200)).minDistance(50D).getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().containing("foo.$near.$minDistance", 50D));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1110
|
||||
public void minDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() {
|
||||
|
||||
Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).minDistance(50D).getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().containing("foo.$nearSphere.$minDistance", 50D));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1110
|
||||
public void minAndMaxDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() {
|
||||
|
||||
Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).minDistance(50D).maxDistance(100D)
|
||||
.getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().containing("foo.$nearSphere.$minDistance", 50D));
|
||||
assertThat(document, isBsonObject().containing("foo.$nearSphere.$maxDistance", 100D));
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1134
|
||||
public void intersectsShouldThrowExceptionWhenCalledWihtNullValue() {
|
||||
new Criteria("foo").intersects(null);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1134
|
||||
public void intersectsShouldWrapGeoJsonTypeInGeometryCorrectly() {
|
||||
|
||||
GeoJsonLineString lineString = new GeoJsonLineString(new Point(0, 0), new Point(10, 10));
|
||||
Document document = new Criteria("foo").intersects(lineString).getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().containing("foo.$geoIntersects.$geometry", lineString));
|
||||
}
|
||||
}
|
||||
/*
|
||||
* Copyright 2010-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.springframework.data.mongodb.test.util.IsBsonObject.*;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
|
||||
import org.springframework.data.mongodb.core.geo.GeoJsonLineString;
|
||||
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
|
||||
|
||||
/**
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class CriteriaTests {
|
||||
|
||||
@Test
|
||||
public void testSimpleCriteria() {
|
||||
Criteria c = new Criteria("name").is("Bubba");
|
||||
assertEquals(Document.parse("{ \"name\" : \"Bubba\"}"), c.getCriteriaObject());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNotEqualCriteria() {
|
||||
Criteria c = new Criteria("name").ne("Bubba");
|
||||
assertEquals(Document.parse("{ \"name\" : { \"$ne\" : \"Bubba\"}}"), c.getCriteriaObject());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void buildsIsNullCriteriaCorrectly() {
|
||||
|
||||
Document reference = new Document("name", null);
|
||||
|
||||
Criteria criteria = new Criteria("name").is(null);
|
||||
assertThat(criteria.getCriteriaObject(), is(reference));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testChainedCriteria() {
|
||||
Criteria c = new Criteria("name").is("Bubba").and("age").lt(21);
|
||||
assertEquals(Document.parse("{ \"name\" : \"Bubba\" , \"age\" : { \"$lt\" : 21}}"), c.getCriteriaObject());
|
||||
}
|
||||
|
||||
@Test(expected = InvalidMongoDbApiUsageException.class)
|
||||
public void testCriteriaWithMultipleConditionsForSameKey() {
|
||||
Criteria c = new Criteria("name").gte("M").and("name").ne("A");
|
||||
c.getCriteriaObject();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void equalIfCriteriaMatches() {
|
||||
|
||||
Criteria left = new Criteria("name").is("Foo").and("lastname").is("Bar");
|
||||
Criteria right = new Criteria("name").is("Bar").and("lastname").is("Bar");
|
||||
|
||||
assertThat(left, is(not(right)));
|
||||
assertThat(right, is(not(left)));
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-507
|
||||
public void shouldThrowExceptionWhenTryingToNegateAndOperation() {
|
||||
|
||||
new Criteria() //
|
||||
.not() //
|
||||
.andOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-507
|
||||
public void shouldThrowExceptionWhenTryingToNegateOrOperation() {
|
||||
|
||||
new Criteria() //
|
||||
.not() //
|
||||
.orOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-507
|
||||
public void shouldThrowExceptionWhenTryingToNegateNorOperation() {
|
||||
|
||||
new Criteria() //
|
||||
.not() //
|
||||
.norOperator(Criteria.where("delete").is(true).and("_id").is(42)); //
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-507
|
||||
public void shouldNegateFollowingSimpleExpression() {
|
||||
|
||||
Criteria c = Criteria.where("age").not().gt(18).and("status").is("student");
|
||||
Document co = c.getCriteriaObject();
|
||||
|
||||
assertThat(co, is(notNullValue()));
|
||||
assertThat(co, is(Document.parse("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}")));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1068
|
||||
public void getCriteriaObjectShouldReturnEmptyDocumentWhenNoCriteriaSpecified() {
|
||||
|
||||
Document document = new Criteria().getCriteriaObject();
|
||||
|
||||
assertThat(document, equalTo(new Document()));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1068
|
||||
public void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresent() {
|
||||
|
||||
Document document = new Criteria().lt("foo").getCriteriaObject();
|
||||
|
||||
assertThat(document, equalTo(new Document().append("$lt", "foo")));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1068
|
||||
public void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresentButMultipleCriteriasPresent() {
|
||||
|
||||
Document document = new Criteria().lt("foo").gt("bar").getCriteriaObject();
|
||||
|
||||
assertThat(document, equalTo(new Document().append("$lt", "foo").append("$gt", "bar")));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1068
|
||||
public void getCriteriaObjectShouldRespectNotWhenNoKeyPresent() {
|
||||
|
||||
Document document = new Criteria().lt("foo").not().getCriteriaObject();
|
||||
|
||||
assertThat(document, equalTo(new Document().append("$not", new Document("$lt", "foo"))));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1135
|
||||
public void geoJsonTypesShouldBeWrappedInGeometry() {
|
||||
|
||||
Document document = new Criteria("foo").near(new GeoJsonPoint(100, 200)).getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().containing("foo.$near.$geometry", new GeoJsonPoint(100, 200)));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1135
|
||||
public void legacyCoordinateTypesShouldNotBeWrappedInGeometry() {
|
||||
|
||||
Document document = new Criteria("foo").near(new Point(100, 200)).getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().notContaining("foo.$near.$geometry"));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1135
|
||||
public void maxDistanceShouldBeMappedInsideNearWhenUsedAlongWithGeoJsonType() {
|
||||
|
||||
Document document = new Criteria("foo").near(new GeoJsonPoint(100, 200)).maxDistance(50D).getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().containing("foo.$near.$maxDistance", 50D));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1135
|
||||
public void maxDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() {
|
||||
|
||||
Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).maxDistance(50D).getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().containing("foo.$nearSphere.$maxDistance", 50D));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1110
|
||||
public void minDistanceShouldBeMappedInsideNearWhenUsedAlongWithGeoJsonType() {
|
||||
|
||||
Document document = new Criteria("foo").near(new GeoJsonPoint(100, 200)).minDistance(50D).getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().containing("foo.$near.$minDistance", 50D));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1110
|
||||
public void minDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() {
|
||||
|
||||
Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).minDistance(50D).getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().containing("foo.$nearSphere.$minDistance", 50D));
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1110
|
||||
public void minAndMaxDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() {
|
||||
|
||||
Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).minDistance(50D).maxDistance(100D)
|
||||
.getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().containing("foo.$nearSphere.$minDistance", 50D));
|
||||
assertThat(document, isBsonObject().containing("foo.$nearSphere.$maxDistance", 100D));
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1134
|
||||
public void intersectsShouldThrowExceptionWhenCalledWihtNullValue() {
|
||||
new Criteria("foo").intersects(null);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1134
|
||||
public void intersectsShouldWrapGeoJsonTypeInGeometryCorrectly() {
|
||||
|
||||
GeoJsonLineString lineString = new GeoJsonLineString(new Point(0, 0), new Point(10, 10));
|
||||
Document document = new Criteria("foo").intersects(lineString).getCriteriaObject();
|
||||
|
||||
assertThat(document, isBsonObject().containing("foo.$geoIntersects.$geometry", lineString));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,95 +1,95 @@
|
||||
/*
|
||||
* Copyright 2010-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
|
||||
import org.springframework.data.mongodb.core.index.GeospatialIndex;
|
||||
import org.springframework.data.mongodb.core.index.Index;
|
||||
import org.springframework.data.mongodb.core.index.Index.Duplicates;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link Index}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Laurent Canet
|
||||
*/
|
||||
public class IndexUnitTests {
|
||||
|
||||
@Test
|
||||
public void testWithAscendingIndex() {
|
||||
Index i = new Index().on("name", Direction.ASC);
|
||||
assertEquals(Document.parse("{ \"name\" : 1}"), i.getIndexKeys());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWithDescendingIndex() {
|
||||
Index i = new Index().on("name", Direction.DESC);
|
||||
assertEquals(Document.parse("{ \"name\" : -1}"), i.getIndexKeys());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNamedMultiFieldUniqueIndex() {
|
||||
Index i = new Index().on("name", Direction.ASC).on("age", Direction.DESC);
|
||||
i.named("test").unique();
|
||||
assertEquals(Document.parse("{ \"name\" : 1 , \"age\" : -1}"), i.getIndexKeys());
|
||||
assertEquals(Document.parse("{ \"name\" : \"test\" , \"unique\" : true}"), i.getIndexOptions());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWithSparse() {
|
||||
Index i = new Index().on("name", Direction.ASC);
|
||||
i.sparse().unique();
|
||||
assertEquals(Document.parse("{ \"name\" : 1}"), i.getIndexKeys());
|
||||
assertEquals(Document.parse("{ \"unique\" : true , \"sparse\" : true}"), i.getIndexOptions());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGeospatialIndex() {
|
||||
GeospatialIndex i = new GeospatialIndex("location").withMin(0);
|
||||
assertEquals(Document.parse("{ \"location\" : \"2d\"}"), i.getIndexKeys());
|
||||
assertEquals(Document.parse("{ \"min\" : 0}"), i.getIndexOptions());
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-778
|
||||
public void testGeospatialIndex2DSphere() {
|
||||
|
||||
GeospatialIndex i = new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE);
|
||||
assertEquals(Document.parse("{ \"location\" : \"2dsphere\"}"), i.getIndexKeys());
|
||||
assertEquals(Document.parse("{ }"), i.getIndexOptions());
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-778
|
||||
public void testGeospatialIndexGeoHaystack() {
|
||||
|
||||
GeospatialIndex i = new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_HAYSTACK)
|
||||
.withAdditionalField("name").withBucketSize(40);
|
||||
assertEquals(Document.parse("{ \"location\" : \"geoHaystack\" , \"name\" : 1}"), i.getIndexKeys());
|
||||
assertEquals(Document.parse("{ \"bucketSize\" : 40.0}"), i.getIndexOptions());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void ensuresPropertyOrder() {
|
||||
|
||||
Index on = new Index("foo", Direction.ASC).on("bar", Direction.ASC);
|
||||
assertThat(on.getIndexKeys(), is(Document.parse("{ \"foo\" : 1 , \"bar\" : 1}")));
|
||||
}
|
||||
}
|
||||
/*
|
||||
* Copyright 2010-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
|
||||
import org.springframework.data.mongodb.core.index.GeospatialIndex;
|
||||
import org.springframework.data.mongodb.core.index.Index;
|
||||
import org.springframework.data.mongodb.core.index.Index.Duplicates;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link Index}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Laurent Canet
|
||||
*/
|
||||
public class IndexUnitTests {
|
||||
|
||||
@Test
|
||||
public void testWithAscendingIndex() {
|
||||
Index i = new Index().on("name", Direction.ASC);
|
||||
assertEquals(Document.parse("{ \"name\" : 1}"), i.getIndexKeys());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWithDescendingIndex() {
|
||||
Index i = new Index().on("name", Direction.DESC);
|
||||
assertEquals(Document.parse("{ \"name\" : -1}"), i.getIndexKeys());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNamedMultiFieldUniqueIndex() {
|
||||
Index i = new Index().on("name", Direction.ASC).on("age", Direction.DESC);
|
||||
i.named("test").unique();
|
||||
assertEquals(Document.parse("{ \"name\" : 1 , \"age\" : -1}"), i.getIndexKeys());
|
||||
assertEquals(Document.parse("{ \"name\" : \"test\" , \"unique\" : true}"), i.getIndexOptions());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWithSparse() {
|
||||
Index i = new Index().on("name", Direction.ASC);
|
||||
i.sparse().unique();
|
||||
assertEquals(Document.parse("{ \"name\" : 1}"), i.getIndexKeys());
|
||||
assertEquals(Document.parse("{ \"unique\" : true , \"sparse\" : true}"), i.getIndexOptions());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGeospatialIndex() {
|
||||
GeospatialIndex i = new GeospatialIndex("location").withMin(0);
|
||||
assertEquals(Document.parse("{ \"location\" : \"2d\"}"), i.getIndexKeys());
|
||||
assertEquals(Document.parse("{ \"min\" : 0}"), i.getIndexOptions());
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-778
|
||||
public void testGeospatialIndex2DSphere() {
|
||||
|
||||
GeospatialIndex i = new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE);
|
||||
assertEquals(Document.parse("{ \"location\" : \"2dsphere\"}"), i.getIndexKeys());
|
||||
assertEquals(Document.parse("{ }"), i.getIndexOptions());
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-778
|
||||
public void testGeospatialIndexGeoHaystack() {
|
||||
|
||||
GeospatialIndex i = new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_HAYSTACK)
|
||||
.withAdditionalField("name").withBucketSize(40);
|
||||
assertEquals(Document.parse("{ \"location\" : \"geoHaystack\" , \"name\" : 1}"), i.getIndexKeys());
|
||||
assertEquals(Document.parse("{ \"bucketSize\" : 40.0}"), i.getIndexOptions());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void ensuresPropertyOrder() {
|
||||
|
||||
Index on = new Index("foo", Direction.ASC).on("bar", Direction.ASC);
|
||||
assertThat(on.getIndexKeys(), is(Document.parse("{ \"foo\" : 1 , \"bar\" : 1}")));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,54 +1,54 @@
|
||||
/*
|
||||
* Copyright 2010-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
|
||||
/**
|
||||
* Unit tests for sorting.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class SortTests {
|
||||
|
||||
@Test
|
||||
public void testWithSortAscending() {
|
||||
|
||||
Query s = new Query().with(Sort.by(Direction.ASC, "name"));
|
||||
assertEquals(Document.parse("{ \"name\" : 1}"), s.getSortObject());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWithSortDescending() {
|
||||
|
||||
Query s = new Query().with(Sort.by(Direction.DESC, "name"));
|
||||
assertEquals(Document.parse("{ \"name\" : -1}"), s.getSortObject());
|
||||
}
|
||||
|
||||
@Test // DATADOC-177
|
||||
public void preservesOrderKeysOnMultipleSorts() {
|
||||
|
||||
Query sort = new Query().with(Sort.by(Direction.DESC, "foo").and(Sort.by(Direction.DESC, "bar")));
|
||||
assertThat(sort.getSortObject(), is(Document.parse("{ \"foo\" : -1 , \"bar\" : -1}")));
|
||||
}
|
||||
}
|
||||
/*
|
||||
* Copyright 2010-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.junit.Test;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
|
||||
/**
|
||||
* Unit tests for sorting.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class SortTests {
|
||||
|
||||
@Test
|
||||
public void testWithSortAscending() {
|
||||
|
||||
Query s = new Query().with(Sort.by(Direction.ASC, "name"));
|
||||
assertEquals(Document.parse("{ \"name\" : 1}"), s.getSortObject());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWithSortDescending() {
|
||||
|
||||
Query s = new Query().with(Sort.by(Direction.DESC, "name"));
|
||||
assertEquals(Document.parse("{ \"name\" : -1}"), s.getSortObject());
|
||||
}
|
||||
|
||||
@Test // DATADOC-177
|
||||
public void preservesOrderKeysOnMultipleSorts() {
|
||||
|
||||
Query sort = new Query().with(Sort.by(Direction.DESC, "foo").and(Sort.by(Direction.DESC, "bar")));
|
||||
assertThat(sort.getSortObject(), is(Document.parse("{ \"foo\" : -1 , \"bar\" : -1}")));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,64 @@
|
||||
/*
|
||||
* Copyright 2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.gridfs;
|
||||
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
import org.bson.BsonObjectId;
|
||||
import org.bson.Document;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.mongodb.MongoGridFSException;
|
||||
import com.mongodb.client.gridfs.model.GridFSFile;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link GridFsResource}.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @auhtor Christoph Strobl
|
||||
*/
|
||||
public class GridFsResourceUnitTests {
|
||||
|
||||
@Test // DATAMONGO-1850
|
||||
public void shouldReadContentTypeCorrectly() {
|
||||
|
||||
Document metadata = new Document(GridFsResource.CONTENT_TYPE_FIELD, "text/plain");
|
||||
GridFSFile file = new GridFSFile(new BsonObjectId(), "foo", 0, 0, new Date(), "foo", metadata);
|
||||
GridFsResource resource = new GridFsResource(file);
|
||||
|
||||
assertThat(resource.getContentType()).isEqualTo("text/plain");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1850
|
||||
public void shouldThrowExceptionOnEmptyContentType() {
|
||||
|
||||
GridFSFile file = new GridFSFile(new BsonObjectId(), "foo", 0, 0, new Date(), "foo", null);
|
||||
GridFsResource resource = new GridFsResource(file);
|
||||
|
||||
assertThatThrownBy(resource::getContentType).isInstanceOf(MongoGridFSException.class);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1850
|
||||
public void shouldThrowExceptionOnEmptyContentTypeInMetadata() {
|
||||
|
||||
GridFSFile file = new GridFSFile(new BsonObjectId(), "foo", 0, 0, new Date(), "foo", new Document());
|
||||
GridFsResource resource = new GridFsResource(file);
|
||||
|
||||
assertThatThrownBy(resource::getContentType).isInstanceOf(MongoGridFSException.class);
|
||||
}
|
||||
}
|
||||
@@ -15,11 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.gridfs;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.junit.Assert.assertThat;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
import static org.assertj.core.api.Assertions.*;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.where;
|
||||
import static org.springframework.data.mongodb.core.query.Query.*;
|
||||
import static org.springframework.data.mongodb.gridfs.GridFsCriteria.*;
|
||||
|
||||
@@ -42,6 +39,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.MongoGridFSException;
|
||||
import com.mongodb.client.gridfs.GridFSFindIterable;
|
||||
|
||||
/**
|
||||
@@ -73,8 +71,8 @@ public class GridFsTemplateIntegrationTests {
|
||||
List<com.mongodb.client.gridfs.model.GridFSFile> files = new ArrayList<com.mongodb.client.gridfs.model.GridFSFile>();
|
||||
GridFSFindIterable result = operations.find(query(where("_id").is(reference)));
|
||||
result.into(files);
|
||||
assertThat(files.size(), is(1));
|
||||
assertEquals(((BsonObjectId) files.get(0).getId()).getValue(), reference);
|
||||
assertThat(files.size()).isEqualTo(1);
|
||||
assertThat(((BsonObjectId) files.get(0).getId()).getValue()).isEqualTo(reference);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-6
|
||||
@@ -87,8 +85,8 @@ public class GridFsTemplateIntegrationTests {
|
||||
GridFSFindIterable result = operations.find(query(whereMetaData("key").is("value")));
|
||||
result.into(files);
|
||||
|
||||
assertThat(files.size(), is(1));
|
||||
assertEquals(((BsonObjectId) files.get(0).getId()).getValue(), reference);
|
||||
assertThat(files.size()).isEqualTo(1);
|
||||
assertThat(((BsonObjectId) files.get(0).getId()).getValue()).isEqualTo(reference);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-6
|
||||
@@ -103,8 +101,8 @@ public class GridFsTemplateIntegrationTests {
|
||||
GridFSFindIterable result = operations.find(query(whereFilename().is("foo.xml")));
|
||||
result.into(files);
|
||||
|
||||
assertThat(files.size(), is(1));
|
||||
assertEquals(((BsonObjectId) files.get(0).getId()).getValue(), reference);
|
||||
assertThat(files.size()).isEqualTo(1);
|
||||
assertThat(((BsonObjectId) files.get(0).getId()).getValue()).isEqualTo(reference);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-6
|
||||
@@ -114,10 +112,9 @@ public class GridFsTemplateIntegrationTests {
|
||||
|
||||
GridFsResource[] resources = operations.getResources("*.xml");
|
||||
|
||||
assertThat(resources.length, is(1));
|
||||
assertThat(((BsonObjectId) resources[0].getId()).getValue(), is(reference));
|
||||
assertThat(resources[0].contentLength(), is(resource.contentLength()));
|
||||
// assertThat(resources[0].getContentType(), is(resource.()));
|
||||
assertThat(resources.length).isEqualTo(1);
|
||||
assertThat(((BsonObjectId) resources[0].getId()).getValue()).isEqualTo(reference);
|
||||
assertThat(resources[0].contentLength()).isEqualTo(resource.contentLength());
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-6
|
||||
@@ -126,10 +123,9 @@ public class GridFsTemplateIntegrationTests {
|
||||
ObjectId reference = operations.store(resource.getInputStream(), "foo.xml");
|
||||
|
||||
GridFsResource[] resources = operations.getResources("foo.xml");
|
||||
assertThat(resources.length, is(1));
|
||||
assertThat(((BsonObjectId) resources[0].getId()).getValue(), is(reference));
|
||||
assertThat(resources[0].contentLength(), is(resource.contentLength()));
|
||||
// assertThat(resources[0].getContentType(), is(reference.getContentType()));
|
||||
assertThat(resources.length).isEqualTo(1);
|
||||
assertThat(((BsonObjectId) resources[0].getId()).getValue()).isEqualTo(reference);
|
||||
assertThat(resources[0].contentLength()).isEqualTo(resource.contentLength());
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-503
|
||||
@@ -141,8 +137,8 @@ public class GridFsTemplateIntegrationTests {
|
||||
GridFSFindIterable result = operations.find(query(whereContentType().is("application/xml")));
|
||||
result.into(files);
|
||||
|
||||
assertThat(files.size(), is(1));
|
||||
assertEquals(((BsonObjectId) files.get(0).getId()).getValue(), reference);
|
||||
assertThat(files.size()).isEqualTo(1);
|
||||
assertThat(((BsonObjectId) files.get(0).getId()).getValue()).isEqualTo(reference);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-534
|
||||
@@ -158,10 +154,8 @@ public class GridFsTemplateIntegrationTests {
|
||||
GridFSFindIterable result = operations.find(query);
|
||||
result.into(files);
|
||||
|
||||
assertThat(files, hasSize(3));
|
||||
assertEquals(((BsonObjectId) files.get(0).getId()).getValue(), first);
|
||||
assertEquals(((BsonObjectId) files.get(1).getId()).getValue(), second);
|
||||
assertEquals(((BsonObjectId) files.get(2).getId()).getValue(), third);
|
||||
assertThat(files).hasSize(3).extracting(it -> ((BsonObjectId) it.getId()).getValue()).containsExactly(first, second,
|
||||
third);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-534, DATAMONGO-1762
|
||||
@@ -169,12 +163,11 @@ public class GridFsTemplateIntegrationTests {
|
||||
|
||||
ObjectId reference = operations.store(resource.getInputStream(), "foo.xml");
|
||||
|
||||
List<com.mongodb.client.gridfs.model.GridFSFile> files = new ArrayList<com.mongodb.client.gridfs.model.GridFSFile>();
|
||||
List<com.mongodb.client.gridfs.model.GridFSFile> files = new ArrayList<>();
|
||||
GridFSFindIterable result = operations.find(new Query());
|
||||
result.into(files);
|
||||
|
||||
assertThat(files, hasSize(1));
|
||||
assertEquals(((BsonObjectId) files.get(0).getId()).getValue(), reference);
|
||||
assertThat(files).hasSize(1).extracting(it -> ((BsonObjectId) it.getId()).getValue()).containsExactly(reference);
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1762
|
||||
@@ -184,7 +177,7 @@ public class GridFsTemplateIntegrationTests {
|
||||
|
||||
@Test // DATAMONGO-813
|
||||
public void getResourceShouldReturnNullForNonExistingResource() {
|
||||
assertThat(operations.getResource("doesnotexist"), is(nullValue()));
|
||||
assertThat(operations.getResource("doesnotexist")).isNull();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-809
|
||||
@@ -197,8 +190,7 @@ public class GridFsTemplateIntegrationTests {
|
||||
GridFSFindIterable result = operations.find(query(whereMetaData("key").is("value")));
|
||||
result.into(files);
|
||||
|
||||
assertThat(files, hasSize(1));
|
||||
assertEquals(((BsonObjectId) files.get(0).getId()).getValue(), reference);
|
||||
assertThat(files).hasSize(1).extracting(it -> ((BsonObjectId) it.getId()).getValue()).containsExactly(reference);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-809
|
||||
@@ -212,8 +204,7 @@ public class GridFsTemplateIntegrationTests {
|
||||
GridFSFindIterable result = operations.find(query(whereMetaData("version").is("1.0")));
|
||||
result.into(files);
|
||||
|
||||
assertThat(files, hasSize(1));
|
||||
assertEquals(((BsonObjectId) files.get(0).getId()).getValue(), reference);
|
||||
assertThat(files).hasSize(1).extracting(it -> ((BsonObjectId) it.getId()).getValue()).containsExactly(reference);
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1695
|
||||
@@ -224,6 +215,15 @@ public class GridFsTemplateIntegrationTests {
|
||||
assertThat(operations.getResource("someName").getContentType()).isEqualTo("contentType");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-1850
|
||||
public void failsOnNonExistingContentTypeRetrieval() throws IOException {
|
||||
|
||||
operations.store(resource.getInputStream(), "no-content-type", (String) null);
|
||||
GridFsResource result = operations.getResource("no-content-type");
|
||||
|
||||
assertThatThrownBy(() -> result.getContentType()).isInstanceOf(MongoGridFSException.class);
|
||||
}
|
||||
|
||||
class Metadata {
|
||||
String version;
|
||||
}
|
||||
|
||||
@@ -1,71 +1,71 @@
|
||||
/*
|
||||
* Copyright 2002-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.monitor;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.net.UnknownHostException;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* This test class assumes that you are already running the MongoDB server.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Thomas Darimont
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:infrastructure.xml")
|
||||
public class MongoMonitorIntegrationTests {
|
||||
|
||||
@Autowired MongoClient mongoClient;
|
||||
|
||||
@Test
|
||||
public void serverInfo() {
|
||||
ServerInfo serverInfo = new ServerInfo(mongoClient);
|
||||
serverInfo.getVersion();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-685
|
||||
public void getHostNameShouldReturnServerNameReportedByMongo() throws UnknownHostException {
|
||||
|
||||
ServerInfo serverInfo = new ServerInfo(mongoClient);
|
||||
|
||||
String hostName = null;
|
||||
try {
|
||||
hostName = serverInfo.getHostName();
|
||||
} catch (UnknownHostException e) {
|
||||
throw e;
|
||||
}
|
||||
|
||||
assertThat(hostName, is(notNullValue()));
|
||||
assertThat(hostName, is("127.0.0.1"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void operationCounters() {
|
||||
OperationCounters operationCounters = new OperationCounters(mongoClient);
|
||||
operationCounters.getInsertCount();
|
||||
}
|
||||
}
|
||||
/*
|
||||
* Copyright 2002-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.monitor;
|
||||
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.net.UnknownHostException;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* This test class assumes that you are already running the MongoDB server.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Thomas Darimont
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:infrastructure.xml")
|
||||
public class MongoMonitorIntegrationTests {
|
||||
|
||||
@Autowired MongoClient mongoClient;
|
||||
|
||||
@Test
|
||||
public void serverInfo() {
|
||||
ServerInfo serverInfo = new ServerInfo(mongoClient);
|
||||
serverInfo.getVersion();
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-685
|
||||
public void getHostNameShouldReturnServerNameReportedByMongo() throws UnknownHostException {
|
||||
|
||||
ServerInfo serverInfo = new ServerInfo(mongoClient);
|
||||
|
||||
String hostName = null;
|
||||
try {
|
||||
hostName = serverInfo.getHostName();
|
||||
} catch (UnknownHostException e) {
|
||||
throw e;
|
||||
}
|
||||
|
||||
assertThat(hostName, is(notNullValue()));
|
||||
assertThat(hostName, is("127.0.0.1"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void operationCounters() {
|
||||
OperationCounters operationCounters = new OperationCounters(mongoClient);
|
||||
operationCounters.getInsertCount();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,80 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.test.util;
|
||||
|
||||
import org.assertj.core.error.BasicErrorMessageFactory;
|
||||
import org.assertj.core.error.ErrorMessageFactory;
|
||||
import org.assertj.core.internal.StandardComparisonStrategy;
|
||||
|
||||
/**
|
||||
* Utility class providing factory methods for {@link ErrorMessageFactory}.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
class AssertErrors {
|
||||
|
||||
/**
|
||||
* Creates a new {@link ShouldHaveProperty}.
|
||||
*
|
||||
* @param actual the actual value in the failed assertion.
|
||||
* @param key the key used in the failed assertion to compare the actual property key to.
|
||||
* @param value the value used in the failed assertion to compare the actual property value to.
|
||||
* @return the created {@link ErrorMessageFactory}.
|
||||
*/
|
||||
public static ErrorMessageFactory shouldHaveProperty(Object actual, String key, Object value) {
|
||||
return new ShouldHaveProperty(actual, key, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ShouldNotHaveProperty}.
|
||||
*
|
||||
* @param actual the actual value in the failed assertion.
|
||||
* @param key the key used in the failed assertion to compare the actual property key to.
|
||||
* @param value the value used in the failed assertion to compare the actual property value to.
|
||||
* @return the created {@link ErrorMessageFactory}.
|
||||
*/
|
||||
public static ErrorMessageFactory shouldNotHaveProperty(Object actual, String key, Object value) {
|
||||
return new ShouldNotHaveProperty(actual, key, value);
|
||||
}
|
||||
|
||||
private static class ShouldHaveProperty extends BasicErrorMessageFactory {
|
||||
|
||||
private ShouldHaveProperty(Object actual, String key, Object value) {
|
||||
super("\n" + //
|
||||
"Expecting:\n" + //
|
||||
" <%s>\n" + //
|
||||
"to have property with key:\n" + //
|
||||
" <%s>\n" + //
|
||||
"and value:\n" + //
|
||||
" <%s>\n" + //
|
||||
"%s", actual, key, value, StandardComparisonStrategy.instance());
|
||||
}
|
||||
}
|
||||
|
||||
private static class ShouldNotHaveProperty extends BasicErrorMessageFactory {
|
||||
|
||||
private ShouldNotHaveProperty(Object actual, String key, Object value) {
|
||||
super("\n" + //
|
||||
"Expecting:\n" + //
|
||||
" <%s>\n" + //
|
||||
"not to have property with key:\n" + //
|
||||
" <%s>\n" + //
|
||||
"and value:\n" + //
|
||||
" <%s>\n" + //
|
||||
"but actually found such property %s", actual, key, value, StandardComparisonStrategy.instance());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,41 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.test.util;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
/**
|
||||
* The entry point for all MongoDB assertions. This class extends {@link org.assertj.core.api.Assertions} for
|
||||
* convenience to statically import a single class.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public abstract class Assertions extends org.assertj.core.api.Assertions {
|
||||
|
||||
private Assertions() {
|
||||
// no instances allowed.
|
||||
}
|
||||
|
||||
/**
|
||||
* Create assertion for {@link Document}.
|
||||
*
|
||||
* @param actual the actual value.
|
||||
* @return the created assertion object.
|
||||
*/
|
||||
public static DocumentAssert assertThat(Document document) {
|
||||
return new DocumentAssert(document);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,384 @@
|
||||
/*
|
||||
* Copyright 2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.test.util;
|
||||
|
||||
import static org.assertj.core.error.ElementsShouldBe.*;
|
||||
import static org.assertj.core.error.ShouldContain.*;
|
||||
import static org.assertj.core.error.ShouldContainKeys.*;
|
||||
import static org.assertj.core.error.ShouldNotContain.*;
|
||||
import static org.assertj.core.error.ShouldNotContainKeys.*;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.Getter;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import org.assertj.core.api.AbstractMapAssert;
|
||||
import org.assertj.core.api.Condition;
|
||||
import org.assertj.core.error.ShouldContainAnyOf;
|
||||
import org.assertj.core.internal.Failures;
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Assertions for Mongo's {@link Document}. Assertions based on keys/entries are translated to document paths allowing
|
||||
* to assert nested elements.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* Document document = Document.parse("{ $set: { concreteInnerList: [ { foo: "bar", _class: … }] } }");
|
||||
*
|
||||
* assertThat(mappedUpdate).containsKey("$set.concreteInnerList.[0].foo").doesNotContainKey("$set.concreteInnerList.[0].bar");
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class DocumentAssert extends AbstractMapAssert<DocumentAssert, Map<String, Object>, String, Object> {
|
||||
|
||||
private final Document actual;
|
||||
|
||||
DocumentAssert(Document actual) {
|
||||
|
||||
super(actual, DocumentAssert.class);
|
||||
this.actual = actual;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.assertj.core.api.AbstractMapAssert#containsEntry(java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public DocumentAssert containsEntry(String key, Object value) {
|
||||
|
||||
Assert.hasText(key, "The key to look for must not be empty!");
|
||||
|
||||
Lookup<?> lookup = lookup(key);
|
||||
|
||||
if (!lookup.isPathFound() || !ObjectUtils.nullSafeEquals(value, lookup.getValue())) {
|
||||
throw Failures.instance().failure(info, AssertErrors.shouldHaveProperty(actual, key, value));
|
||||
}
|
||||
|
||||
return myself;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.assertj.core.api.AbstractMapAssert#doesNotContainEntry(java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public DocumentAssert doesNotContainEntry(String key, Object value) {
|
||||
|
||||
Assert.hasText(key, "The key to look for must not be empty!");
|
||||
|
||||
Lookup<?> lookup = lookup(key);
|
||||
|
||||
if (lookup.isPathFound() && ObjectUtils.nullSafeEquals(value, lookup.getValue())) {
|
||||
throw Failures.instance().failure(info, AssertErrors.shouldNotHaveProperty(actual, key, value));
|
||||
}
|
||||
|
||||
return myself;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.assertj.core.api.AbstractMapAssert#containsKey(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public DocumentAssert containsKey(String key) {
|
||||
return containsKeys(key);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.assertj.core.api.AbstractMapAssert#containsKeys(java.lang.Object[])
|
||||
*/
|
||||
@Override
|
||||
public final DocumentAssert containsKeys(String... keys) {
|
||||
|
||||
Set<String> notFound = new LinkedHashSet<>();
|
||||
|
||||
for (String key : keys) {
|
||||
|
||||
if (!lookup(key).isPathFound()) {
|
||||
notFound.add(key);
|
||||
}
|
||||
}
|
||||
|
||||
if (!notFound.isEmpty()) {
|
||||
throw Failures.instance().failure(info, shouldContainKeys(actual, notFound));
|
||||
}
|
||||
|
||||
return myself;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.assertj.core.api.AbstractMapAssert#doesNotContainKey(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public DocumentAssert doesNotContainKey(String key) {
|
||||
return doesNotContainKeys(key);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.assertj.core.api.AbstractMapAssert#doesNotContainKeys(java.lang.Object[])
|
||||
*/
|
||||
@Override
|
||||
public final DocumentAssert doesNotContainKeys(String... keys) {
|
||||
|
||||
Set<String> found = new LinkedHashSet<>();
|
||||
for (String key : keys) {
|
||||
|
||||
if (lookup(key).isPathFound()) {
|
||||
found.add(key);
|
||||
}
|
||||
}
|
||||
if (!found.isEmpty()) {
|
||||
throw Failures.instance().failure(info, shouldNotContainKeys(actual, found));
|
||||
}
|
||||
|
||||
return myself;
|
||||
}
|
||||
|
||||
// override methods to annotate them with @SafeVarargs, we unfortunately can't do that in AbstractMapAssert as it is
|
||||
// used in soft assertions which need to be able to proxy method - @SafeVarargs requiring method to be final prevents
|
||||
// using proxies.
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.assertj.core.api.AbstractMapAssert#contains(java.util.Map.Entry[])
|
||||
*/
|
||||
@SafeVarargs
|
||||
@Override
|
||||
public final DocumentAssert contains(Map.Entry<? extends String, ? extends Object>... entries) {
|
||||
|
||||
// if both actual and values are empty, then assertion passes.
|
||||
if (actual.isEmpty() && entries.length == 0) {
|
||||
return myself;
|
||||
}
|
||||
Set<Map.Entry<? extends String, ? extends Object>> notFound = new LinkedHashSet<>();
|
||||
for (Map.Entry<? extends String, ? extends Object> entry : entries) {
|
||||
if (!containsEntry(entry)) {
|
||||
notFound.add(entry);
|
||||
}
|
||||
}
|
||||
if (!notFound.isEmpty()) {
|
||||
throw Failures.instance().failure(info, shouldContain(actual, entries, notFound));
|
||||
}
|
||||
|
||||
return myself;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.assertj.core.api.AbstractMapAssert#containsAnyOf(java.util.Map.Entry[])
|
||||
*/
|
||||
@SafeVarargs
|
||||
@Override
|
||||
public final DocumentAssert containsAnyOf(Map.Entry<? extends String, ? extends Object>... entries) {
|
||||
|
||||
for (Map.Entry<? extends String, ? extends Object> entry : entries) {
|
||||
if (containsEntry(entry)) {
|
||||
return myself;
|
||||
}
|
||||
}
|
||||
|
||||
throw Failures.instance().failure(info, ShouldContainAnyOf.shouldContainAnyOf(actual, entries));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.assertj.core.api.AbstractMapAssert#containsOnly(java.util.Map.Entry[])
|
||||
*/
|
||||
@SafeVarargs
|
||||
@Override
|
||||
public final DocumentAssert containsOnly(Map.Entry<? extends String, ? extends Object>... entries) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.assertj.core.api.AbstractMapAssert#doesNotContain(java.util.Map.Entry[])
|
||||
*/
|
||||
@SafeVarargs
|
||||
@Override
|
||||
public final DocumentAssert doesNotContain(Map.Entry<? extends String, ? extends Object>... entries) {
|
||||
|
||||
Set<Map.Entry<? extends String, ? extends Object>> found = new LinkedHashSet<>();
|
||||
|
||||
for (Map.Entry<? extends String, ? extends Object> entry : entries) {
|
||||
if (containsEntry(entry)) {
|
||||
found.add(entry);
|
||||
}
|
||||
}
|
||||
if (!found.isEmpty()) {
|
||||
throw Failures.instance().failure(info, shouldNotContain(actual, entries, found));
|
||||
}
|
||||
|
||||
return myself;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.assertj.core.api.AbstractMapAssert#containsExactly(java.util.Map.Entry[])
|
||||
*/
|
||||
@SafeVarargs
|
||||
@Override
|
||||
public final DocumentAssert containsExactly(Map.Entry<? extends String, ? extends Object>... entries) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
private boolean containsEntry(Entry<? extends String, ?> entry) {
|
||||
|
||||
Lookup<?> lookup = lookup(entry.getKey());
|
||||
|
||||
return lookup.isPathFound() && ObjectUtils.nullSafeEquals(entry.getValue(), lookup.getValue());
|
||||
}
|
||||
|
||||
private <T> Lookup<T> lookup(String path) {
|
||||
return lookup(actual, path);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static <T> Lookup<T> lookup(Bson source, String path) {
|
||||
|
||||
String[] fragments = path.split("(?<!\\\\)\\.");
|
||||
|
||||
if (fragments.length == 1) {
|
||||
|
||||
Document document = (Document) source;
|
||||
String pathToUse = path.replace("\\.", ".");
|
||||
|
||||
if (document.containsKey(pathToUse)) {
|
||||
return Lookup.found((T) document.get(pathToUse));
|
||||
}
|
||||
|
||||
return Lookup.notFound();
|
||||
}
|
||||
|
||||
Iterator<String> it = Arrays.asList(fragments).iterator();
|
||||
|
||||
Object current = source;
|
||||
while (it.hasNext()) {
|
||||
|
||||
String key = it.next().replace("\\.", ".");
|
||||
|
||||
if (!(current instanceof Bson) && !key.startsWith("[")) {
|
||||
return Lookup.found(null);
|
||||
}
|
||||
|
||||
if (key.startsWith("[")) {
|
||||
|
||||
String indexNumber = key.substring(1, key.indexOf("]"));
|
||||
|
||||
if (current instanceof List) {
|
||||
current = ((List) current).get(Integer.valueOf(indexNumber));
|
||||
}
|
||||
|
||||
if (!it.hasNext()) {
|
||||
return Lookup.found((T) current);
|
||||
}
|
||||
} else {
|
||||
|
||||
if (current instanceof Document) {
|
||||
|
||||
Document document = (Document) current;
|
||||
|
||||
if (!it.hasNext() && !document.containsKey(key)) {
|
||||
return Lookup.notFound();
|
||||
}
|
||||
|
||||
current = document.get(key);
|
||||
}
|
||||
|
||||
if (!it.hasNext()) {
|
||||
return Lookup.found((T) current);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Lookup.notFound();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.assertj.core.api.AbstractMapAssert#hasEntrySatisfying(java.lang.Object, org.assertj.core.api.Condition)
|
||||
*/
|
||||
@Override
|
||||
public DocumentAssert hasEntrySatisfying(String key, Condition<? super Object> valueCondition) {
|
||||
|
||||
Lookup<Object> value = lookup(key);
|
||||
|
||||
if (!value.isPathFound() || !valueCondition.matches(value.getValue())) {
|
||||
throw Failures.instance().failure(info, elementsShouldBe(actual, value, valueCondition));
|
||||
}
|
||||
|
||||
return myself;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.assertj.core.api.AbstractMapAssert#hasEntrySatisfying(java.lang.Object, java.util.function.Consumer)
|
||||
*/
|
||||
@Override
|
||||
public DocumentAssert hasEntrySatisfying(String key, Consumer<? super Object> valueRequirements) {
|
||||
|
||||
containsKey(key);
|
||||
|
||||
valueRequirements.accept(lookup(key).getValue());
|
||||
|
||||
return myself;
|
||||
}
|
||||
|
||||
@RequiredArgsConstructor(access = AccessLevel.PRIVATE)
|
||||
@Getter
|
||||
static class Lookup<T> {
|
||||
|
||||
private final T value;
|
||||
private final boolean pathFound;
|
||||
|
||||
/**
|
||||
* Factory method to construct a lookup with a hit.
|
||||
*
|
||||
* @param value the actual value.
|
||||
* @return the lookup object.
|
||||
*/
|
||||
static <T> Lookup<T> found(T value) {
|
||||
return new Lookup<>(value, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to construct a lookup that yielded no match.
|
||||
*
|
||||
* @return the lookup object.
|
||||
*/
|
||||
static <T> Lookup<T> notFound() {
|
||||
return new Lookup<>(null, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -27,6 +27,7 @@
|
||||
* Support for `$caseSensitive` and `$diacriticSensitive` text search.
|
||||
* Support for GeoJSON Polygon with hole.
|
||||
* Performance improvements by bulk fetching ``DBRef``s.
|
||||
* Multi-faceted aggregations using `$facet`, `$bucket` and `$bucketAuto` via `Aggregation`.
|
||||
|
||||
[[new-features.1-9-0]]
|
||||
== What's new in Spring Data MongoDB 1.9
|
||||
|
||||
@@ -29,5 +29,5 @@ class Config {
|
||||
----
|
||||
====
|
||||
|
||||
If you expose a bean of type `AuditorAware` to the `ApplicationContext`, the auditing infrastructure will pick it up automatically and use it to determine the current user to be set on domain types. If you have multiple implementations registered in the `ApplicationContext`, you can select the one to be used by explicitly setting the `auditorAwareRef` attribute of `@EnableJpaAuditing`.
|
||||
If you expose a bean of type `AuditorAware` to the `ApplicationContext`, the auditing infrastructure will pick it up automatically and use it to determine the current user to be set on domain types. If you have multiple implementations registered in the `ApplicationContext`, you can select the one to be used by explicitly setting the `auditorAwareRef` attribute of `@EnableMongoAuditing`.
|
||||
|
||||
|
||||
@@ -1791,7 +1791,7 @@ At the time of this writing we provide support for the following Aggregation Ope
|
||||
[cols="2*"]
|
||||
|===
|
||||
| Pipeline Aggregation Operators
|
||||
| count, geoNear, graphLookup, group, limit, lookup, match, project, replaceRoot, skip, sort, unwind
|
||||
| bucket, bucketAuto, count, facet, geoNear, graphLookup, group, limit, lookup, match, project, replaceRoot, skip, sort, unwind
|
||||
|
||||
| Set Aggregation Operators
|
||||
| setEquals, setIntersection, setUnion, setDifference, setIsSubset, anyElementTrue, allElementsTrue
|
||||
@@ -1870,10 +1870,88 @@ project().and("foo").as("bar"), sort(ASC, "foo")
|
||||
|
||||
More examples for project operations can be found in the `AggregationTests` class. Note that further details regarding the projection expressions can be found in the http://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation.
|
||||
|
||||
[[mongo.aggregation.facet]]
|
||||
=== Faceted classification
|
||||
|
||||
MongoDB supports as of Version 3.4 faceted classification using the Aggregation Framework. A faceted classification uses semantic categories, either general or subject-specific, that are combined to create the full classification entry. Documents flowing through the aggregation pipeline are classificated into buckets. A multi-faceted classification enables various aggregations on the same set of input documents, without needing to retrieve the input documents multiple times.
|
||||
|
||||
==== Buckets
|
||||
|
||||
Bucket operations categorize incoming documents into groups, called buckets, based on a specified expression and bucket boundaries. Bucket operations require a grouping field or grouping expression. They can be defined via the `bucket()`/`bucketAuto()` methods of the `Aggregate` class. `BucketOperation` and `BucketAutoOperation` can expose accumulations based on aggregation expressions for input documents. The bucket operation can be extended with additional parameters through a fluent API via the `with…()` methods, the `andOutput(String)` method and aliased via the `as(String)` method. Each bucket is represented as a document in the output.
|
||||
|
||||
`BucketOperation` takes a defined set of boundaries to group incoming documents into these categories. Boundaries are required to be sorted.
|
||||
|
||||
.Bucket operation examples
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
// will generate {$bucket: {groupBy: $price, boundaries: [0, 100, 400]}}
|
||||
bucket("price").withBoundaries(0, 100, 400);
|
||||
|
||||
// will generate {$bucket: {groupBy: $price, default: "Other" boundaries: [0, 100]}}
|
||||
bucket("price").withBoundaries(0, 100).withDefault("Other");
|
||||
|
||||
// will generate {$bucket: {groupBy: $price, boundaries: [0, 100], output: { count: { $sum: 1}}}}
|
||||
bucket("price").withBoundaries(0, 100).andOutputCount().as("count");
|
||||
|
||||
// will generate {$bucket: {groupBy: $price, boundaries: [0, 100], output: { titles: { $push: "$title"}}}
|
||||
bucket("price").withBoundaries(0, 100).andOutput("title").push().as("titles");
|
||||
----
|
||||
====
|
||||
|
||||
`BucketAutoOperation` determines boundaries itself in an attempt to evenly distribute documents into a specified number of buckets. `BucketAutoOperation` optionally takes a granularity value that specifies the https://en.wikipedia.org/wiki/Preferred_number[preferred number] series to use to ensure that the calculated boundary edges end on preferred round numbers or their powers of 10.
|
||||
|
||||
.Bucket operation examples
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
// will generate {$bucketAuto: {groupBy: $price, buckets: 5}}
|
||||
bucketAuto("price", 5)
|
||||
|
||||
// will generate {$bucketAuto: {groupBy: $price, buckets: 5, granularity: "E24"}}
|
||||
bucketAuto("price", 5).withGranularity(Granularities.E24);
|
||||
|
||||
// will generate {$bucketAuto: {groupBy: $price, buckets: 5, output: { titles: { $push: "$title"}}}
|
||||
bucketAuto("price", 5).andOutput("title").push().as("titles");
|
||||
----
|
||||
====
|
||||
|
||||
Bucket operations can use `AggregationExpression` via `andOutput()` and <<mongo.aggregation.projection.expressions, SpEL expressions>> via `andOutputExpression()` to create output fields in buckets.
|
||||
|
||||
Note that further details regarding bucket expressions can be found in the http://docs.mongodb.org/manual/reference/operator/aggregation/bucket/[`$bucket` section] and
|
||||
http://docs.mongodb.org/manual/reference/operator/aggregation/bucketAuto/[`$bucketAuto` section] of the MongoDB Aggregation Framework reference documentation.
|
||||
|
||||
==== Multi-faceted aggregation
|
||||
|
||||
Multiple aggregation pipelines can be used to create multi-faceted aggregations which characterize data across multiple dimensions, or facets, within a single aggregation stage. Multi-faceted aggregations provide multiple filters and categorizations to guide data browsing and analysis. A common implementation of faceting is how many online retailers provide ways to narrow down search results by applying filters on product price, manufacturer, size, etc.
|
||||
|
||||
A `FacetOperation` can be defined via the `facet()` method of the `Aggregation` class. It can be customized with multiple aggregation pipelines via the `and()` method. Each sub-pipeline has its own field in the output document where its results are stored as an array of documents.
|
||||
|
||||
Sub-pipelines can project and filter input documents prior to grouping. Common use cases include the extraction of date parts or calculations before categorization.
|
||||
|
||||
.Facet operation examples
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
// will generate {$facet: {categorizedByPrice: [ { $match: { price: {$exists : true}}}, { $bucketAuto: {groupBy: $price, buckets: 5}}]}}
|
||||
facet(match(Criteria.where("price").exists(true)), bucketAuto("price", 5)).as("categorizedByPrice");
|
||||
|
||||
// will generate {$facet: {categorizedByYear: [
|
||||
// { $project: { title: 1, publicationYear: { $year: "publicationDate"}}},
|
||||
// { $bucketAuto: {groupBy: $price, buckets: 5, output: { titles: {$push:"$title"}}}
|
||||
// ]}}
|
||||
facet(project("title").and("publicationDate").extractYear().as("publicationYear"),
|
||||
bucketAuto("publicationYear", 5).andOutput("title").push().as("titles"))
|
||||
  .as("categorizedByYear");
|
||||
----
|
||||
====
|
||||
|
||||
Note that further details regarding facet operation can be found in the http://docs.mongodb.org/manual/reference/operator/aggregation/facet/[`$facet` section] of the MongoDB Aggregation Framework reference documentation.
|
||||
|
||||
[[mongo.aggregation.projection.expressions]]
|
||||
==== Spring Expression Support in Projection Expressions
|
||||
|
||||
As of Version 1.4.0 we support the use of SpEL expression in projection expressions via the `andExpression` method of the `ProjectionOperation` class. This allows you to define the desired expression as a SpEL expression which is translated into a corresponding MongoDB projection expression part on query execution. This makes it much easier to express complex calculations.
|
||||
We support the use of SpEL expression in projection expressions via the `andExpression` method of the `ProjectionOperation` and `BucketOperation` classes. This allows you to define the desired expression as a SpEL expression which is translated into a corresponding MongoDB projection expression part on query execution. This makes it much easier to express complex calculations.
|
||||
|
||||
===== Complex calculations with SpEL expressions
|
||||
|
||||
@@ -2419,13 +2497,14 @@ NOTE: Collection creation allows customization via `CollectionOptions` and suppo
|
||||
[[mongo-template.commands]]
|
||||
== Executing Commands
|
||||
|
||||
You can also get at the MongoDB driver's `DB.command( )` method using the `executeCommand(…)` methods on `MongoTemplate`. These will also perform exception translation into Spring's `DataAccessException` hierarchy.
|
||||
You can also get at the MongoDB driver's `MongoDatabase.runCommand( )` method using the `executeCommand(…)` methods on `MongoTemplate`. These will also perform exception translation into Spring's `DataAccessException` hierarchy.
|
||||
|
||||
[[mongo-template.commands.execution]]
|
||||
=== Methods for executing commands
|
||||
|
||||
* `CommandResult` *executeCommand* `(Document command)` Execute a MongoDB command.
|
||||
* `CommandResult` *executeCommand* `(String jsonCommand)` Execute a MongoDB command expressed as a JSON string.
|
||||
* `Document` *executeCommand* `(Document command)` Execute a MongoDB command.
|
||||
* `Document` *executeCommand* `(Document command, ReadPreference readPreference)` Execute a MongoDB command using the given nullable MongoDB `ReadPreference`.
|
||||
* `Document` *executeCommand* `(String jsonCommand)` Execute a MongoDB command expressed as a JSON string.
|
||||
|
||||
[[mongodb.mapping-usage.events]]
|
||||
== Lifecycle Events
|
||||
@@ -2602,7 +2681,7 @@ class GridFsClient {
|
||||
|
||||
@Test
|
||||
public void findFilesInGridFs() {
|
||||
List<GridFSDBFile> result = operations.find(query(whereFilename().is("filename.txt")))
|
||||
GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt")))
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
@@ -190,10 +190,30 @@ public interface PersonRepository extends ReactiveMongoRepository<Person, String
|
||||
[[mongo.reactive.repositories.infinite-streams]]
|
||||
== Infinite Streams with Tailable Cursors
|
||||
|
||||
By default, MongoDB will automatically close a cursor when the client has exhausted all results in the cursor. Closing a cursor turns a stream into a finite stream. However, for capped collections you may use a https://docs.mongodb.com/manual/core/tailable-cursors/[Tailable Cursor] that remains open after the client exhausts the results in the initial cursor. Using tailable cursors with a reactive approach allows construction of infinite streams. A tailable cursor remains open until it is closed. It emits data as data arrives in a capped collection. Using tailable cursors with regular collections is not possible, as their result would never complete.
|
||||
By default, MongoDB will automatically close a cursor when the client exhausts all results supplied by the cursor. Closing a cursor on exhaustion turns a stream into a finite stream. For https://docs.mongodb.com/manual/core/capped-collections/[capped collections] you may use a https://docs.mongodb.com/manual/core/tailable-cursors/[Tailable Cursor] that remains open after the client consumes all initially returned data. Using tailable cursors with reactive data types allows construction of infinite streams. A tailable cursor remains open until it is closed externally. It emits data as new documents arrive in a capped collection.
|
||||
|
||||
Spring Data MongoDB Reactive Repository support supports infinite streams by annotating a query method with `@TailableCursor`. This works for methods returning `Flux` or `Observable` wrapper types.
|
||||
Tailable cursors may become dead, or invalid, if either the query returns no match or the cursor returns the document at the "end" of the collection and then the application deletes that document.
|
||||
|
||||
|
||||
.Infinite Stream queries with ReactiveMongoOperations
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
Flux<Person> stream = template.tail(query(where("name").is("Joe")), Person.class);
|
||||
|
||||
Disposable subscription = stream.doOnNext(person -> System.out.println(person)).subscribe();
|
||||
|
||||
// …
|
||||
|
||||
// Later: Dispose the subscription to close the stream
|
||||
subscription.dispose();
|
||||
----
|
||||
====
|
||||
|
||||
Spring Data MongoDB Reactive repositories support infinite streams by annotating a query method with `@Tailable`. This works for methods returning `Flux` and other reactive types capable of emitting multiple elements.
|
||||
|
||||
.Infinite Stream queries with ReactiveMongoRepository
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
|
||||
@@ -210,6 +230,9 @@ Disposable subscription = stream.doOnNext(System.out::println).subscribe();
|
||||
|
||||
// …
|
||||
|
||||
// Later: Dispose the stream
|
||||
// Later: Dispose the subscription to close the stream
|
||||
subscription.dispose();
|
||||
----
|
||||
====
|
||||
|
||||
TIP: Capped collections can be created via `MongoOperations.createCollection`. Just provide the required `CollectionOptions.empty().capped()...`
|
||||
|
||||
@@ -457,24 +457,6 @@ NOTE: This example is meant to show the use of save, update and remove operation
|
||||
|
||||
The query syntax used in the example is explained in more detail in the section <<mongo.query,Querying Documents>>. Additional documentation can be found in <<mongo-template, the blocking MongoTemplate>> section.
|
||||
|
||||
[[mongo.reactive.tailcursors]]
|
||||
== Infinite Streams
|
||||
|
||||
By default, MongoDB will automatically close a cursor when the client has exhausted all results in the cursor. Closing a cursor turns a stream into a finite stream. However, for capped collections you may use a https://docs.mongodb.com/manual/core/tailable-cursors/[Tailable Cursor] that remains open after the client exhausts the results in the initial cursor. Using tailable cursors with a reactive approach allows construction of infinite streams. A tailable cursor remains open until it is closed. It emits data as data arrives in a capped collection. Using tailable cursors with regular collections is not possible, as their result would never complete.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Flux<Person> stream = template.tail(query(where("name").is("Joe")), Person.class);
|
||||
|
||||
Disposable subscription = stream.doOnNext(person -> System.out.println(person)).subscribe();
|
||||
|
||||
// …
|
||||
|
||||
// Later: Dispose the stream
|
||||
subscription.dispose();
|
||||
----
|
||||
|
||||
|
||||
[[mongo.reactive.executioncallback]]
|
||||
== Execution callbacks
|
||||
|
||||
|
||||
@@ -1,6 +1,62 @@
|
||||
Spring Data MongoDB Changelog
|
||||
=============================
|
||||
|
||||
Changes in version 2.0.3.RELEASE (2018-01-24)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1858 - Fix line endings.
|
||||
* DATAMONGO-1850 - GridFsResource.getContentType() throws NullPointerException on absent metadata.
|
||||
* DATAMONGO-1843 - Aggregation operator $reduce with ArrayOperators.Reduce produce a wrong Document.
|
||||
* DATAMONGO-1831 - Failure to read Scala collection types in MappingMongoConverter.
|
||||
* DATAMONGO-1830 - Release 2.0.3 (Kay SR3).
|
||||
* DATAMONGO-1824 - Assert compatibility with MongoDB Server 3.6.
|
||||
|
||||
|
||||
Changes in version 1.10.10.RELEASE (2018-01-24)
|
||||
-----------------------------------------------
|
||||
* DATAMONGO-1843 - Aggregation operator $reduce with ArrayOperators.Reduce produce a wrong Document.
|
||||
* DATAMONGO-1831 - Failure to read Scala collection types in MappingMongoConverter.
|
||||
* DATAMONGO-1829 - Release 1.10.10 (Ingalls SR10).
|
||||
* DATAMONGO-1824 - Assert compatibility with MongoDB Server 3.6.
|
||||
|
||||
|
||||
Changes in version 2.0.2.RELEASE (2017-11-27)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1823 - AfterConvertEvent is not published when using custom methods in repository interface.
|
||||
* DATAMONGO-1821 - Fix method ambiguity in tests when compiling against MongoDB 3.6.
|
||||
* DATAMONGO-1820 - Investigate failing TravisCI build.
|
||||
* DATAMONGO-1818 - Reference documentation mentions @TailableCursor instead of @Tailable.
|
||||
* DATAMONGO-1817 - Kotlin extensions should return nullable types.
|
||||
* DATAMONGO-1816 - Release 2.0.2 (Kay SR2).
|
||||
|
||||
|
||||
Changes in version 1.10.9.RELEASE (2017-11-27)
|
||||
----------------------------------------------
|
||||
* DATAMONGO-1809 - Type hint usage broken when using positional parameters with more than one digit.
|
||||
* DATAMONGO-1799 - Release 1.10.9 (Ingalls SR9).
|
||||
* DATAMONGO-1696 - Reference documentation uses JPA Annotations.
|
||||
|
||||
|
||||
Changes in version 2.0.1.RELEASE (2017-10-27)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1815 - Adapt API changes in Property in test cases.
|
||||
* DATAMONGO-1814 - Missing documentation on Faceted classification.
|
||||
* DATAMONGO-1811 - Reference Documentation doesn't match with API Documentation 2.X version.
|
||||
* DATAMONGO-1809 - Type hint usage broken when using positional parameters with more than one digit.
|
||||
* DATAMONGO-1806 - GridFsResource wrong type in javaDoc.
|
||||
* DATAMONGO-1805 - Documentation for operations.find uses wrong result type.
|
||||
* DATAMONGO-1802 - No converter found capable of converting from type org.bson.types.Binary to type byte[].
|
||||
* DATAMONGO-1795 - Remove obsolete Kotlin build configuration.
|
||||
* DATAMONGO-1793 - Release 2.0.1 (Kay SR1).
|
||||
* DATAMONGO-1696 - Reference documentation uses JPA Annotations.
|
||||
|
||||
|
||||
Changes in version 1.10.8.RELEASE (2017-10-11)
|
||||
----------------------------------------------
|
||||
* DATAMONGO-1784 - Add support for AggregationExpression in GroupOperation.sum.
|
||||
* DATAMONGO-1782 - CyclicPropertyReferenceException on index resolution.
|
||||
* DATAMONGO-1775 - Release 1.10.8 (Ingalls SR8).
|
||||
|
||||
|
||||
Changes in version 2.0.0.RELEASE (2017-10-02)
|
||||
---------------------------------------------
|
||||
* DATAMONGO-1791 - Adapt to changed Spring Framework 5 documentation structure.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
Spring Data MongoDB 2.0 GA
|
||||
Spring Data MongoDB 2.0.3
|
||||
Copyright (c) [2010-2015] Pivotal Software, Inc.
|
||||
|
||||
This product is licensed to you under the Apache License, Version 2.0 (the "License").
|
||||
|
||||
Reference in New Issue
Block a user