Compare commits
254 commits: 1.7.x ... 1.9.7.RELEASE
Commit SHA1s (254):

00d9da3027 53eaaa02a0 315b642b3b 958752e72f 6b1dbe372e a644187131 681a4f9855 f2be1b2ca9
3c203eba8e f3b0665d94 9f43d3fc5a 95985fffc8 6c6ac6da5b c1ac8767b7 96068eb0e2 36d2e0942b
b585783b75 3924b6f12a 2674880946 6c6f953a42 772e8ac85e 2bbffed62b 685990bdd6 ff83ac3fb4
6827a09f26 a5148f89c1 995a680823 9f0abb69fd d65eebe9c3 ca4f1f1b7c 46b119ce71 fc0dd7d094
712d8be7bb 536dcc14ca dc44c3a455 8e90366712 ed36fd7260 31a6a74743 001ff508b5 6882fa9d10
91eaae0ef6 785dc6ab78 f011a9a4ee c6c58050e7 5fce8bcac6 2f522bae5c 2e6f91924d 15f7a9c74a
49f52f0258 396ea471fb eef17dd000 84dc03b9d1 0ce220d54f b693136396 3c117db43b 075ccb1d00
e3bddd1c19 22b113ce64 0f5e91b091 557a528690 762569c826 ad7d82f521 04deaacbec ec443f2b5e
f1b04ff354 62dd7d070a 5df92a86a3 2ca3df1ff4 a8751249fd 19abff826e 5f199cf81f eb26b78a19
3d0053c61a 7b15d246e8 b75f4a2834 d6ac4c6df5 02f56c88f5 cd35b9ed2a e8944a6c3a 6fcbc225eb
f06eda488c 0ef910445d b22ee9d27c 859a0e83c8 b35f151b80 17afb07e45 b407963344 8b31ba1836
0cf6edae43 0824105377 dc936a5b7b c8fe02e48e 32547db306 41902154ca 2354ced1bf 791cc3a1b8
021c03fbbf e4a59f29d0 64d4880983 47c348e03a dea86535c1 eee6b62589 771ca8d84c 8f5b334951
0dc6169282 abe78f0428 9930ec2d19 83d7f4477e 18c3704c2e bef581caa5 2f0abe0604 4235b44c47
f318185ad0 43b496287c 9d0c8ecdc3 5a78d99af0 693f5ddf6e ece655f67d 119692c979 6068f3243a
a7cda2e793 2687cb85f0 b2ce1700d2 0b634f8340 9a078b743f 65b6576cfc 78e99e6df2 bb0a42733d
a2ae08e263 eaa9d6c7e6 8900695153 bfe548d573 7ab4002771 6eace856aa f10e5a19c5 90a4a63776
0f14e35ba3 ad0c4207d6 97da43645a 42b7c42617 bd81e25e6b debe6aa649 6f433902f0 ba902e7f8e
7e8ec21684 b7131b7efc ace99c3464 83fc5bc113 160de0adf6 b4753f3a83 bce6e2c78c b5ea0eccd2
87865b9761 13fa4703c0 5a21e00322 3feed2bc5a 501b9501e0 727271e68c 63a619dddf 113566a6ab
7862841b48 fe6cbaa03d 9ef1fc7304 cf3a9d3ced 1d1c80db7b eeb37e9104 18bf0daee7 1e9189aee7
95f6dfafdd bedaae8a90 7bfa3fe7fd 143b0b73b9 cbfc46270e b31efb46ec ef3477098f 9dce117555
e66e1e0502 19e1e9daeb ec8a948f3f 38fc7641a0 ddc3925659 f8416edf8f 4f94f37ce8 528de58418
e6ea34aed8 f171938b00 7b27368d2d f754df51bc 77dce53c7a 73f268e7c4 075d7d8131 206337044a
55b44ff7aa ae48639ae9 6b5e78f810 3e485e0a88 335c78f908 b103e4eaf6 c4a6c63d23 4a4f10f97b
a5712daab7 28cb1ef106 0d99a3e527 9da43263ce 784e199068 1ffee802c0 6f0ac7f0c2 941d4d8985
44c76d8ffb df9a9f5fb6 bebd0fa0e6 594e90789d f2ab42cb80 3224fa8ce7 ce156c1344 434e553022
de5b5ee4b0 60636bf56d 1ca71f93e9 63ff39bed6 cb0b9604d4 1dbe3b62d7 5c0707d221 c4ffc37dd5
aaf93b0f6f 23eab1e84f 218f32e552 62fbe4d08c 41ffd00619 98b9a604cf 01468b640a 4d96b036a2
2d1ac15e24 2c27e8576f 67f638d953 ea5bd5f7d3 394f695416 e4db466ab9 ee04c014c9 ea84f08de8
7d8a2b2d56 995d1e5aac 3b918492ae 66b419163c 52bff39c22 d151a13e87 5e7e7d3598 356248bd05
73a60153f6 67cf0e62a7 21fbcc3e67 0d63ff92a0 983645e222 d2805bfa47
.github/PULL_REQUEST_TEMPLATE.md (9 changes, new file, vendored)

@@ -0,0 +1,9 @@
Thank you for proposing a pull request. This template will guide you through the essential steps necessary for a pull request.
Make sure that:

- [ ] You have read the [Spring Data contribution guidelines](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc).
- [ ] There is a ticket in the bug tracker for the project in our [JIRA](https://jira.spring.io/browse/DATAMONGO).
- [ ] You use the code formatters provided [here](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide) and have them applied to your changes. Don’t submit any formatting related changes.
- [ ] You submit test cases (unit or integration tests) that back your changes.
- [ ] You added yourself as author in the headers of the classes you touched. Amend the date range in the Apache license header if needed. For new types, add the license header (copy from another file and set the current year only).
- [ ] You provide your full name and an email address registered with your GitHub account. If you’re a first-time submitter, make sure you have completed the [Contributor’s License Agreement form](https://support.springsource.com/spring_committer_signup).
.travis.yml (20 changes)

@@ -3,13 +3,29 @@ language: java
jdk:
  - oraclejdk8

services:
  - mongodb
before_script:
  - mongod --version

env:
  matrix:
    - PROFILE=ci
    - PROFILE=mongo-next
    - PROFILE=mongo3
    - PROFILE=mongo3-next
    - PROFILE=mongo31
    - PROFILE=mongo32
    - PROFILE=mongo33
    - PROFILE=mongo34-next

# Current MongoDB version is 2.4.2 as of 2016-04, see https://github.com/travis-ci/travis-ci/issues/3694
# apt-get starts a MongoDB instance so it's not started using before_script
addons:
  apt:
    sources:
      - mongodb-3.2-precise
    packages:
      - mongodb-org-server
      - mongodb-org-shell

sudo: false
CODE_OF_CONDUCT.adoc (27 changes, new file)

@@ -0,0 +1,27 @@
= Contributor Code of Conduct

As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities.

We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality.

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery
* Personal attacks
* Trolling or insulting/derogatory comments
* Public or private harassment
* Publishing other's private information, such as physical or electronic addresses,
without explicit permission
* Other unethical or unprofessional conduct

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.

By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team.

This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community.

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting a project maintainer at spring-code-of-conduct@pivotal.io.
All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances.
Maintainers are obligated to maintain confidentiality with regard to the reporter of an incident.

This Code of Conduct is adapted from the http://contributor-covenant.org[Contributor Covenant], version 1.3.0, available at http://contributor-covenant.org/version/1/3/0/[contributor-covenant.org/version/1/3/0/].
@@ -1 +0,0 @@
You find the contribution guidelines for Spring Data projects [here](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.md).
CONTRIBUTING.adoc (3 changes, new file)

@@ -0,0 +1,3 @@
= Spring Data contribution guidelines

You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here].
pom.xml (81 changes)

@@ -1,11 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

<modelVersion>4.0.0</modelVersion>

<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.7.0.RELEASE</version>
<version>1.9.7.RELEASE</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>

@@ -15,8 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>1.6.0.RELEASE</version>
<relativePath>../spring-data-build/parent/pom.xml</relativePath>
<version>1.8.7.RELEASE</version>
</parent>

<modules>

@@ -29,8 +28,8 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>1.10.0.RELEASE</springdata.commons>
<mongo>2.13.0</mongo>
<springdata.commons>1.12.7.RELEASE</springdata.commons>
<mongo>2.14.3</mongo>
<mongo.osgi>2.13.0</mongo.osgi>
</properties>

@@ -108,7 +107,7 @@
<id>mongo-next</id>
<properties>
<mongo>2.13.0-SNAPSHOT</mongo>
<mongo>2.15.0-SNAPSHOT</mongo>
</properties>

<repositories>

@@ -124,7 +123,7 @@
<id>mongo3</id>
<properties>
<mongo>3.0.0-beta3</mongo>
<mongo>3.0.4</mongo>
</properties>

</profile>

@@ -133,7 +132,7 @@
<id>mongo3-next</id>
<properties>
<mongo>3.0.0-SNAPSHOT</mongo>
<mongo>3.0.5-SNAPSHOT</mongo>
</properties>

<repositories>

@@ -144,6 +143,70 @@
</repositories>

</profile>

<profile>

<id>mongo31</id>
<properties>
<mongo>3.1.1</mongo>
</properties>

</profile>

<profile>

<id>mongo32</id>
<properties>
<mongo>3.2.2</mongo>
</properties>

</profile>

<profile>

<id>mongo33</id>
<properties>
<mongo>3.3.0</mongo>
</properties>

<repositories>
<repository>
<id>mongo-snapshots</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
</repository>
</repositories>

</profile>

<profile>

<id>mongo34-next</id>
<properties>
<mongo>3.4.0-SNAPSHOT</mongo>
</properties>

<repositories>
<repository>
<id>mongo-snapshots</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
</repository>
</repositories>

</profile>

<profile>
<id>release</id>
<build>
<plugins>
<plugin>
<groupId>org.jfrog.buildinfo</groupId>
<artifactId>artifactory-maven-plugin</artifactId>
<inherited>false</inherited>
</plugin>
</plugins>
</build>
</profile>

</profiles>

<dependencies>
@@ -6,7 +6,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.7.0.RELEASE</version>
<version>1.9.7.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -48,7 +48,7 @@
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>1.7.0.RELEASE</version>
<version>1.9.7.RELEASE</version>
</dependency>

<dependency>
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2014 the original author or authors.
* Copyright 2011-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -37,6 +37,8 @@ import com.mongodb.MongoException;
/**
* @author Thomas Risberg
* @author Oliver Gierke
* @author Alex Vengrovsk
* @author Mark Paluch
*/
public class MongoChangeSetPersister implements ChangeSetPersister<Object> {

@@ -45,7 +47,7 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
private static final String ENTITY_FIELD_NAME = "_entity_field_name";
private static final String ENTITY_FIELD_CLASS = "_entity_field_class";
protected final Logger log = LoggerFactory.getLogger(getClass());
private final Logger log = LoggerFactory.getLogger(getClass());
private MongoTemplate mongoTemplate;
private EntityManagerFactory entityManagerFactory;

@@ -76,25 +78,25 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
dbk.put(ENTITY_ID, id);
dbk.put(ENTITY_CLASS, entityClass.getName());
if (log.isDebugEnabled()) {
log.debug("Loading MongoDB data for " + dbk);
log.debug("Loading MongoDB data for {}", dbk);
}
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException {
for (DBObject dbo : collection.find(dbk)) {
String key = (String) dbo.get(ENTITY_FIELD_NAME);
if (log.isDebugEnabled()) {
log.debug("Processing key: " + key);
log.debug("Processing key: {}", key);
}
if (!changeSet.getValues().containsKey(key)) {
String className = (String) dbo.get(ENTITY_FIELD_CLASS);
if (className == null) {
throw new DataIntegrityViolationException("Unble to convert property " + key + ": Invalid metadata, "
+ ENTITY_FIELD_CLASS + " not available");
throw new DataIntegrityViolationException(
"Unble to convert property " + key + ": Invalid metadata, " + ENTITY_FIELD_CLASS + " not available");
}
Class<?> clazz = ClassUtils.resolveClassName(className, ClassUtils.getDefaultClassLoader());
Object value = mongoTemplate.getConverter().read(clazz, dbo);
if (log.isDebugEnabled()) {
log.debug("Adding to ChangeSet: " + key);
log.debug("Adding to ChangeSet: {}", key);
}
changeSet.set(key, value);
}

@@ -109,9 +111,9 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
*/
public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
log.debug("getPersistentId called on " + entity);
if (log.isDebugEnabled()) {
log.debug("getPersistentId called on {}", entity);
}
if (entityManagerFactory == null) {
throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null");
}

@@ -130,7 +132,7 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
}
if (log.isDebugEnabled()) {
log.debug("Flush: changeset: " + cs.getValues());
log.debug("Flush: changeset: {}", cs.getValues());
}
String collName = getCollectionNameForEntity(entity.getClass());

@@ -152,7 +154,7 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
});
if (value == null) {
if (log.isDebugEnabled()) {
log.debug("Flush: removing: " + dbQuery);
log.debug("Flush: removing: {}", dbQuery);
}
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException {

@@ -164,7 +166,7 @@ public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
final DBObject dbDoc = new BasicDBObject();
dbDoc.putAll(dbQuery);
if (log.isDebugEnabled()) {
log.debug("Flush: saving: " + dbQuery);
log.debug("Flush: saving: {}", dbQuery);
}
mongoTemplate.getConverter().write(value, dbDoc);
dbDoc.put(ENTITY_FIELD_CLASS, value.getClass().getName());
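The recurring change in MongoChangeSetPersister above swaps string concatenation inside log statements for SLF4J's parameterized form. A minimal sketch of the pattern (the class and field names here are illustrative, not taken from the diff):

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class LoggingStyleExample {

	private final Logger log = LoggerFactory.getLogger(getClass());

	void load(Object key) {
		// Concatenation builds the message string even when DEBUG is disabled,
		// which is why the old code wrapped each call in an isDebugEnabled() guard.
		if (log.isDebugEnabled()) {
			log.debug("Loading MongoDB data for " + key);
		}

		// The parameterized form defers formatting until the level is enabled;
		// the diff keeps the guard, but it is no longer strictly required.
		log.debug("Loading MongoDB data for {}", key);
	}
}
```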
@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.7.0.RELEASE</version>
<version>1.9.7.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -5,7 +5,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.7.0.RELEASE</version>
<version>1.9.7.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -30,6 +30,7 @@ import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
import com.mongodb.WriteConcern;

/**

@@ -37,6 +38,7 @@ import com.mongodb.WriteConcern;
*
* @author Jon Brisbin
* @author Oliver Gierke
* @auhtor Christoph Strobl
*/
public class MongoLog4jAppender extends AppenderSkeleton {

@@ -58,8 +60,8 @@ public class MongoLog4jAppender extends AppenderSkeleton {
protected String collectionPattern = "%c";
protected PatternLayout collectionLayout = new PatternLayout(collectionPattern);
protected String applicationId = System.getProperty("APPLICATION_ID", null);
protected WriteConcern warnOrHigherWriteConcern = WriteConcern.SAFE;
protected WriteConcern infoOrLowerWriteConcern = WriteConcern.NORMAL;
protected WriteConcern warnOrHigherWriteConcern = WriteConcern.ACKNOWLEDGED;
protected WriteConcern infoOrLowerWriteConcern = WriteConcern.UNACKNOWLEDGED;
protected Mongo mongo;
protected DB db;

@@ -128,7 +130,7 @@ public class MongoLog4jAppender extends AppenderSkeleton {
}
protected void connectToMongo() throws UnknownHostException {
this.mongo = new Mongo(host, port);
this.mongo = new MongoClient(host, port);
this.db = mongo.getDB(database);
}

@@ -160,7 +162,7 @@ public class MongoLog4jAppender extends AppenderSkeleton {
// Copy properties into document
Map<Object, Object> props = event.getProperties();
if (null != props && props.size() > 0) {
if (null != props && !props.isEmpty()) {
BasicDBObject propsDbo = new BasicDBObject();
for (Map.Entry<Object, Object> entry : props.entrySet()) {
propsDbo.put(entry.getKey().toString(), entry.getValue().toString());
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -22,37 +22,44 @@ import java.util.Calendar;

import org.apache.log4j.Logger;
import org.apache.log4j.MDC;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCursor;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;

/**
* Integration tests for {@link MongoLog4jAppender}.
*
* @author Jon Brisbin
* @author Oliver Gierke
* @author Christoph Strobl
*/
public class MongoLog4jAppenderIntegrationTests {

static final String NAME = MongoLog4jAppenderIntegrationTests.class.getName();

Logger log = Logger.getLogger(NAME);
Mongo mongo;
private static final Logger log = Logger.getLogger(NAME);
MongoClient mongo;
DB db;
String collection;

@Before
public void setUp() throws Exception {

mongo = new Mongo("localhost", 27017);
mongo = new MongoClient("localhost", 27017);
db = mongo.getDB("logs");

Calendar now = Calendar.getInstance();
collection = String.valueOf(now.get(Calendar.YEAR)) + String.format("%1$02d", now.get(Calendar.MONTH) + 1);
db.getCollection(collection).drop();
}

@After
public void tearDown() {
db.getCollection(collection).remove(new BasicDBObject());
}

@Test

@@ -64,7 +71,6 @@ public class MongoLog4jAppenderIntegrationTests {
log.error("ERROR message");

DBCursor msgs = db.getCollection(collection).find();

assertThat(msgs.count(), is(4));
}
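Two driver-level updates recur in the appender and its integration test: the deprecated `Mongo` entry point is replaced by `MongoClient`, and the legacy `SAFE`/`NORMAL` write concerns are mapped to their renamed equivalents `ACKNOWLEDGED`/`UNACKNOWLEDGED`. A hedged sketch of the same migration against the 2.x driver API (host, port, and database name are illustrative):

```java
import com.mongodb.DB;
import com.mongodb.MongoClient;
import com.mongodb.WriteConcern;

class DriverMigrationExample {

	DB connect() throws Exception {
		// MongoClient replaces the deprecated Mongo class; unlike Mongo it
		// defaults to acknowledged writes.
		MongoClient mongo = new MongoClient("localhost", 27017);

		// SAFE/NORMAL were renamed in the 2.x driver; the diff maps them 1:1.
		WriteConcern forWarnOrHigher = WriteConcern.ACKNOWLEDGED;   // was WriteConcern.SAFE
		WriteConcern forInfoOrLower = WriteConcern.UNACKNOWLEDGED;  // was WriteConcern.NORMAL

		return mongo.getDB("logs").getMongo().getDB("logs");
	}
}
```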
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<context version="7.1.10.209">
<context version="7.2.2.230">
<scope type="Project" name="spring-data-mongodb">
<element type="TypeFilterReferenceOverridden" name="Filter">
<element type="IncludeTypePattern" name="org.springframework.data.mongodb.**"/>

@@ -35,6 +35,12 @@
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|API" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Repositories::Subsystem|Implementation" type="AllowedDependency"/>
</element>
<element type="Subsystem" name="CDI">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.cdi.**"/>
</element>
<stereotype name="Unrestricted"/>
</element>
<dependency toName="Project|spring-data-mongodb::Layer|Config" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core" type="AllowedDependency"/>
</element>

@@ -76,6 +82,11 @@
</element>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
</element>
<element type="Subsystem" name="Script">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.script.**"/>
</element>
</element>
<element type="Subsystem" name="Conversion">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.core.convert.**"/>

@@ -83,6 +94,7 @@
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Script" type="AllowedDependency"/>
</element>
<element type="Subsystem" name="SpEL">
<element type="TypeFilter" name="Assignment">

@@ -105,6 +117,11 @@
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
</element>
<element type="Subsystem" name="MapReduce">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="**.mapreduce.**"/>
</element>
</element>
<element type="Subsystem" name="Core">
<element type="TypeFilter" name="Assignment">
<element type="WeakTypePattern" name="**.core.**"/>

@@ -113,8 +130,10 @@
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Conversion" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Geospatial" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Index" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|MapReduce" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Mapping" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Query" type="AllowedDependency"/>
<dependency toName="Project|spring-data-mongodb::Layer|Core::Subsystem|Script" type="AllowedDependency"/>
</element>
<element type="Subsystem" name="Util">
<element type="TypeFilter" name="Assignment">

@@ -169,7 +188,32 @@
</element>
<element type="Subsystem" name="Querydsl">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="com.mysema.query.**"/>
<element type="IncludeTypePattern" name="com.querydsl.**"/>
</element>
</element>
<element type="Subsystem" name="Slf4j">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="org.slf4j.**"/>
</element>
</element>
<element type="Subsystem" name="Jackson">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="com.fasterxml.jackson.**"/>
</element>
</element>
<element type="Subsystem" name="DOM">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="org.w3c.dom.**"/>
</element>
</element>
<element type="Subsystem" name="AOP Alliance">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="org.aopalliance.**"/>
</element>
</element>
<element type="Subsystem" name="Guava">
<element type="TypeFilter" name="Assignment">
<element type="IncludeTypePattern" name="com.google.common.**"/>
</element>
</element>
</architecture>
@@ -11,7 +11,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>1.7.0.RELEASE</version>
<version>1.9.7.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -59,14 +59,14 @@
</dependency>

<dependency>
<groupId>com.mysema.querydsl</groupId>
<groupId>com.querydsl</groupId>
<artifactId>querydsl-mongodb</artifactId>
<version>${querydsl}</version>
<optional>true</optional>
</dependency>

<dependency>
<groupId>com.mysema.querydsl</groupId>
<groupId>com.querydsl</groupId>
<artifactId>querydsl-apt</artifactId>
<version>${querydsl}</version>
<scope>provided</scope>

@@ -183,7 +183,7 @@
<version>${apt}</version>
<dependencies>
<dependency>
<groupId>com.mysema.querydsl</groupId>
<groupId>com.querydsl</groupId>
<artifactId>querydsl-apt</artifactId>
<version>${querydsl}</version>
</dependency>
@@ -0,0 +1,61 @@
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;

import java.util.List;

import org.springframework.dao.DataAccessException;

import com.mongodb.BulkWriteError;
import com.mongodb.BulkWriteException;
import com.mongodb.BulkWriteResult;

/**
* Is thrown when errors occur during bulk operations.
*
* @author Tobias Trelle
* @author Oliver Gierke
* @since 1.9
*/
public class BulkOperationException extends DataAccessException {

private static final long serialVersionUID = 73929601661154421L;

private final List<BulkWriteError> errors;
private final BulkWriteResult result;

/**
* Creates a new {@link BulkOperationException} with the given message and source {@link BulkWriteException}.
*
* @param message must not be {@literal null}.
* @param source must not be {@literal null}.
*/
public BulkOperationException(String message, BulkWriteException source) {

super(message, source);

this.errors = source.getWriteErrors();
this.result = source.getWriteResult();
}

public List<BulkWriteError> getErrors() {
return errors;
}

public BulkWriteResult getResult() {
return result;
}
}
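A sketch of how the new exception type could be consumed by calling code. The `bulkOps` parameter stands in for any prepared `BulkOperations` instance; the error-handling strategy shown (log and return the partial result) is an assumption, not part of the diff:

```java
import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.data.mongodb.core.BulkOperations;

import com.mongodb.BulkWriteError;
import com.mongodb.BulkWriteResult;

class BulkErrorHandlingExample {

	BulkWriteResult executeSafely(BulkOperations bulkOps) {
		try {
			return bulkOps.execute();
		} catch (BulkOperationException e) {
			// Per-write errors and the partial result are carried over from the
			// wrapped com.mongodb.BulkWriteException.
			for (BulkWriteError error : e.getErrors()) {
				System.err.println(error.getIndex() + ": " + error.getMessage());
			}
			return e.getResult();
		}
	}
}
```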
@@ -1,5 +1,5 @@
/*
* Copyright 2013-2014 the original author or authors.
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -15,24 +15,24 @@
*/
package org.springframework.data.mongodb.config;

import static org.springframework.beans.factory.config.BeanDefinition.*;
import static org.springframework.data.mongodb.config.BeanNames.*;

import java.lang.annotation.Annotation;

import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.core.type.AnnotationMetadata;
import org.springframework.data.auditing.IsNewAwareAuditingHandler;
import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
import org.springframework.data.auditing.config.AuditingConfiguration;
import org.springframework.data.config.ParsingUtils;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
import org.springframework.data.support.IsNewStrategyFactory;
import org.springframework.util.Assert;

/**

@@ -71,7 +71,6 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
Assert.notNull(annotationMetadata, "AnnotationMetadata must not be null!");
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");

defaultDependenciesIfNecessary(registry, annotationMetadata);
super.registerBeanDefinitions(annotationMetadata, registry);
}

@@ -85,7 +84,11 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
Assert.notNull(configuration, "AuditingConfiguration must not be null!");

BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class);
builder.addConstructorArgReference(MAPPING_CONTEXT_BEAN_NAME);

BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(MongoMappingContextLookup.class);
definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR);

builder.addConstructorArgValue(definition.getBeanDefinition());
return configureDefaultAuditHandlerAttributes(configuration, builder);
}

@@ -102,29 +105,58 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {

BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder
.rootBeanDefinition(AuditingEventListener.class);
listenerBeanDefinitionBuilder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(
getAuditingHandlerBeanName(), registry));
listenerBeanDefinitionBuilder
.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));

registerInfrastructureBeanWithId(listenerBeanDefinitionBuilder.getBeanDefinition(),
AuditingEventListener.class.getName(), registry);
}

/**
* Register default bean definitions for a {@link MongoMappingContext} and an {@link IsNewStrategyFactory} in case we
* don't find beans with the assumed names in the registry.
*
* @param registry the {@link BeanDefinitionRegistry} to use to register the components into.
* @param source the source which the registered components shall be registered with
* Simple helper to be able to wire the {@link MappingContext} from a {@link MappingMongoConverter} bean available in
* the application context.
*
* @author Oliver Gierke
*/
private void defaultDependenciesIfNecessary(BeanDefinitionRegistry registry, Object source) {
static class MongoMappingContextLookup
implements FactoryBean<MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty>> {

if (!registry.containsBeanDefinition(MAPPING_CONTEXT_BEAN_NAME)) {
private final MappingMongoConverter converter;

RootBeanDefinition definition = new RootBeanDefinition(MongoMappingContext.class);
definition.setRole(ROLE_INFRASTRUCTURE);
definition.setSource(source);
/**
* Creates a new {@link MongoMappingContextLookup} for the given {@link MappingMongoConverter}.
*
* @param converter must not be {@literal null}.
*/
public MongoMappingContextLookup(MappingMongoConverter converter) {
this.converter = converter;
}

registry.registerBeanDefinition(MAPPING_CONTEXT_BEAN_NAME, definition);
/*
* (non-Javadoc)
* @see org.springframework.beans.factory.FactoryBean#getObject()
*/
@Override
public MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> getObject() throws Exception {
return converter.getMappingContext();
}

/*
* (non-Javadoc)
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
*/
@Override
public Class<?> getObjectType() {
return MappingContext.class;
}

/*
* (non-Javadoc)
* @see org.springframework.beans.factory.FactoryBean#isSingleton()
*/
@Override
public boolean isSingleton() {
return true;
}
}
}
@@ -17,8 +17,11 @@ package org.springframework.data.mongodb.config;

import java.beans.PropertyEditorSupport;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.springframework.util.StringUtils;

@@ -28,10 +31,13 @@ import com.mongodb.MongoCredential;
* Parse a {@link String} to a Collection of {@link MongoCredential}.
*
* @author Christoph Strobl
* @author Oliver Gierke
* @since 1.7
*/
public class MongoCredentialPropertyEditor extends PropertyEditorSupport {

private static final Pattern GROUP_PATTERN = Pattern.compile("(\\\\?')(.*?)\\1");

private static final String AUTH_MECHANISM_KEY = "uri.authMechanism";
private static final String USERNAME_PASSWORD_DELIMINATOR = ":";
private static final String DATABASE_DELIMINATOR = "@";

@@ -51,11 +57,7 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {

List<MongoCredential> credentials = new ArrayList<MongoCredential>();

for (String credentialString : text.split(",")) {

if (!text.contains(USERNAME_PASSWORD_DELIMINATOR) || !text.contains(DATABASE_DELIMINATOR)) {
throw new IllegalArgumentException("Credentials need to be in format 'username:password@database'!");
}
for (String credentialString : extractCredentialsString(text)) {

String[] userNameAndPassword = extractUserNameAndPassword(credentialString);
String database = extractDB(credentialString);

@@ -68,43 +70,83 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
String authMechanism = options.getProperty(AUTH_MECHANISM_KEY);

if (MongoCredential.GSSAPI_MECHANISM.equals(authMechanism)) {

verifyUserNamePresent(userNameAndPassword);
credentials.add(MongoCredential.createGSSAPICredential(userNameAndPassword[0]));
} else if (MongoCredential.MONGODB_CR_MECHANISM.equals(authMechanism)) {

verifyUsernameAndPasswordPresent(userNameAndPassword);
verifyDatabasePresent(database);
credentials.add(MongoCredential.createMongoCRCredential(userNameAndPassword[0], database,
userNameAndPassword[1].toCharArray()));
} else if (MongoCredential.MONGODB_X509_MECHANISM.equals(authMechanism)) {

verifyUserNamePresent(userNameAndPassword);
credentials.add(MongoCredential.createMongoX509Credential(userNameAndPassword[0]));
} else if (MongoCredential.PLAIN_MECHANISM.equals(authMechanism)) {

verifyUsernameAndPasswordPresent(userNameAndPassword);
verifyDatabasePresent(database);
credentials.add(MongoCredential.createPlainCredential(userNameAndPassword[0], database,
userNameAndPassword[1].toCharArray()));
} else if (MongoCredential.SCRAM_SHA_1_MECHANISM.equals(authMechanism)) {

verifyUsernameAndPasswordPresent(userNameAndPassword);
verifyDatabasePresent(database);
credentials.add(MongoCredential.createScramSha1Credential(userNameAndPassword[0], database,
userNameAndPassword[1].toCharArray()));
} else {
throw new IllegalArgumentException(String.format(
"Cannot create MongoCredentials for unknown auth mechanism '%s'!", authMechanism));
throw new IllegalArgumentException(
String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'!", authMechanism));
}
}
} else {
credentials.add(MongoCredential.createCredential(userNameAndPassword[0], database,
userNameAndPassword[1].toCharArray()));

verifyUsernameAndPasswordPresent(userNameAndPassword);
verifyDatabasePresent(database);
credentials.add(
MongoCredential.createCredential(userNameAndPassword[0], database, userNameAndPassword[1].toCharArray()));
}
}

setValue(credentials);
}

private List<String> extractCredentialsString(String source) {

Matcher matcher = GROUP_PATTERN.matcher(source);
List<String> list = new ArrayList<String>();

while (matcher.find()) {

String value = StringUtils.trimLeadingCharacter(matcher.group(), '\'');
list.add(StringUtils.trimTrailingCharacter(value, '\''));
}

if (!list.isEmpty()) {
return list;
}

return Arrays.asList(source.split(","));
}

private static String[] extractUserNameAndPassword(String text) {

int dbSeperationIndex = text.lastIndexOf(DATABASE_DELIMINATOR);
String userNameAndPassword = text.substring(0, dbSeperationIndex);
return userNameAndPassword.split(USERNAME_PASSWORD_DELIMINATOR);
int index = text.lastIndexOf(DATABASE_DELIMINATOR);

index = index != -1 ? index : text.lastIndexOf(OPTIONS_DELIMINATOR);

return index == -1 ? new String[] {} : text.substring(0, index).split(USERNAME_PASSWORD_DELIMINATOR);
}

private static String extractDB(String text) {

int dbSeperationIndex = text.lastIndexOf(DATABASE_DELIMINATOR);

if (dbSeperationIndex == -1) {
return "";
}

String tmp = text.substring(dbSeperationIndex + 1);
int optionsSeperationIndex = tmp.lastIndexOf(OPTIONS_DELIMINATOR);

@@ -129,4 +171,28 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {

return properties;
}

private static void verifyUsernameAndPasswordPresent(String[] source) {

verifyUserNamePresent(source);

if (source.length != 2) {
throw new IllegalArgumentException(
"Credentials need to specify username and password like in 'username:password@database'!");
}
}

private static void verifyDatabasePresent(String source) {

if (!StringUtils.hasText(source)) {
throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'!");
}
}

private static void verifyUserNamePresent(String[] source) {

if (source.length == 0 || !StringUtils.hasText(source[0])) {
throw new IllegalArgumentException("Credentials need to specify username!");
}
}
}
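The editor parses comma-separated credential strings of the form username:password@database, optionally quoted and optionally carrying a uri.authMechanism option, as the verification helpers above enforce. A small sketch of direct use of the editor (the credential values are made up for illustration):

```java
import java.util.List;

import org.springframework.data.mongodb.config.MongoCredentialPropertyEditor;

import com.mongodb.MongoCredential;

class CredentialParsingExample {

	@SuppressWarnings("unchecked")
	List<MongoCredential> parse() {

		MongoCredentialPropertyEditor editor = new MongoCredentialPropertyEditor();

		// Plain "username:password@database" resolves via MongoCredential.createCredential;
		// the quoted second entry selects an explicit auth mechanism instead.
		editor.setAsText("jon:secret@admin,'bat:man@db?uri.authMechanism=PLAIN'");

		return (List<MongoCredential>) editor.getValue();
	}
}
```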
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2014 by the original author(s).
* Copyright 2011-2015 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -18,6 +18,10 @@ package org.springframework.data.mongodb.config;
import static org.springframework.data.config.ParsingUtils.*;
import static org.springframework.data.mongodb.config.MongoParsingUtils.*;

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import org.springframework.beans.factory.BeanDefinitionStoreException;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.parsing.BeanComponentDefinition;

@@ -34,6 +38,7 @@ import org.springframework.util.StringUtils;
import org.w3c.dom.Element;

import com.mongodb.Mongo;
import com.mongodb.MongoClientURI;
import com.mongodb.MongoURI;

/**

@@ -42,9 +47,22 @@ import com.mongodb.MongoURI;
* @author Jon Brisbin
* @author Oliver Gierke
* @author Thomas Darimont
* @author Christoph Strobl
* @author Viktor Khoroshko
*/
public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {

private static final Set<String> MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES;

static {

Set<String> mongoUriAllowedAdditionalAttributes = new HashSet<String>();
mongoUriAllowedAdditionalAttributes.add("id");
mongoUriAllowedAdditionalAttributes.add("write-concern");

MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES = Collections.unmodifiableSet(mongoUriAllowedAdditionalAttributes);
}

/*
* (non-Javadoc)
* @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext)

@@ -64,29 +82,25 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
@Override
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {

Object source = parserContext.extractSource(element);

BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext);

String uri = element.getAttribute("uri");
String mongoRef = element.getAttribute("mongo-ref");
String dbname = element.getAttribute("dbname");

BeanDefinition userCredentials = getUserCredentialsBeanDefinition(element, parserContext);

// Common setup
BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder.genericBeanDefinition(SimpleMongoDbFactory.class);
setPropertyValue(dbFactoryBuilder, element, "write-concern", "writeConcern");

if (StringUtils.hasText(uri)) {
if (StringUtils.hasText(mongoRef) || StringUtils.hasText(dbname) || userCredentials != null) {
parserContext.getReaderContext().error("Configure either Mongo URI or details individually!", source);
}
BeanDefinition mongoUri = getMongoUri(element, parserContext);

dbFactoryBuilder.addConstructorArgValue(getMongoUri(uri));
if (mongoUri != null) {

dbFactoryBuilder.addConstructorArgValue(mongoUri);
return getSourceBeanDefinition(dbFactoryBuilder, parserContext, element);
}

BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext);

String mongoRef = element.getAttribute("mongo-ref");
String dbname = element.getAttribute("dbname");

BeanDefinition userCredentials = getUserCredentialsBeanDefinition(element, parserContext);

// Defaulting
if (StringUtils.hasText(mongoRef)) {
dbFactoryBuilder.addConstructorArgReference(mongoRef);

@@ -147,14 +161,42 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
}

/**
* Creates a {@link BeanDefinition} for a {@link MongoURI}.
* Creates a {@link BeanDefinition} for a {@link MongoURI} or {@link MongoClientURI} depending on configured
* attributes. <br />
* Errors when configured element contains {@literal uri} or {@literal client-uri} along with other attributes except
* {@literal write-concern} and/or {@literal id}.
*
* @param uri
* @return
* @param element must not be {@literal null}.
* @param parserContext
* @return {@literal null} in case no client-/uri defined.
*/
private BeanDefinition getMongoUri(String uri) {
private BeanDefinition getMongoUri(Element element, ParserContext parserContext) {

BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(MongoURI.class);
boolean hasClientUri = element.hasAttribute("client-uri");

if (!hasClientUri && !element.hasAttribute("uri")) {
return null;
}

int allowedAttributesCount = 1;
for (String attribute : MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES) {

if (element.hasAttribute(attribute)) {
allowedAttributesCount++;
}
}

if (element.getAttributes().getLength() > allowedAttributesCount) {

parserContext.getReaderContext().error(
"Configure either " + (hasClientUri ? "Mongo Client URI" : "Mongo URI") + " or details individually!",
parserContext.extractSource(element));
}

Class<?> type = hasClientUri ? MongoClientURI.class : MongoURI.class;
String uri = hasClientUri ? element.getAttribute("client-uri") : element.getAttribute("uri");

BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(type);
builder.addConstructorArgValue(uri);

return builder.getBeanDefinition();
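The parser now picks between the legacy MongoURI and the newer MongoClientURI type depending on whether the uri or client-uri attribute is present. For reference, a minimal sketch of constructing the newer type directly (the connection string is illustrative):

```java
import com.mongodb.MongoClientURI;

class ClientUriExample {

	MongoClientURI clientUri() {
		// MongoClientURI supersedes the deprecated MongoURI; both accept a
		// standard "mongodb://" connection string.
		return new MongoClientURI("mongodb://localhost:27017/database");
	}
}
```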
@@ -0,0 +1,145 @@
|
||||
/*
|
||||
* Copyright 2015-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.util.Pair;
|
||||
|
||||
import com.mongodb.BulkWriteResult;
|
||||
|
||||
/**
|
||||
* Bulk operations for insert/update/remove actions on a collection. These bulks operation are available since MongoDB
|
||||
* 2.6 and make use of low level bulk commands on the protocol level. This interface defines a fluent API to add
|
||||
 * multiple single operations or list of similar operations in sequence which can then eventually be executed by calling
 * {@link #execute()}.
 *
 * @author Tobias Trelle
 * @author Oliver Gierke
 * @since 1.9
 */
public interface BulkOperations {

	/**
	 * Mode for bulk operation.
	 **/
	public enum BulkMode {

		/** Perform bulk operations in sequence. The first error will cancel processing. */
		ORDERED,

		/** Perform bulk operations in parallel. Processing will continue on errors. */
		UNORDERED
	};

	/**
	 * Add a single insert to the bulk operation.
	 *
	 * @param documents the document to insert, must not be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the insert added, will never be {@literal null}.
	 */
	BulkOperations insert(Object documents);

	/**
	 * Add a list of inserts to the bulk operation.
	 *
	 * @param documents List of documents to insert, must not be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the insert added, will never be {@literal null}.
	 */
	BulkOperations insert(List<? extends Object> documents);

	/**
	 * Add a single update to the bulk operation. For the update request, only the first matching document is updated.
	 *
	 * @param query update criteria, must not be {@literal null}.
	 * @param update {@link Update} operation to perform, must not be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations updateOne(Query query, Update update);

	/**
	 * Add a list of updates to the bulk operation. For each update request, only the first matching document is updated.
	 *
	 * @param updates Update operations to perform.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations updateOne(List<Pair<Query, Update>> updates);

	/**
	 * Add a single update to the bulk operation. For the update request, all matching documents are updated.
	 *
	 * @param query Update criteria.
	 * @param update Update operation to perform.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations updateMulti(Query query, Update update);

	/**
	 * Add a list of updates to the bulk operation. For each update request, all matching documents are updated.
	 *
	 * @param updates Update operations to perform.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations updateMulti(List<Pair<Query, Update>> updates);

	/**
	 * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
	 * else an insert.
	 *
	 * @param query Update criteria.
	 * @param update Update operation to perform.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations upsert(Query query, Update update);

	/**
	 * Add a list of upserts to the bulk operation. An upsert is an update if the set of matching documents is not empty,
	 * else an insert.
	 *
	 * @param updates Updates/insert operations to perform.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations upsert(List<Pair<Query, Update>> updates);

	/**
	 * Add a single remove operation to the bulk operation.
	 *
	 * @param remove the {@link Query} to select the documents to be removed, must not be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the removal added, will never be {@literal null}.
	 */
	BulkOperations remove(Query remove);

	/**
	 * Add a list of remove operations to the bulk operation.
	 *
	 * @param removes the remove operations to perform, must not be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the removal added, will never be {@literal null}.
	 */
	BulkOperations remove(List<Query> removes);

	/**
	 * Execute all bulk operations using the default write concern.
	 *
	 * @return Result of the bulk operation providing counters for inserts/updates etc.
	 * @throws BulkOperationException if an error occurred during bulk processing.
	 */
	BulkWriteResult execute();
}
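Read together with the bulkOps(...) factory methods added to MongoOperations further down in this compare, the interface is meant to be used fluently: queue several heterogeneous writes and send them to the server as a single bulk command. A minimal usage sketch, assuming a mapped Person entity and an already configured MongoOperations instance; all entity and field names are illustrative only:

import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;

import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.BulkWriteResult;

public class BulkOperationsSketch {

	public BulkWriteResult maintainPeople(MongoOperations template) {

		// Queue up inserts, updates, an upsert and a removal against the Person collection.
		BulkOperations bulk = template.bulkOps(BulkMode.ORDERED, Person.class);

		bulk.insert(new Person("Walter", "White"));
		bulk.updateOne(query(where("lastname").is("White")), new Update().set("firstname", "Walt"));
		bulk.upsert(query(where("lastname").is("Schrader")), new Update().set("firstname", "Hank"));
		bulk.remove(query(where("lastname").is("Fring")));

		// ORDERED stops at the first error, UNORDERED keeps going and reports all errors at the end.
		return bulk.execute();
	}
}

The returned BulkWriteResult exposes the driver's counters for inserted, matched, modified and removed documents.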
@@ -25,7 +25,7 @@ import com.mongodb.DBCursor;
|
||||
interface CursorPreparer {
|
||||
|
||||
/**
|
||||
* Prepare the given cursor (apply limits, skips and so on). Returns th eprepared cursor.
|
||||
* Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
|
||||
*
|
||||
* @param cursor
|
||||
*/
|
||||
|
||||
@@ -0,0 +1,337 @@
|
||||
/*
|
||||
* Copyright 2015-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.util.Pair;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.BulkWriteOperation;
|
||||
import com.mongodb.BulkWriteRequestBuilder;
|
||||
import com.mongodb.BulkWriteResult;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
/**
|
||||
* Default implementation for {@link BulkOperations}.
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @since 1.9
|
||||
*/
|
||||
class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
private final MongoOperations mongoOperations;
|
||||
private final BulkMode bulkMode;
|
||||
private final String collectionName;
|
||||
private final Class<?> entityType;
|
||||
|
||||
private PersistenceExceptionTranslator exceptionTranslator;
|
||||
private WriteConcernResolver writeConcernResolver;
|
||||
private WriteConcern defaultWriteConcern;
|
||||
|
||||
private BulkWriteOperation bulk;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultBulkOperations} for the given {@link MongoOperations}, {@link BulkMode}, collection
|
||||
* name and {@link WriteConcern}.
|
||||
*
|
||||
* @param mongoOperations The underlying {@link MongoOperations}, must not be {@literal null}.
|
||||
* @param bulkMode must not be {@literal null}.
|
||||
* @param collectionName Name of the collection to work on, must not be {@literal null} or empty.
|
||||
* @param entityType the entity type, can be {@literal null}.
|
||||
*/
|
||||
DefaultBulkOperations(MongoOperations mongoOperations, BulkMode bulkMode, String collectionName,
|
||||
Class<?> entityType) {
|
||||
|
||||
Assert.notNull(mongoOperations, "MongoOperations must not be null!");
|
||||
Assert.notNull(bulkMode, "BulkMode must not be null!");
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
|
||||
this.mongoOperations = mongoOperations;
|
||||
this.bulkMode = bulkMode;
|
||||
this.collectionName = collectionName;
|
||||
this.entityType = entityType;
|
||||
|
||||
this.exceptionTranslator = new MongoExceptionTranslator();
|
||||
this.writeConcernResolver = DefaultWriteConcernResolver.INSTANCE;
|
||||
|
||||
this.bulk = initBulkOperation();
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link PersistenceExceptionTranslator} to be used. Defaults to {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @param exceptionTranslator can be {@literal null}.
|
||||
*/
|
||||
public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) {
|
||||
this.exceptionTranslator = exceptionTranslator == null ? new MongoExceptionTranslator() : exceptionTranslator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link WriteConcernResolver} to be used. Defaults to {@link DefaultWriteConcernResolver}.
|
||||
*
|
||||
* @param writeConcernResolver can be {@literal null}.
|
||||
*/
|
||||
public void setWriteConcernResolver(WriteConcernResolver writeConcernResolver) {
|
||||
this.writeConcernResolver = writeConcernResolver == null ? DefaultWriteConcernResolver.INSTANCE
|
||||
: writeConcernResolver;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}.
|
||||
*
|
||||
* @param defaultWriteConcern can be {@literal null}.
|
||||
*/
|
||||
public void setDefaultWriteConcern(WriteConcern defaultWriteConcern) {
|
||||
this.defaultWriteConcern = defaultWriteConcern;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#insert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations insert(Object document) {
|
||||
|
||||
Assert.notNull(document, "Document must not be null!");
|
||||
|
||||
if (document instanceof DBObject) {
|
||||
|
||||
bulk.insert((DBObject) document);
|
||||
return this;
|
||||
}
|
||||
|
||||
DBObject sink = new BasicDBObject();
|
||||
mongoOperations.getConverter().write(document, sink);
|
||||
bulk.insert(sink);
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#insert(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations insert(List<? extends Object> documents) {
|
||||
|
||||
Assert.notNull(documents, "Documents must not be null!");
|
||||
|
||||
for (Object document : documents) {
|
||||
insert(document);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateOne(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public BulkOperations updateOne(Query query, Update update) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
return updateOne(Arrays.asList(Pair.of(query, update)));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateOne(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations updateOne(List<Pair<Query, Update>> updates) {
|
||||
|
||||
Assert.notNull(updates, "Updates must not be null!");
|
||||
|
||||
for (Pair<Query, Update> update : updates) {
|
||||
update(update.getFirst(), update.getSecond(), false, false);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public BulkOperations updateMulti(Query query, Update update) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
return updateMulti(Arrays.asList(Pair.of(query, update)));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations updateMulti(List<Pair<Query, Update>> updates) {
|
||||
|
||||
Assert.notNull(updates, "Updates must not be null!");
|
||||
|
||||
for (Pair<Query, Update> update : updates) {
|
||||
update(update.getFirst(), update.getSecond(), false, true);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations upsert(Query query, Update update) {
|
||||
return update(query, update, true, true);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#upsert(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations upsert(List<Pair<Query, Update>> updates) {
|
||||
|
||||
for (Pair<Query, Update> update : updates) {
|
||||
upsert(update.getFirst(), update.getSecond());
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#remove(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations remove(Query query) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
|
||||
bulk.find(query.getQueryObject()).remove();
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#remove(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations remove(List<Query> removes) {
|
||||
|
||||
Assert.notNull(removes, "Removals must not be null!");
|
||||
|
||||
for (Query query : removes) {
|
||||
remove(query);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#executeBulk()
|
||||
*/
|
||||
@Override
|
||||
public BulkWriteResult execute() {
|
||||
|
||||
MongoAction action = new MongoAction(defaultWriteConcern, MongoActionOperation.BULK, collectionName, entityType,
|
||||
null, null);
|
||||
WriteConcern writeConcern = writeConcernResolver.resolve(action);
|
||||
|
||||
try {
|
||||
|
||||
return writeConcern == null ? bulk.execute() : bulk.execute(writeConcern);
|
||||
|
||||
} catch (BulkWriteException o_O) {
|
||||
|
||||
DataAccessException toThrow = exceptionTranslator.translateExceptionIfPossible(o_O);
|
||||
throw toThrow == null ? o_O : toThrow;
|
||||
|
||||
} finally {
|
||||
this.bulk = initBulkOperation();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs update and upsert bulk operations.
|
||||
*
|
||||
* @param query the {@link Query} to determine documents to update.
|
||||
* @param update the {@link Update} to perform, must not be {@literal null}.
|
||||
* @param upsert whether to upsert.
|
||||
* @param multi whether to issue a multi-update.
|
||||
* @return the {@link BulkOperations} with the update registered.
|
||||
*/
|
||||
private BulkOperations update(Query query, Update update, boolean upsert, boolean multi) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
BulkWriteRequestBuilder builder = bulk.find(query.getQueryObject());
|
||||
|
||||
if (upsert) {
|
||||
|
||||
if (multi) {
|
||||
builder.upsert().update(update.getUpdateObject());
|
||||
} else {
|
||||
builder.upsert().updateOne(update.getUpdateObject());
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
if (multi) {
|
||||
builder.update(update.getUpdateObject());
|
||||
} else {
|
||||
builder.updateOne(update.getUpdateObject());
|
||||
}
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
private final BulkWriteOperation initBulkOperation() {
|
||||
|
||||
DBCollection collection = mongoOperations.getCollection(collectionName);
|
||||
|
||||
switch (bulkMode) {
|
||||
case ORDERED:
|
||||
return collection.initializeOrderedBulkOperation();
|
||||
case UNORDERED:
|
||||
return collection.initializeUnorderedBulkOperation();
|
||||
}
|
||||
|
||||
throw new IllegalStateException("BulkMode was null!");
|
||||
}
|
||||
}
|
||||
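Note how execute() resolves the WriteConcern through the configured WriteConcernResolver using a MongoAction carrying the new BULK operation type, and re-initialises the underlying BulkWriteOperation in the finally block so the instance can be reused. A sketch of a custom resolver under the assumption that it is registered on the template via setWriteConcernResolver(...); the class name and the chosen write concern are illustrative:

import org.springframework.data.mongodb.core.MongoAction;
import org.springframework.data.mongodb.core.MongoActionOperation;
import org.springframework.data.mongodb.core.WriteConcernResolver;

import com.mongodb.WriteConcern;

public class BulkAwareWriteConcernResolver implements WriteConcernResolver {

	public WriteConcern resolve(MongoAction action) {

		// Bulk writes are acknowledged by a majority, everything else keeps the action's default.
		return MongoActionOperation.BULK.equals(action.getMongoActionOperation()) //
				? WriteConcern.MAJORITY //
				: action.getDefaultWriteConcern();
	}
}

Failed bulks surface as the driver's BulkWriteException and are handed to the configured PersistenceExceptionTranslator, which, per the MongoExceptionTranslator change further down, turns them into a Spring BulkOperationException.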
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,17 +15,15 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.springframework.data.domain.Sort.Direction.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.index.IndexDefinition;
|
||||
import org.springframework.data.mongodb.core.index.IndexField;
|
||||
import org.springframework.data.mongodb.core.index.IndexInfo;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBCollection;
|
||||
@@ -42,12 +40,12 @@ import com.mongodb.MongoException;
|
||||
*/
|
||||
public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
private static final Double ONE = Double.valueOf(1);
|
||||
private static final Double MINUS_ONE = Double.valueOf(-1);
|
||||
private static final Collection<String> TWO_D_IDENTIFIERS = Arrays.asList("2d", "2dsphere");
|
||||
private static final String PARTIAL_FILTER_EXPRESSION_KEY = "partialFilterExpression";
|
||||
|
||||
private final MongoOperations mongoOperations;
|
||||
private final String collectionName;
|
||||
private final QueryMapper mapper;
|
||||
private final Class<?> type;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultIndexOperations}.
|
||||
@@ -56,12 +54,26 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
* @param collectionName must not be {@literal null}.
|
||||
*/
|
||||
public DefaultIndexOperations(MongoOperations mongoOperations, String collectionName) {
|
||||
this(mongoOperations, collectionName, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultIndexOperations}.
|
||||
*
|
||||
* @param mongoOperations must not be {@literal null}.
|
||||
* @param collectionName must not be {@literal null}.
|
||||
* @param type Type used for mapping potential partial index filter expression. Can be {@literal null}.
|
||||
* @since 1.10
|
||||
*/
|
||||
public DefaultIndexOperations(MongoOperations mongoOperations, String collectionName, Class<?> type) {
|
||||
|
||||
Assert.notNull(mongoOperations, "MongoOperations must not be null!");
|
||||
Assert.notNull(collectionName, "Collection name can not be null!");
|
||||
|
||||
this.mongoOperations = mongoOperations;
|
||||
this.collectionName = collectionName;
|
||||
this.mapper = new QueryMapper(mongoOperations.getConverter());
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -69,9 +81,20 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
* @see org.springframework.data.mongodb.core.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition)
|
||||
*/
|
||||
public void ensureIndex(final IndexDefinition indexDefinition) {
|
||||
|
||||
mongoOperations.execute(collectionName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
DBObject indexOptions = indexDefinition.getIndexOptions();
|
||||
|
||||
if (indexOptions != null && indexOptions.containsField(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
|
||||
Assert.isInstanceOf(DBObject.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
|
||||
indexOptions.put(PARTIAL_FILTER_EXPRESSION_KEY,
|
||||
mapper.getMappedObject((DBObject) indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY),
|
||||
lookupPersistentEntity(type, collectionName)));
|
||||
}
|
||||
|
||||
if (indexOptions != null) {
|
||||
collection.createIndex(indexDefinition.getIndexKeys(), indexOptions);
|
||||
} else {
|
||||
@@ -79,6 +102,24 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private MongoPersistentEntity<?> lookupPersistentEntity(Class<?> entityType, String collection) {
|
||||
|
||||
if (entityType != null) {
|
||||
return mongoOperations.getConverter().getMappingContext().getPersistentEntity(entityType);
|
||||
}
|
||||
|
||||
Collection<? extends MongoPersistentEntity<?>> entities = mongoOperations.getConverter().getMappingContext()
|
||||
.getPersistentEntities();
|
||||
|
||||
for (MongoPersistentEntity<?> entity : entities) {
|
||||
if (entity.getCollection().equals(collection)) {
|
||||
return entity;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
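The partialFilterExpression handling above maps the raw filter document against the entity's metadata before the index is created. A sketch of an IndexDefinition that carries such a filter in its options; the Person entity, the field names and the anonymous definition are illustrative, and a configured MongoOperations named template is assumed:

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.index.IndexDefinition;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class PartialIndexSketch {

	public void createPartialIndex(MongoOperations template) {

		IndexDefinition adultsOnlyByAge = new IndexDefinition() {

			public DBObject getIndexKeys() {
				return new BasicDBObject("age", 1);
			}

			public DBObject getIndexOptions() {
				// Picked up by ensureIndex(...) via the partialFilterExpression key shown above.
				return new BasicDBObject("partialFilterExpression",
						new BasicDBObject("age", new BasicDBObject("$gte", 18)));
			}
		};

		template.indexOps(Person.class).ensureIndex(adultsOnlyByAge);
	}
}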
@@ -126,7 +167,9 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
public List<IndexInfo> getIndexInfo() {
|
||||
|
||||
return mongoOperations.execute(collectionName, new CollectionCallback<List<IndexInfo>>() {
|
||||
|
||||
public List<IndexInfo> doInCollection(DBCollection collection) throws MongoException, DataAccessException {
|
||||
|
||||
List<DBObject> dbObjectList = collection.getIndexInfo();
|
||||
return getIndexData(dbObjectList);
|
||||
}
|
||||
@@ -136,44 +179,7 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
List<IndexInfo> indexInfoList = new ArrayList<IndexInfo>();
|
||||
|
||||
for (DBObject ix : dbObjectList) {
|
||||
|
||||
DBObject keyDbObject = (DBObject) ix.get("key");
|
||||
int numberOfElements = keyDbObject.keySet().size();
|
||||
|
||||
List<IndexField> indexFields = new ArrayList<IndexField>(numberOfElements);
|
||||
|
||||
for (String key : keyDbObject.keySet()) {
|
||||
|
||||
Object value = keyDbObject.get(key);
|
||||
|
||||
if (TWO_D_IDENTIFIERS.contains(value)) {
|
||||
indexFields.add(IndexField.geo(key));
|
||||
} else if ("text".equals(value)) {
|
||||
|
||||
DBObject weights = (DBObject) ix.get("weights");
|
||||
for (String fieldName : weights.keySet()) {
|
||||
indexFields.add(IndexField.text(fieldName, Float.valueOf(weights.get(fieldName).toString())));
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
Double keyValue = new Double(value.toString());
|
||||
|
||||
if (ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, ASC));
|
||||
} else if (MINUS_ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, DESC));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
String name = ix.get("name").toString();
|
||||
|
||||
boolean unique = ix.containsField("unique") ? (Boolean) ix.get("unique") : false;
|
||||
boolean dropDuplicates = ix.containsField("dropDups") ? (Boolean) ix.get("dropDups") : false;
|
||||
boolean sparse = ix.containsField("sparse") ? (Boolean) ix.get("sparse") : false;
|
||||
String language = ix.containsField("default_language") ? (String) ix.get("default_language") : "";
|
||||
indexInfoList.add(new IndexInfo(indexFields, name, unique, dropDuplicates, sparse, language));
|
||||
indexInfoList.add(IndexInfo.indexInfoOf(ix));
|
||||
}
|
||||
|
||||
return indexInfoList;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2014-2015 the original author or authors.
|
||||
* Copyright 2014-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -98,7 +98,7 @@ class DefaultScriptOperations implements ScriptOperations {
|
||||
|
||||
@Override
|
||||
public Object doInDB(DB db) throws MongoException, DataAccessException {
|
||||
return db.eval(script.getCode(), convertScriptArgs(args));
|
||||
return db.eval(script.getCode(), convertScriptArgs(false, args));
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -155,7 +155,7 @@ class DefaultScriptOperations implements ScriptOperations {
|
||||
return scriptNames;
|
||||
}
|
||||
|
||||
private Object[] convertScriptArgs(Object... args) {
|
||||
private Object[] convertScriptArgs(boolean quote, Object... args) {
|
||||
|
||||
if (ObjectUtils.isEmpty(args)) {
|
||||
return args;
|
||||
@@ -164,15 +164,15 @@ class DefaultScriptOperations implements ScriptOperations {
|
||||
List<Object> convertedValues = new ArrayList<Object>(args.length);
|
||||
|
||||
for (Object arg : args) {
|
||||
convertedValues.add(arg instanceof String ? String.format("'%s'", arg) : this.mongoOperations.getConverter()
|
||||
.convertToMongoType(arg));
|
||||
convertedValues.add(arg instanceof String && quote ? String.format("'%s'", arg)
|
||||
: this.mongoOperations.getConverter().convertToMongoType(arg));
|
||||
}
|
||||
|
||||
return convertedValues.toArray();
|
||||
}
|
||||
|
||||
private String convertAndJoinScriptArgs(Object... args) {
|
||||
return ObjectUtils.isEmpty(args) ? "" : StringUtils.arrayToCommaDelimitedString(convertScriptArgs(args));
|
||||
return ObjectUtils.isEmpty(args) ? "" : StringUtils.arrayToCommaDelimitedString(convertScriptArgs(true, args));
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
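The new quote flag means String arguments are only wrapped in quotes when they have to be rendered into the JavaScript call string for a stored function; when an ExecutableMongoScript is evaluated directly they are now passed to db.eval(...) as plain values. A sketch under the assumption of a configured MongoOperations named template; the script name and argument are illustrative:

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.ScriptOperations;
import org.springframework.data.mongodb.core.script.ExecutableMongoScript;
import org.springframework.data.mongodb.core.script.NamedMongoScript;

public class ScriptQuotingSketch {

	public void runScripts(MongoOperations template) {

		ScriptOperations scripts = template.scriptOps();

		// Executed directly: the argument travels as a value, no quoting applied.
		Object direct = scripts.execute(new ExecutableMongoScript("function(x) { return x; }"), "heisenberg");

		// Called by name: the argument is rendered into the call string, i.e. echo('heisenberg').
		scripts.register(new NamedMongoScript("echo", "function(x) { return x; }"));
		Object byName = scripts.call("echo", "heisenberg");
	}
}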
@@ -0,0 +1,32 @@
/*
 * Copyright 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import com.mongodb.WriteConcern;

/**
 * Default {@link WriteConcernResolver} resolving the {@link WriteConcern} from the given {@link MongoAction}.
 *
 * @author Oliver Gierke
 */
enum DefaultWriteConcernResolver implements WriteConcernResolver {

	INSTANCE;

	public WriteConcern resolve(MongoAction action) {
		return action.getDefaultWriteConcern();
	}
}
@@ -0,0 +1,72 @@
|
||||
/*
|
||||
* Copyright 2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Value object to mitigate different representations of geo command execution results in MongoDB.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @soundtrack Fruitcake - Jeff Coffin (The Inside of the Outside)
|
||||
*/
|
||||
class GeoCommandStatistics {
|
||||
|
||||
private static final GeoCommandStatistics NONE = new GeoCommandStatistics(new BasicDBObject());
|
||||
|
||||
private final DBObject source;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoCommandStatistics} instance with the given source document.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
*/
|
||||
private GeoCommandStatistics(DBObject source) {
|
||||
|
||||
Assert.notNull(source, "Source document must not be null!");
|
||||
this.source = source;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoCommandStatistics} from the given command result extracting the statistics.
|
||||
*
|
||||
* @param commandResult must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static GeoCommandStatistics from(DBObject commandResult) {
|
||||
|
||||
Assert.notNull(commandResult, "Command result must not be null!");
|
||||
|
||||
Object stats = commandResult.get("stats");
|
||||
return stats == null ? NONE : new GeoCommandStatistics((DBObject) stats);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the average distance reported by the command result. Mitigating a removal of the field in case the command
|
||||
* didn't return any result introduced in MongoDB 3.2 RC1.
|
||||
*
|
||||
* @return
|
||||
* @see https://jira.mongodb.org/browse/SERVER-21024
|
||||
*/
|
||||
public double getAverageDistance() {
|
||||
|
||||
Object averageDistance = source.get("avgDistance");
|
||||
return averageDistance == null ? Double.NaN : (Double) averageDistance;
|
||||
}
|
||||
}
|
||||
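Since the class is package-private it is only reachable from core-package code and tests, but the behaviour it encapsulates is easy to see in isolation. A test-style sketch, assuming it lives in the same package:

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class GeoCommandStatisticsSketch {

	public void averageDistanceHandling() {

		// A geoNear result carrying a stats sub-document exposes its avgDistance ...
		DBObject withStats = new BasicDBObject("stats", new BasicDBObject("avgDistance", 2.5d));
		double average = GeoCommandStatistics.from(withStats).getAverageDistance(); // 2.5

		// ... while a result without stats (possible since MongoDB 3.2 RC1 when nothing matched)
		// falls back to NaN instead of failing on the missing field.
		double fallback = GeoCommandStatistics.from(new BasicDBObject()).getAverageDistance(); // Double.NaN
	}
}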
@@ -49,7 +49,7 @@ public class MongoAction {
|
||||
* @param collectionName the collection name, must not be {@literal null} or empty.
|
||||
* @param entityType the POJO that is being operated against
|
||||
* @param document the converted DBObject from the POJO or Spring Update object
|
||||
* @param query the converted DBOjbect from the Spring Query object
|
||||
* @param query the converted DBObject from the Spring Query object
|
||||
*/
|
||||
public MongoAction(WriteConcern defaultWriteConcern, MongoActionOperation mongoActionOperation,
|
||||
String collectionName, Class<?> entityType, DBObject document, DBObject query) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -25,5 +25,5 @@ package org.springframework.data.mongodb.core;
|
||||
*/
|
||||
public enum MongoActionOperation {
|
||||
|
||||
REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE
|
||||
REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE, BULK;
|
||||
}
|
||||
|
||||
@@ -123,7 +123,7 @@ public abstract class MongoDbUtils {
|
||||
|
||||
DB db = mongo.getDB(databaseName);
|
||||
|
||||
if (requiresAuthDbAuthentication(credentials)) {
|
||||
if (!(mongo instanceof MongoClient) && requiresAuthDbAuthentication(credentials)) {
|
||||
ReflectiveDbInvoker.authenticate(mongo, db, credentials, authenticationDatabaseName);
|
||||
}
|
||||
|
||||
@@ -199,8 +199,8 @@ public abstract class MongoDbUtils {
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if credentials present. In case we're using a monog-java-driver version 3 or above we do not have the need
|
||||
* for authentication as the auth data has to be provied within the MongoClient
|
||||
* Check if credentials present. In case we're using a mongo-java-driver version 3 or above we do not have the need
|
||||
* for authentication as the auth data has to be provided within the MongoClient
|
||||
*
|
||||
* @param credentials
|
||||
* @return
|
||||
|
||||
@@ -25,10 +25,14 @@ import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.dao.DuplicateKeyException;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.InvalidDataAccessResourceUsageException;
|
||||
import org.springframework.dao.PermissionDeniedDataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.BulkOperationException;
|
||||
import org.springframework.data.mongodb.UncategorizedMongoDbException;
|
||||
import org.springframework.data.mongodb.util.MongoDbErrorCodes;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.MongoException;
|
||||
|
||||
/**
|
||||
@@ -42,12 +46,12 @@ import com.mongodb.MongoException;
|
||||
*/
|
||||
public class MongoExceptionTranslator implements PersistenceExceptionTranslator {
|
||||
|
||||
private static final Set<String> DULICATE_KEY_EXCEPTIONS = new HashSet<String>(Arrays.asList(
|
||||
"MongoException.DuplicateKey", "DuplicateKeyException"));
|
||||
private static final Set<String> DULICATE_KEY_EXCEPTIONS = new HashSet<String>(
|
||||
Arrays.asList("MongoException.DuplicateKey", "DuplicateKeyException"));
|
||||
|
||||
private static final Set<String> RESOURCE_FAILURE_EXCEPTIONS = new HashSet<String>(Arrays.asList(
|
||||
"MongoException.Network", "MongoSocketException", "MongoException.CursorNotFound",
|
||||
"MongoCursorNotFoundException", "MongoServerSelectionException", "MongoTimeoutException"));
|
||||
private static final Set<String> RESOURCE_FAILURE_EXCEPTIONS = new HashSet<String>(
|
||||
Arrays.asList("MongoException.Network", "MongoSocketException", "MongoException.CursorNotFound",
|
||||
"MongoCursorNotFoundException", "MongoServerSelectionException", "MongoTimeoutException"));
|
||||
|
||||
private static final Set<String> RESOURCE_USAGE_EXCEPTIONS = new HashSet<String>(
|
||||
Arrays.asList("MongoInternalException"));
|
||||
@@ -81,17 +85,24 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
|
||||
return new DataIntegrityViolationException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
if (ex instanceof BulkWriteException) {
|
||||
return new BulkOperationException(ex.getMessage(), (BulkWriteException) ex);
|
||||
}
|
||||
|
||||
// All other MongoExceptions
|
||||
if (ex instanceof MongoException) {
|
||||
|
||||
int code = ((MongoException) ex).getCode();
|
||||
|
||||
if (code == 11000 || code == 11001) {
|
||||
if (MongoDbErrorCodes.isDuplicateKeyCode(code)) {
|
||||
throw new DuplicateKeyException(ex.getMessage(), ex);
|
||||
} else if (code == 12000 || code == 13440) {
|
||||
} else if (MongoDbErrorCodes.isDataAccessResourceFailureCode(code)) {
|
||||
throw new DataAccessResourceFailureException(ex.getMessage(), ex);
|
||||
} else if (code == 10003 || code == 12001 || code == 12010 || code == 12011 || code == 12012) {
|
||||
} else if (MongoDbErrorCodes.isInvalidDataAccessApiUsageCode(code) || code == 10003 || code == 12001
|
||||
|| code == 12010 || code == 12011 || code == 12012) {
|
||||
throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex);
|
||||
} else if (MongoDbErrorCodes.isPermissionDeniedCode(code)) {
|
||||
throw new PermissionDeniedDataAccessException(ex.getMessage(), ex);
|
||||
}
|
||||
return new UncategorizedMongoDbException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
@@ -20,6 +20,7 @@ import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
@@ -190,7 +191,7 @@ public interface MongoOperations {
|
||||
<T> DBCollection createCollection(Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Create a collect with a name based on the provided entity class using the options.
|
||||
* Create a collection with a name based on the provided entity class using the options.
|
||||
*
|
||||
* @param entityClass class that determines the collection to create
|
||||
* @param collectionOptions options to use when creating the collection.
|
||||
@@ -207,7 +208,7 @@ public interface MongoOperations {
|
||||
DBCollection createCollection(String collectionName);
|
||||
|
||||
/**
|
||||
* Create a collect with the provided name and options.
|
||||
* Create a collection with the provided name and options.
|
||||
*
|
||||
* @param collectionName name of the collection
|
||||
* @param collectionOptions options to use when creating the collection.
|
||||
@@ -292,6 +293,34 @@ public interface MongoOperations {
|
||||
*/
|
||||
ScriptOperations scriptOps();
|
||||
|
||||
/**
|
||||
* Returns a new {@link BulkOperations} for the given collection.
|
||||
*
|
||||
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
|
||||
* @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
|
||||
* @return {@link BulkOperations} on the named collection
|
||||
*/
|
||||
BulkOperations bulkOps(BulkMode mode, String collectionName);
|
||||
|
||||
/**
|
||||
* Returns a new {@link BulkOperations} for the given entity type.
|
||||
*
|
||||
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
|
||||
* @param entityType the name of the entity class, must not be {@literal null}.
|
||||
* @return {@link BulkOperations} on the named collection associated of the given entity class.
|
||||
*/
|
||||
BulkOperations bulkOps(BulkMode mode, Class<?> entityType);
|
||||
|
||||
/**
|
||||
* Returns a new {@link BulkOperations} for the given entity type and collection name.
|
||||
*
|
||||
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
|
||||
* @param entityClass the name of the entity class, must not be {@literal null}.
|
||||
* @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
|
||||
* @return {@link BulkOperations} on the named collection associated with the given entity class.
|
||||
*/
|
||||
BulkOperations bulkOps(BulkMode mode, Class<?> entityType, String collectionName);
|
||||
|
||||
/**
|
||||
* Query for a list of objects of type T from the collection used by the entity class.
|
||||
* <p/>
|
||||
@@ -600,8 +629,8 @@ public interface MongoOperations {
|
||||
<T> T findById(Object id, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
@@ -612,8 +641,8 @@ public interface MongoOperations {
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
@@ -625,8 +654,8 @@ public interface MongoOperations {
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
@@ -639,8 +668,8 @@ public interface MongoOperations {
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* Triggers <a href="http://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify
|
||||
* <a/> to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
@@ -728,9 +757,9 @@ public interface MongoOperations {
|
||||
* <p/>
|
||||
 * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert"
|
||||
* >Spring's Type Conversion"</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert" >
|
||||
* Spring's Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -785,9 +814,9 @@ public interface MongoOperations {
|
||||
* <p/>
|
||||
 * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert"
|
||||
* >Spring's Type Conversion"</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/validation.html#core-convert" >
|
||||
* Spring's Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection
|
||||
*/
|
||||
|
||||
File diff suppressed because it is too large
@@ -19,6 +19,7 @@ import java.net.UnknownHostException;
|
||||
|
||||
import org.springframework.beans.factory.DisposableBean;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
@@ -103,8 +104,8 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
*/
|
||||
@Deprecated
|
||||
public SimpleMongoDbFactory(MongoURI uri) throws MongoException, UnknownHostException {
|
||||
this(new Mongo(uri), uri.getDatabase(), new UserCredentials(uri.getUsername(), parseChars(uri.getPassword())),
|
||||
true, uri.getDatabase());
|
||||
this(new Mongo(uri), uri.getDatabase(), new UserCredentials(uri.getUsername(), parseChars(uri.getPassword())), true,
|
||||
uri.getDatabase());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -132,6 +133,11 @@ public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory {
|
||||
private SimpleMongoDbFactory(Mongo mongo, String databaseName, UserCredentials credentials,
|
||||
boolean mongoInstanceCreated, String authenticationDatabaseName) {
|
||||
|
||||
if (mongo instanceof MongoClient && (credentials != null && !UserCredentials.NO_CREDENTIALS.equals(credentials))) {
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
"Usage of 'UserCredentials' with 'MongoClient' is no longer supported. Please use 'MongoCredential' for 'MongoClient' or just 'Mongo'.");
|
||||
}
|
||||
|
||||
Assert.notNull(mongo, "Mongo must not be null");
|
||||
Assert.hasText(databaseName, "Database name must not be empty");
|
||||
Assert.isTrue(databaseName.matches("[\\w-]+"),
|
||||
|
||||
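With UserCredentials rejected for MongoClient instances, authentication moves onto the client itself. A sketch of the supported setup after this change; host, database and user details are placeholders:

import java.util.Collections;

import org.springframework.data.mongodb.core.SimpleMongoDbFactory;

import com.mongodb.MongoClient;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;

public class MongoClientCredentialsSketch {

	public SimpleMongoDbFactory createFactory() {

		// Credentials are configured on the MongoClient via MongoCredential ...
		MongoCredential credential = MongoCredential.createCredential("user", "admin", "secret".toCharArray());
		MongoClient client = new MongoClient(new ServerAddress("localhost", 27017),
				Collections.singletonList(credential));

		// ... so the factory only needs the client and the database to work on.
		return new SimpleMongoDbFactory(client, "database");
	}
}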
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
* Copyright 2013-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -26,6 +26,7 @@ import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField;
|
||||
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
@@ -37,10 +38,12 @@ import com.mongodb.DBObject;
|
||||
/**
|
||||
* An {@code Aggregation} is a representation of a list of aggregation steps to be performed by the MongoDB Aggregation
|
||||
* Framework.
|
||||
*
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @author Alessio Fachechi
|
||||
* @since 1.3
|
||||
*/
|
||||
public class Aggregation {
|
||||
@@ -65,7 +68,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public static Aggregation newAggregation(List<? extends AggregationOperation> operations) {
|
||||
@@ -74,7 +77,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
public static Aggregation newAggregation(AggregationOperation... operations) {
|
||||
@@ -84,7 +87,7 @@ public class Aggregation {
|
||||
/**
|
||||
* Returns a copy of this {@link Aggregation} with the given {@link AggregationOptions} set. Note that options are
|
||||
* supported in MongoDB version 2.6+.
|
||||
*
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return
|
||||
* @since 1.6
|
||||
@@ -97,7 +100,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypedAggregation} for the given type and {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
@@ -107,7 +110,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypedAggregation} for the given type and {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param operations must not be {@literal null} or empty.
|
||||
*/
|
||||
@@ -117,7 +120,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param aggregationOperations must not be {@literal null} or empty.
|
||||
*/
|
||||
protected Aggregation(AggregationOperation... aggregationOperations) {
|
||||
@@ -137,7 +140,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param aggregationOperations must not be {@literal null} or empty.
|
||||
*/
|
||||
protected Aggregation(List<AggregationOperation> aggregationOperations) {
|
||||
@@ -146,14 +149,14 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param aggregationOperations must not be {@literal null} or empty.
|
||||
* @param options must not be {@literal null} or empty.
|
||||
*/
|
||||
protected Aggregation(List<AggregationOperation> aggregationOperations, AggregationOptions options) {
|
||||
|
||||
Assert.notNull(aggregationOperations, "AggregationOperations must not be null!");
|
||||
Assert.isTrue(aggregationOperations.size() > 0, "At least one AggregationOperation has to be provided");
|
||||
Assert.isTrue(!aggregationOperations.isEmpty(), "At least one AggregationOperation has to be provided");
|
||||
Assert.notNull(options, "AggregationOptions must not be null!");
|
||||
|
||||
this.operations = aggregationOperations;
|
||||
@@ -162,7 +165,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* A pointer to the previous {@link AggregationOperation}.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public static String previousOperation() {
|
||||
@@ -171,7 +174,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} including the given fields.
|
||||
*
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@@ -181,7 +184,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
 * Creates a new {@link ProjectionOperation} including the given {@link Fields}.
|
||||
*
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@@ -191,7 +194,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Factory method to create a new {@link UnwindOperation} for the field with the given name.
|
||||
*
|
||||
*
|
||||
* @param fieldName must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
@@ -201,7 +204,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} for the given fields.
|
||||
*
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@@ -211,7 +214,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link GroupOperation} for the given {@link Fields}.
|
||||
*
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@@ -221,7 +224,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Factory method to create a new {@link SortOperation} for the given {@link Sort}.
|
||||
*
|
||||
*
|
||||
* @param sort must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@@ -231,7 +234,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Factory method to create a new {@link SortOperation} for the given sort {@link Direction} and {@code fields}.
|
||||
*
|
||||
*
|
||||
* @param direction must not be {@literal null}.
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
@@ -242,17 +245,28 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link SkipOperation} skipping the given number of elements.
|
||||
*
|
||||
*
|
||||
* @param elementsToSkip must not be less than zero.
|
||||
* @return
|
||||
* @deprecated prepare to get this one removed in favor of {@link #skip(long)}.
|
||||
*/
|
||||
public static SkipOperation skip(int elementsToSkip) {
|
||||
return new SkipOperation(elementsToSkip);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link SkipOperation} skipping the given number of elements.
|
||||
*
|
||||
* @param elementsToSkip must not be less than zero.
|
||||
* @return
|
||||
*/
|
||||
public static SkipOperation skip(long elementsToSkip) {
|
||||
return new SkipOperation(elementsToSkip);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link LimitOperation} limiting the result to the given number of elements.
|
||||
*
|
||||
*
|
||||
* @param maxElements must not be less than zero.
|
||||
* @return
|
||||
*/
|
||||
@@ -262,7 +276,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link MatchOperation} using the given {@link Criteria}.
|
||||
*
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@@ -270,12 +284,40 @@ public class Aggregation {
|
||||
return new MatchOperation(criteria);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link LookupOperation}.
|
||||
*
|
||||
* @param from must not be {@literal null}.
|
||||
* @param localField must not be {@literal null}.
|
||||
* @param foreignField must not be {@literal null}.
|
||||
* @param as must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 1.9
|
||||
*/
|
||||
public static LookupOperation lookup(String from, String localField, String foreignField, String as) {
|
||||
return lookup(field(from), field(localField), field(foreignField), field(as));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link LookupOperation} for the given {@link Fields}.
|
||||
*
|
||||
* @param from must not be {@literal null}.
|
||||
* @param localField must not be {@literal null}.
|
||||
* @param foreignField must not be {@literal null}.
|
||||
* @param as must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 1.9
|
||||
*/
|
||||
public static LookupOperation lookup(Field from, Field localField, Field foreignField, Field as) {
|
||||
return new LookupOperation(from, localField, foreignField, as);
|
||||
}
|
||||
|
||||
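The new lookup(...) factories render MongoDB 3.2's $lookup stage, a left outer join onto another collection. A sketch, assuming orders documents reference a customer's _id through a customerId field and a configured MongoOperations named template; collection and field names are illustrative:

import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;

import com.mongodb.DBObject;

public class LookupSketch {

	public AggregationResults<DBObject> customersWithOrders(MongoOperations template) {

		Aggregation aggregation = newAggregation( //
				match(where("status").is("active")), //
				// Joins matching orders into each customer document under an "orders" array field.
				lookup("orders", "_id", "customerId", "orders"));

		return template.aggregate(aggregation, "customers", DBObject.class);
	}
}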
/**
|
||||
* Creates a new {@link Fields} instance for the given field names.
|
||||
*
|
||||
* @see Fields#fields(String...)
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return
|
||||
* @see Fields#fields(String...)
|
||||
*/
|
||||
public static Fields fields(String... fields) {
|
||||
return Fields.fields(fields);
|
||||
@@ -283,7 +325,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance from the given field name and target reference.
|
||||
*
|
||||
*
|
||||
* @param name must not be {@literal null} or empty.
|
||||
* @param target must not be {@literal null} or empty.
|
||||
* @return
|
||||
@@ -295,7 +337,7 @@ public class Aggregation {
|
||||
/**
|
||||
 * Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The
 * {@code distanceField} defines the output field that contains the calculated distance.
|
||||
*
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param distanceField must not be {@literal null} or empty.
|
||||
* @return
|
||||
@@ -307,7 +349,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Returns a new {@link AggregationOptions.Builder}.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
* @since 1.6
|
||||
*/
|
||||
@@ -317,7 +359,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Converts this {@link Aggregation} specification to a {@link DBObject}.
|
||||
*
|
||||
*
|
||||
* @param inputCollectionName the name of the input collection
|
||||
* @return the {@code DBObject} representing this aggregation
|
||||
*/
|
||||
@@ -331,8 +373,14 @@ public class Aggregation {
|
||||
operationDocuments.add(operation.toDBObject(context));
|
||||
|
||||
if (operation instanceof FieldsExposingAggregationOperation) {
|
||||
|
||||
FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation;
|
||||
context = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), rootContext);
|
||||
|
||||
if (operation instanceof InheritsFieldsAggregationOperation) {
|
||||
context = new InheritingExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), context);
|
||||
} else {
|
||||
context = new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), context);
|
||||
}
|
||||
}
|
||||
}

@@ -356,7 +404,7 @@ public class Aggregation {

/**
* Simple {@link AggregationOperationContext} that just returns {@link FieldReference}s as is.
*
*
* @author Oliver Gierke
*/
private static class NoOpAggregationOperationContext implements AggregationOperationContext {

@@ -391,7 +439,7 @@ public class Aggregation {

/**
* Describes the system variables available in MongoDB aggregation framework pipeline expressions.
*
*
* @author Thomas Darimont
* @see http://docs.mongodb.org/manual/reference/aggregation-variables
*/

@@ -404,7 +452,7 @@ public class Aggregation {
/**
* Return {@literal true} if the given {@code fieldRef} denotes a well-known system variable, {@literal false}
* otherwise.
*
*
* @param fieldRef may be {@literal null}.
* @return
*/

@@ -1,5 +1,5 @@
/*
* Copyright 2015 the original author or authors.
* Copyright 2015-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -20,16 +20,17 @@ import com.mongodb.DBObject;
/**
* An {@link AggregationExpression} can be used with field expressions in aggregation pipeline stages like
* {@code project} and {@code group}.
*
*
* @author Thomas Darimont
* @author Oliver Gierke
* @author Christoph Strobl
*/
interface AggregationExpression {
public interface AggregationExpression {

/**
* Turns the {@link AggregationExpression} into a {@link DBObject} within the given
* {@link AggregationOperationContext}.
*
*
* @param context
* @return
*/

@@ -16,6 +16,7 @@
package org.springframework.data.mongodb.core.aggregation;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.springframework.util.Assert;

@@ -56,7 +57,7 @@ public enum AggregationFunctionExpressions {
static class FunctionExpression implements AggregationExpression {

private final String name;
private final Object[] values;
private final List<Object> values;

/**
* Creates a new {@link FunctionExpression} for the given name and values.

@@ -70,7 +71,7 @@ public enum AggregationFunctionExpressions {
Assert.notNull(values, "Values must not be null!");

this.name = name;
this.values = values;
this.values = Arrays.asList(values);
}

/*

@@ -80,10 +81,10 @@ public enum AggregationFunctionExpressions {
@Override
public DBObject toDbObject(AggregationOperationContext context) {

List<Object> args = new ArrayList<Object>(values.length);
List<Object> args = new ArrayList<Object>(values.size());

for (int i = 0; i < values.length; i++) {
args.add(unpack(values[i], context));
for (Object value : values) {
args.add(unpack(value, context));
}

return new BasicDBObject("$" + name, args);

@@ -88,7 +88,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
}

/**
* Creates a new {@link ExposedFields} instance for the given fields in either sythetic or non-synthetic way.
* Creates a new {@link ExposedFields} instance for the given fields in either synthetic or non-synthetic way.
*
* @param fields must not be {@literal null}.
* @param synthetic

@@ -107,7 +107,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
}

/**
* Creates a new {@link ExposedFields} with the given orignals and synthetics.
* Creates a new {@link ExposedFields} with the given originals and synthetics.
*
* @param originals must not be {@literal null}.
* @param synthetic must not be {@literal null}.

@@ -363,7 +363,7 @@ public final class ExposedFields implements Iterable<ExposedField> {
}

/**
* Returns the referenve value for the given field reference. Will return 1 for a synthetic, unaliased field or the
* Returns the reference value for the given field reference. Will return 1 for a synthetic, unaliased field or the
* raw rendering of the reference otherwise.
*
* @return

@@ -1,5 +1,5 @@
/*
* Copyright 2013-2014 the original author or authors.
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -24,9 +24,10 @@ import com.mongodb.DBObject;
/**
* {@link AggregationOperationContext} that combines the available field references from a given
* {@code AggregationOperationContext} and an {@link FieldsExposingAggregationOperation}.
*
*
* @author Thomas Darimont
* @author Oliver Gierke
* @author Mark Paluch
* @since 1.4
*/
class ExposedFieldsAggregationOperationContext implements AggregationOperationContext {

@@ -37,11 +38,12 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
/**
* Creates a new {@link ExposedFieldsAggregationOperationContext} from the given {@link ExposedFields}. Uses the given
* {@link AggregationOperationContext} to perform a mapping to mongo types if necessary.
*
*
* @param exposedFields must not be {@literal null}.
* @param rootContext must not be {@literal null}.
*/
public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields, AggregationOperationContext rootContext) {
public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields,
AggregationOperationContext rootContext) {

Assert.notNull(exposedFields, "ExposedFields must not be null!");
Assert.notNull(rootContext, "RootContext must not be null!");

@@ -79,7 +81,7 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo

/**
* Returns a {@link FieldReference} to the given {@link Field} with the given {@code name}.
*
*
* @param field may be {@literal null}
* @param name must not be {@literal null}
* @return

@@ -88,6 +90,22 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo

Assert.notNull(name, "Name must not be null!");

FieldReference exposedField = resolveExposedField(field, name);
if (exposedField != null) {
return exposedField;
}

throw new IllegalArgumentException(String.format("Invalid reference '%s'!", name));
}

/**
* Resolves a {@link field}/{@link name} for a {@link FieldReference} if possible.
*
* @param field may be {@literal null}
* @param name must not be {@literal null}
* @return the resolved reference or {@literal null}
*/
protected FieldReference resolveExposedField(Field field, String name) {
ExposedField exposedField = exposedFields.getField(name);

if (exposedField != null) {

@@ -111,7 +129,6 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
return new FieldReference(new ExposedField(name, true));
}
}

throw new IllegalArgumentException(String.format("Invalid reference '%s'!", name));
return null;
}
}

@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -16,17 +16,28 @@
package org.springframework.data.mongodb.core.aggregation;

/**
* {@link AggregationOperation} that exposes new {@link ExposedFields} that can be used for later aggregation pipeline
* {@code AggregationOperation}s.
*
* {@link AggregationOperation} that exposes {@link ExposedFields} that can be used for later aggregation pipeline
* {@code AggregationOperation}s. A {@link FieldsExposingAggregationOperation} implementing the
* {@link InheritsFieldsAggregationOperation} will expose fields from its parent operations. Not implementing
* {@link InheritsFieldsAggregationOperation} will replace existing exposed fields.
*
* @author Thomas Darimont
* @author Mark Paluch
*/
public interface FieldsExposingAggregationOperation extends AggregationOperation {

/**
* Returns the fields exposed by the {@link AggregationOperation}.
*
*
* @return will never be {@literal null}.
*/
ExposedFields getFields();

/**
* Marker interface for {@link AggregationOperation} that inherits fields from previous operations.
*/
static interface InheritsFieldsAggregationOperation extends FieldsExposingAggregationOperation {

}

}

@@ -0,0 +1,65 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.data.mongodb.core.aggregation;

import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
import org.springframework.util.Assert;

/**
* {@link ExposedFieldsAggregationOperationContext} that inherits fields from its parent
* {@link AggregationOperationContext}.
*
* @author Mark Paluch
*/
class InheritingExposedFieldsAggregationOperationContext extends ExposedFieldsAggregationOperationContext {

private final AggregationOperationContext previousContext;

/**
* Creates a new {@link ExposedFieldsAggregationOperationContext} from the given {@link ExposedFields}. Uses the given
* {@link AggregationOperationContext} to perform a mapping to mongo types if necessary.
*
* @param exposedFields must not be {@literal null}.
* @param previousContext must not be {@literal null}.
*/
public InheritingExposedFieldsAggregationOperationContext(ExposedFields exposedFields,
AggregationOperationContext previousContext) {

super(exposedFields, previousContext);
Assert.notNull(previousContext, "PreviousContext must not be null!");
this.previousContext = previousContext;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.aggregation.ExposedFieldsAggregationOperationContext#resolveExposedField(org.springframework.data.mongodb.core.aggregation.Field, java.lang.String)
*/
@Override
protected FieldReference resolveExposedField(Field field, String name) {

FieldReference fieldReference = super.resolveExposedField(field, name);
if (fieldReference != null) {
return fieldReference;
}

if (field != null) {
return previousContext.getReference(field);
}

return previousContext.getReference(name);
}
}

@@ -0,0 +1,196 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.aggregation;

import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
import org.springframework.util.Assert;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
* Encapsulates the aggregation framework {@code $lookup}-operation. We recommend to use the static factory method
* {@link Aggregation#lookup(String, String, String, String)} instead of creating instances of this class directly.
*
* @author Alessio Fachechi
* @author Christoph Strobl
* @author Mark Paluch
* @see http://docs.mongodb.org/manual/reference/aggregation/lookup/#stage._S_lookup
* @since 1.9
*/
public class LookupOperation implements FieldsExposingAggregationOperation, InheritsFieldsAggregationOperation {

private Field from;
private Field localField;
private Field foreignField;
private ExposedField as;

/**
* Creates a new {@link LookupOperation} for the given {@link Field}s.
*
* @param from must not be {@literal null}.
* @param localField must not be {@literal null}.
* @param foreignField must not be {@literal null}.
* @param as must not be {@literal null}.
*/
public LookupOperation(Field from, Field localField, Field foreignField, Field as) {

Assert.notNull(from, "From must not be null!");
Assert.notNull(localField, "LocalField must not be null!");
Assert.notNull(foreignField, "ForeignField must not be null!");
Assert.notNull(as, "As must not be null!");

this.from = from;
this.localField = localField;
this.foreignField = foreignField;
this.as = new ExposedField(as, true);
}

private LookupOperation() {
// used by builder
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
*/
@Override
public ExposedFields getFields() {
return ExposedFields.from(as);
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
*/
@Override
public DBObject toDBObject(AggregationOperationContext context) {

BasicDBObject lookupObject = new BasicDBObject();

lookupObject.append("from", from.getTarget());
lookupObject.append("localField", localField.getTarget());
lookupObject.append("foreignField", foreignField.getTarget());
lookupObject.append("as", as.getTarget());

return new BasicDBObject("$lookup", lookupObject);
}

/**
* Get a builder that allows creation of {@link LookupOperation}.
*
* @return
*/
public static FromBuilder newLookup() {
return new LookupOperationBuilder();
}

public static interface FromBuilder {

/**
* @param name the collection in the same database to perform the join with, must not be {@literal null} or empty.
* @return
*/
LocalFieldBuilder from(String name);
}

public static interface LocalFieldBuilder {

/**
* @param name the field from the documents input to the {@code $lookup} stage, must not be {@literal null} or
* empty.
* @return
*/
ForeignFieldBuilder localField(String name);
}

public static interface ForeignFieldBuilder {

/**
* @param name the field from the documents in the {@code from} collection, must not be {@literal null} or empty.
* @return
*/
AsBuilder foreignField(String name);
}

public static interface AsBuilder {

/**
* @param name the name of the new array field to add to the input documents, must not be {@literal null} or empty.
* @return
*/
LookupOperation as(String name);
}

/**
* Builder for fluent {@link LookupOperation} creation.
*
* @author Christoph Strobl
* @since 1.9
*/
public static final class LookupOperationBuilder
implements FromBuilder, LocalFieldBuilder, ForeignFieldBuilder, AsBuilder {

private final LookupOperation lookupOperation;

private LookupOperationBuilder() {
this.lookupOperation = new LookupOperation();
}

/**
* Creates new builder for {@link LookupOperation}.
*
* @return never {@literal null}.
*/
public static FromBuilder newBuilder() {
return new LookupOperationBuilder();
}

@Override
public LocalFieldBuilder from(String name) {

Assert.hasText(name, "'From' must not be null or empty!");
lookupOperation.from = Fields.field(name);
return this;
}

@Override
public LookupOperation as(String name) {

Assert.hasText(name, "'As' must not be null or empty!");
lookupOperation.as = new ExposedField(Fields.field(name), true);
return new LookupOperation(lookupOperation.from, lookupOperation.localField, lookupOperation.foreignField,
lookupOperation.as);
}

@Override
public AsBuilder foreignField(String name) {

Assert.hasText(name, "'ForeignField' must not be null or empty!");
lookupOperation.foreignField = Fields.field(name);
return this;
}

@Override
public ForeignFieldBuilder localField(String name) {

Assert.hasText(name, "'LocalField' must not be null or empty!");
lookupOperation.localField = Fields.field(name);
return this;
}
}
}
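For reference (not part of the commit), the fluent builder above is used like this; the collection and field names are hypothetical:

    LookupOperation lookup = LookupOperation.newLookup()
        .from("customers")          // foreign collection to join with
        .localField("customerId")   // field in the input documents
        .foreignField("_id")        // field in the foreign collection
        .as("customer");            // name of the resulting array field
    // Equivalent shortcut: Aggregation.lookup("customers", "customerId", "_id", "customer")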

@@ -1,5 +1,5 @@
/*
* Copyright 2013-2015 the original author or authors.
* Copyright 2013-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -20,7 +20,9 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField;
import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.FieldProjection;
import org.springframework.util.Assert;

@@ -40,6 +42,7 @@ import com.mongodb.DBObject;
* @author Tobias Trelle
* @author Thomas Darimont
* @author Oliver Gierke
* @author Christoph Strobl
* @since 1.3
*/
public class ProjectionOperation implements FieldsExposingAggregationOperation {

@@ -550,7 +553,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
/**
* Generates an {@code $mod} expression that divides the value of the given field by the previously mentioned field
* and returns the remainder.
*
*
* @param fieldReference
* @return
*/

@@ -564,7 +567,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
return project("size");
}

/*
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
*/

@@ -620,6 +623,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
*
* @author Oliver Gierke
* @author Thomas Darimont
* @author Mark Paluch
*/
static class FieldProjection extends Projection {

@@ -638,7 +642,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {

private FieldProjection(Field field, Object value) {

super(field);
super(new ExposedField(field.getName(), true));

this.field = field;
this.value = value;

@@ -730,7 +734,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
this.values = Arrays.asList(values);
}

/*
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDBObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
*/

@@ -763,6 +767,20 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
return field;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#getExposedField()
*/
@Override
public ExposedField getExposedField() {

if (!getField().isAliased()) {
return super.getExposedField();
}

return new ExposedField(new AggregationField(getField().getName()), true);
}

/**
* Creates a new instance of this {@link OperationProjection} with the given alias.
*

@@ -1,5 +1,5 @@
/*
* Copyright 2013-2015 the original author or authors.
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -38,7 +38,7 @@ public class SkipOperation implements AggregationOperation {
/**
* Creates a new {@link SkipOperation} skipping the given number of elements.
*
* @param skipCount number of documents to skip.
* @param skipCount number of documents to skip, must not be less than zero.
*/
public SkipOperation(long skipCount) {

@@ -1,5 +1,5 @@
/*
* Copyright 2013 the original author or authors.
* Copyright 2013-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -34,6 +34,7 @@ import com.mongodb.DBObject;
* property references into document field names.
*
* @author Oliver Gierke
* @author Mark Paluch
* @since 1.3
*/
public class TypeBasedAggregationOperationContext implements AggregationOperationContext {

@@ -95,7 +96,7 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio

PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(
field.getTarget(), type);
Field mappedField = field(propertyPath.getLeafProperty().getName(),
Field mappedField = field(field.getName(),
propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE));

return new FieldReference(new ExposedField(mappedField, true));

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -20,7 +20,7 @@ import java.math.BigInteger;
import org.bson.types.ObjectId;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.support.ConversionServiceFactory;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.data.convert.EntityInstantiators;
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToObjectIdConverter;

@@ -46,10 +46,8 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali
*
* @param conversionService
*/
@SuppressWarnings("deprecation")
public AbstractMongoConverter(GenericConversionService conversionService) {
this.conversionService = conversionService == null ? ConversionServiceFactory.createDefaultConversionService()
: conversionService;
this.conversionService = conversionService == null ? new DefaultConversionService() : conversionService;
}

/**

@@ -77,15 +75,13 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali
*/
private void initializeConverters() {

if (!conversionService.canConvert(ObjectId.class, String.class)) {
conversionService.addConverter(ObjectIdToStringConverter.INSTANCE);
}
if (!conversionService.canConvert(String.class, ObjectId.class)) {
conversionService.addConverter(StringToObjectIdConverter.INSTANCE);
}
conversionService.addConverter(ObjectIdToStringConverter.INSTANCE);
conversionService.addConverter(StringToObjectIdConverter.INSTANCE);

if (!conversionService.canConvert(ObjectId.class, BigInteger.class)) {
conversionService.addConverter(ObjectIdToBigIntegerConverter.INSTANCE);
}

if (!conversionService.canConvert(BigInteger.class, ObjectId.class)) {
conversionService.addConverter(BigIntegerToObjectIdConverter.INSTANCE);
}

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2015 the original author or authors.
* Copyright 2011-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -42,16 +42,6 @@ import org.springframework.data.convert.ReadingConverter;
import org.springframework.data.convert.ThreeTenBackPortConverters;
import org.springframework.data.convert.WritingConverter;
import org.springframework.data.mapping.model.SimpleTypeHolder;
import org.springframework.data.mongodb.core.convert.MongoConverters.BigDecimalToStringConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToStringConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.NamedMongoScriptToDBObjectConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.DBObjectToNamedMongoScriptCoverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.DBObjectToStringConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigDecimalConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigIntegerConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.StringToURLConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.TermToStringConverter;
import org.springframework.data.mongodb.core.convert.MongoConverters.URLToStringConverter;
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
import org.springframework.data.util.CacheValue;
import org.springframework.util.Assert;

@@ -66,6 +56,7 @@ import org.springframework.util.Assert;
* @author Oliver Gierke
* @author Thomas Darimont
* @author Christoph Strobl
* @author Mark Paluch
*/
public class CustomConversions {

@@ -112,17 +103,7 @@ public class CustomConversions {
// Add user provided converters to make sure they can override the defaults
toRegister.addAll(converters);
toRegister.add(CustomToStringConverter.INSTANCE);
toRegister.add(BigDecimalToStringConverter.INSTANCE);
toRegister.add(StringToBigDecimalConverter.INSTANCE);
toRegister.add(BigIntegerToStringConverter.INSTANCE);
toRegister.add(StringToBigIntegerConverter.INSTANCE);
toRegister.add(URLToStringConverter.INSTANCE);
toRegister.add(StringToURLConverter.INSTANCE);
toRegister.add(DBObjectToStringConverter.INSTANCE);
toRegister.add(TermToStringConverter.INSTANCE);
toRegister.add(NamedMongoScriptToDBObjectConverter.INSTANCE);
toRegister.add(DBObjectToNamedMongoScriptCoverter.INSTANCE);

toRegister.addAll(MongoConverters.getConvertersToRegister());
toRegister.addAll(JodaTimeConverters.getConvertersToRegister());
toRegister.addAll(GeoConverters.getConvertersToRegister());
toRegister.addAll(Jsr310Converters.getConvertersToRegister());

@@ -186,14 +167,15 @@ public class CustomConversions {
}

if (!added) {
throw new IllegalArgumentException("Given set contains element that is neither Converter nor ConverterFactory!");
throw new IllegalArgumentException(
"Given set contains element that is neither Converter nor ConverterFactory!");
}
}
}

/**
* Registers a conversion for the given converter. Inspects either generics or the {@link ConvertiblePair}s returned
* by a {@link GenericConverter}.
* Registers a conversion for the given converter. Inspects either generics of {@link Converter} and
* {@link ConverterFactory} or the {@link ConvertiblePair}s returned by a {@link GenericConverter}.
*
* @param converter
*/

@@ -208,6 +190,10 @@ public class CustomConversions {
for (ConvertiblePair pair : genericConverter.getConvertibleTypes()) {
register(new ConverterRegistration(pair, isReading, isWriting));
}
} else if (converter instanceof ConverterFactory) {

Class<?>[] arguments = GenericTypeResolver.resolveTypeArguments(converter.getClass(), ConverterFactory.class);
register(new ConverterRegistration(arguments[0], arguments[1], isReading, isWriting));
} else if (converter instanceof Converter) {
Class<?>[] arguments = GenericTypeResolver.resolveTypeArguments(converter.getClass(), Converter.class);
register(new ConverterRegistration(arguments[0], arguments[1], isReading, isWriting));

@@ -412,6 +398,10 @@ public class CustomConversions {

INSTANCE;

/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.GenericConverter#getConvertibleTypes()
*/
public Set<ConvertiblePair> getConvertibleTypes() {

ConvertiblePair localeToString = new ConvertiblePair(Locale.class, String.class);

@@ -420,6 +410,10 @@ public class CustomConversions {
return new HashSet<ConvertiblePair>(Arrays.asList(localeToString, booleanToString));
}

/*
* (non-Javadoc)
* @see org.springframework.core.convert.converter.GenericConverter#convert(java.lang.Object, org.springframework.core.convert.TypeDescriptor, org.springframework.core.convert.TypeDescriptor)
*/
public Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor targetType) {
return source.toString();
}

@@ -75,9 +75,7 @@ class DBObjectAccessor {
String part = parts.next();

if (parts.hasNext()) {
BasicDBObject nestedDbObject = new BasicDBObject();
dbObject.put(part, nestedDbObject);
dbObject = nestedDbObject;
dbObject = getOrCreateNestedDbObject(part, dbObject);
} else {
dbObject.put(part, value);
}

@@ -116,8 +114,48 @@ class DBObjectAccessor {
return result;
}

/**
* Returns whether the underlying {@link DBObject} has a value ({@literal null} or non-{@literal null}) for the given
* {@link MongoPersistentProperty}.
*
* @param property must not be {@literal null}.
* @return
*/
public boolean hasValue(MongoPersistentProperty property) {

Assert.notNull(property, "Property must not be null!");

String fieldName = property.getFieldName();

if (!fieldName.contains(".")) {
return this.dbObject.containsField(fieldName);
}

String[] parts = fieldName.split("\\.");
Map<String, Object> source = this.dbObject;
Object result = null;

for (int i = 1; i < parts.length; i++) {

result = source.get(parts[i - 1]);
source = getAsMap(result);

if (source == null) {
return false;
}
}

return source.containsKey(parts[parts.length - 1]);
}

/**
* Returns the given source object as map, i.e. {@link BasicDBObject}s and maps as is or {@literal null} otherwise.
*
* @param source can be {@literal null}.
* @return
*/
@SuppressWarnings("unchecked")
private Map<String, Object> getAsMap(Object source) {
private static Map<String, Object> getAsMap(Object source) {

if (source instanceof BasicDBObject) {
return (BasicDBObject) source;

@@ -129,4 +167,26 @@ class DBObjectAccessor {

return null;
}

/**
* Returns the {@link DBObject} which either already exists in the given source under the given key, or creates a new
* nested one, registers it with the source and returns it.
*
* @param key must not be {@literal null} or empty.
* @param source must not be {@literal null}.
* @return
*/
private static DBObject getOrCreateNestedDbObject(String key, DBObject source) {

Object existing = source.get(key);

if (existing instanceof BasicDBObject) {
return (BasicDBObject) existing;
}

DBObject nested = new BasicDBObject();
source.put(key, nested);

return nested;
}
}
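A small illustration (not part of the commit) of the new DBObjectAccessor.hasValue(...) traversal: for a property mapped to the dotted field name "address.city", the accessor walks the nested BasicDBObjects segment by segment and only reports a value when the full path resolves. The document below is hypothetical, and the class is package-private, so treat this as a sketch rather than runnable client code.

    // { "address" : { "city" : "Vienna" } }
    DBObject document = new BasicDBObject("address", new BasicDBObject("city", "Vienna"));
    DBObjectAccessor accessor = new DBObjectAccessor(document);
    // accessor.hasValue(property) is true for a property whose field name is "address.city",
    // and false for "address.zip", because the last path segment is missing.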

@@ -1,5 +1,5 @@
/*
* Copyright 2013-2015 the original author or authors.
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -106,7 +106,7 @@ public class DefaultDbRefResolver implements DbRefResolver {
*/
@Override
public DBObject fetch(DBRef dbRef) {
return ReflectiveDBRefResolver.fetch(mongoDbFactory.getDb(), dbRef);
return ReflectiveDBRefResolver.fetch(mongoDbFactory, dbRef);
}

/**

@@ -180,8 +180,8 @@ public class DefaultDbRefResolver implements DbRefResolver {
* @author Oliver Gierke
* @author Christoph Strobl
*/
static class LazyLoadingInterceptor implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor,
Serializable {
static class LazyLoadingInterceptor
implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, Serializable {

private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD;

@@ -387,7 +387,8 @@ public class DefaultDbRefResolver implements DbRefResolver {
} catch (RuntimeException ex) {

DataAccessException translatedException = this.exceptionTranslator.translateExceptionIfPossible(ex);
throw new LazyLoadingException("Unable to lazily resolve DBRef!", translatedException);
throw new LazyLoadingException("Unable to lazily resolve DBRef!",
translatedException != null ? translatedException : ex);
}
}

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2013 the original author or authors.
* Copyright 2011-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -45,9 +45,9 @@ import com.mongodb.DBObject;
public class DefaultMongoTypeMapper extends DefaultTypeMapper<DBObject> implements MongoTypeMapper {

public static final String DEFAULT_TYPE_KEY = "_class";
@SuppressWarnings("rawtypes")//
@SuppressWarnings("rawtypes") //
private static final TypeInformation<List> LIST_TYPE_INFO = ClassTypeInformation.from(List.class);
@SuppressWarnings("rawtypes")//
@SuppressWarnings("rawtypes") //
private static final TypeInformation<Map> MAP_TYPE_INFO = ClassTypeInformation.from(Map.class);

private final TypeAliasAccessor<DBObject> accessor;

@@ -58,12 +58,12 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper<DBObject> implemen
}

public DefaultMongoTypeMapper(String typeKey) {
this(typeKey, Arrays.asList(SimpleTypeInformationMapper.INSTANCE));
this(typeKey, Arrays.asList(new SimpleTypeInformationMapper()));
}

public DefaultMongoTypeMapper(String typeKey, MappingContext<? extends PersistentEntity<?, ?>, ?> mappingContext) {
this(typeKey, new DBObjectTypeAliasAccessor(typeKey), mappingContext, Arrays
.asList(SimpleTypeInformationMapper.INSTANCE));
this(typeKey, new DBObjectTypeAliasAccessor(typeKey), mappingContext,
Arrays.asList(new SimpleTypeInformationMapper()));
}

public DefaultMongoTypeMapper(String typeKey, List<? extends TypeInformationMapper> mappers) {

@@ -71,7 +71,8 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper<DBObject> implemen
}

private DefaultMongoTypeMapper(String typeKey, TypeAliasAccessor<DBObject> accessor,
MappingContext<? extends PersistentEntity<?, ?>, ?> mappingContext, List<? extends TypeInformationMapper> mappers) {
MappingContext<? extends PersistentEntity<?, ?>, ?> mappingContext,
List<? extends TypeInformationMapper> mappers) {

super(accessor, mappingContext, mappers);

@@ -1,5 +1,5 @@
/*
* Copyright 2014-2015 the original author or authors.
* Copyright 2014-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -111,9 +111,17 @@ abstract class GeoConverters {
@Override
public Point convert(DBObject source) {

if (source == null) {
return null;
}

Assert.isTrue(source.keySet().size() == 2, "Source must contain 2 elements");

return source == null ? null : new Point((Double) source.get("x"), (Double) source.get("y"));
if (source.containsField("type")) {
return DbObjectToGeoJsonPointConverter.INSTANCE.convert(source);
}

return new Point((Double) source.get("x"), (Double) source.get("y"));
}
}

@@ -591,7 +599,7 @@ abstract class GeoConverters {
Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "Point"),
String.format("Cannot convert type '%s' to Point.", source.get("type")));

List<Double> dbl = (List<Double>) source.get("coordinates");
List<Number> dbl = (List<Number>) source.get("coordinates");
return new GeoJsonPoint(dbl.get(0).doubleValue(), dbl.get(1).doubleValue());
}
}

@@ -824,7 +832,7 @@ abstract class GeoConverters {

Assert.isInstanceOf(List.class, point);

List<Double> coordinatesList = (List<Double>) point;
List<Number> coordinatesList = (List<Number>) point;

points.add(new GeoJsonPoint(coordinatesList.get(0).doubleValue(), coordinatesList.get(1).doubleValue()));
}

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2015 by the original author(s).
* Copyright 2011-2017 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -20,6 +20,7 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

@@ -29,10 +30,10 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.CollectionFactory;
import org.springframework.core.convert.ConversionException;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.data.convert.CollectionFactory;
import org.springframework.data.convert.EntityInstantiator;
import org.springframework.data.convert.TypeMapper;
import org.springframework.data.mapping.Association;

@@ -95,7 +96,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App

/**
* Creates a new {@link MappingMongoConverter} given the new {@link DbRefResolver} and {@link MappingContext}.
*
* @param mongoDbFactory must not be {@literal null}.
* @param dbRefResolver must not be {@literal null}.
* @param mappingContext must not be {@literal null}.
*/
public MappingMongoConverter(DbRefResolver dbRefResolver,

@@ -136,8 +137,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
* @param typeMapper the typeMapper to set
*/
public void setTypeMapper(MongoTypeMapper typeMapper) {
this.typeMapper = typeMapper == null ? new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY,
mappingContext) : typeMapper;
this.typeMapper = typeMapper == null
? new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext) : typeMapper;
}

/*

@@ -238,7 +239,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
PersistentEntityParameterValueProvider<MongoPersistentProperty> parameterProvider = new PersistentEntityParameterValueProvider<MongoPersistentProperty>(
entity, provider, path.getCurrentObject());

return new ConverterAwareSpELExpressionParameterValueProvider(evaluator, conversionService, parameterProvider, path);
return new ConverterAwareSpELExpressionParameterValueProvider(evaluator, conversionService, parameterProvider,
path);
}

private <S extends Object> S read(final MongoPersistentEntity<S> entity, final DBObject dbo, final ObjectPath path) {

@@ -257,13 +259,14 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App

// make sure id property is set before all other properties
Object idValue = null;
final DBObjectAccessor dbObjectAccessor = new DBObjectAccessor(dbo);

if (idProperty != null) {
if (idProperty != null && dbObjectAccessor.hasValue(idProperty)) {
idValue = getValueInternal(idProperty, dbo, evaluator, path);
accessor.setProperty(idProperty, idValue);
}

final ObjectPath currentPath = path.push(result, entity, idValue);
final ObjectPath currentPath = path.push(result, entity, idValue != null ? dbObjectAccessor.get(idProperty) : null);

// Set properties not already set in the constructor
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {

@@ -274,7 +277,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return;
}

if (!dbo.containsField(prop.getFieldName()) || entity.isConstructorArgument(prop)) {
if (entity.isConstructorArgument(prop) || !dbObjectAccessor.hasValue(prop)) {
return;
}

@@ -287,9 +290,9 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
public void doWithAssociation(Association<MongoPersistentProperty> association) {

final MongoPersistentProperty property = association.getInverse();
Object value = dbo.get(property.getFieldName());
Object value = dbObjectAccessor.get(property);

if (value == null) {
if (value == null || entity.isConstructorArgument(property)) {
return;
}

@@ -311,12 +314,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.convert.MongoWriter#toDBRef(java.lang.Object, org.springframework.data.mongodb.core.mapping.MongoPersistentProperty)
*/
public DBRef toDBRef(Object object, MongoPersistentProperty referingProperty) {
public DBRef toDBRef(Object object, MongoPersistentProperty referringProperty) {

org.springframework.data.mongodb.core.mapping.DBRef annotation = null;

if (referingProperty != null) {
annotation = referingProperty.getDBRef();
if (referringProperty != null) {
annotation = referringProperty.getDBRef();
Assert.isTrue(annotation != null, "The referenced property has to be mapped with @DBRef!");
}

@@ -325,14 +328,14 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return ((LazyLoadingProxy) object).toDBRef();
}

return createDBRef(object, referingProperty);
return createDBRef(object, referringProperty);
}

/**
* Root entry method into write conversion. Adds a type discriminator to the {@link DBObject}. Shouldn't be called for
* nested conversions.
*
* @see org.springframework.data.mongodb.core.core.convert.MongoWriter#write(java.lang.Object, com.mongodb.DBObject)
* @see org.springframework.data.mongodb.core.convert.MongoWriter#write(java.lang.Object, com.mongodb.DBObject)
*/
public void write(final Object obj, final DBObject dbo) {

@@ -510,8 +513,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
: new BasicDBObject();
addCustomTypeKeyIfNecessary(ClassTypeInformation.from(prop.getRawType()), obj, propDbObj);

MongoPersistentEntity<?> entity = isSubtype(prop.getType(), obj.getClass()) ? mappingContext
.getPersistentEntity(obj.getClass()) : mappingContext.getPersistentEntity(type);
MongoPersistentEntity<?> entity = isSubtype(prop.getType(), obj.getClass())
? mappingContext.getPersistentEntity(obj.getClass()) : mappingContext.getPersistentEntity(type);

writeInternal(obj, propDbObj, entity);
accessor.put(prop, propDbObj);

@@ -700,8 +703,10 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
}

if (mapKeyDotReplacement == null) {
throw new MappingException(String.format("Map key %s contains dots but no replacement was configured! Make "
+ "sure map keys don't contain dots in the first place or configure an appropriate replacement!", source));
throw new MappingException(String.format(
"Map key %s contains dots but no replacement was configured! Make "
+ "sure map keys don't contain dots in the first place or configure an appropriate replacement!",
source));
}

return source.replaceAll("\\.", mapKeyDotReplacement);

@@ -719,8 +724,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return (String) key;
}

return conversions.hasCustomWriteTarget(key.getClass(), String.class) ? (String) getPotentiallyConvertedSimpleWrite(key)
: key.toString();
return conversions.hasCustomWriteTarget(key.getClass(), String.class)
? (String) getPotentiallyConvertedSimpleWrite(key) : key.toString();
}

/**

@@ -881,24 +886,22 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App

Class<?> collectionType = targetType.getType();

if (sourceValue.isEmpty()) {
return getPotentiallyConvertedSimpleRead(new HashSet<Object>(), collectionType);
}

TypeInformation<?> componentType = targetType.getComponentType();
Class<?> rawComponentType = componentType == null ? null : componentType.getType();

collectionType = Collection.class.isAssignableFrom(collectionType) ? collectionType : List.class;
Collection<Object> items = targetType.getType().isArray() ? new ArrayList<Object>() : CollectionFactory
.createCollection(collectionType, rawComponentType, sourceValue.size());
Collection<Object> items = targetType.getType().isArray() ? new ArrayList<Object>()
: CollectionFactory.createCollection(collectionType, rawComponentType, sourceValue.size());

for (int i = 0; i < sourceValue.size(); i++) {
if (sourceValue.isEmpty()) {
return getPotentiallyConvertedSimpleRead(items, collectionType);
}

Object dbObjItem = sourceValue.get(i);
for (Object dbObjItem : sourceValue) {

if (dbObjItem instanceof DBRef) {
items.add(DBRef.class.equals(rawComponentType) ? dbObjItem : read(componentType, readRef((DBRef) dbObjItem),
path));
items.add(
DBRef.class.equals(rawComponentType) ? dbObjItem : read(componentType, readRef((DBRef) dbObjItem), path));
} else if (dbObjItem instanceof DBObject) {
items.add(read(componentType, (DBObject) dbObjItem, path));
} else {

@@ -988,20 +991,32 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
}

if (obj instanceof DBObject) {

DBObject newValueDbo = new BasicDBObject();

for (String vk : ((DBObject) obj).keySet()) {

Object o = ((DBObject) obj).get(vk);
newValueDbo.put(vk, convertToMongoType(o, typeHint));
}

return newValueDbo;
}

if (obj instanceof Map) {
DBObject result = new BasicDBObject();
for (Map.Entry<Object, Object> entry : ((Map<Object, Object>) obj).entrySet()) {
result.put(entry.getKey().toString(), convertToMongoType(entry.getValue(), typeHint));

Map<Object, Object> converted = new LinkedHashMap<Object, Object>();

for (Entry<Object, Object> entry : ((Map<Object, Object>) obj).entrySet()) {

TypeInformation<? extends Object> valueTypeHint = typeHint != null && typeHint.getMapValueType() != null
? typeHint.getMapValueType() : typeHint;

converted.put(getPotentiallyConvertedSimpleWrite(entry.getKey()).toString(),
convertToMongoType(entry.getValue(), valueTypeHint));
}
return result;

return new BasicDBObject(converted);
}

if (obj.getClass().isArray()) {

@@ -1016,10 +1031,14 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
this.write(obj, newDbo);

if (typeInformation == null) {
return removeTypeInfoRecursively(newDbo);
return removeTypeInfo(newDbo, true);
}

return !obj.getClass().equals(typeInformation.getType()) ? newDbo : removeTypeInfoRecursively(newDbo);
if (typeInformation.getType().equals(NestedDocument.class)) {
return removeTypeInfo(newDbo, false);
}

return !obj.getClass().equals(typeInformation.getType()) ? newDbo : removeTypeInfo(newDbo, true);
}

public BasicDBList maybeConvertList(Iterable<?> source, TypeInformation<?> typeInformation) {

@@ -1033,12 +1052,13 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
}

/**
* Removes the type information from the conversion result.
* Removes the type information from the entire conversion result.
*
* @param object
* @param recursively whether to apply the removal recursively
* @return
*/
private Object removeTypeInfoRecursively(Object object) {
private Object removeTypeInfo(Object object, boolean recursively) {

if (!(object instanceof DBObject)) {
return object;

@@ -1046,19 +1066,29 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App

DBObject dbObject = (DBObject) object;
String keyToRemove = null;

for (String key : dbObject.keySet()) {

if (typeMapper.isTypeKey(key)) {
keyToRemove = key;
if (recursively) {

Object value = dbObject.get(key);

if (value instanceof BasicDBList) {
for (Object element : (BasicDBList) value) {
removeTypeInfo(element, recursively);
}
} else {
removeTypeInfo(value, recursively);
}
}

Object value = dbObject.get(key);
if (value instanceof BasicDBList) {
for (Object element : (BasicDBList) value) {
removeTypeInfoRecursively(element);
if (typeMapper.isTypeKey(key)) {

keyToRemove = key;

if (!recursively) {
break;
}
} else {
removeTypeInfoRecursively(value);
}
}

@@ -1122,8 +1152,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
*
* @author Oliver Gierke
*/
private class ConverterAwareSpELExpressionParameterValueProvider extends
SpELExpressionParameterValueProvider<MongoPersistentProperty> {
private class ConverterAwareSpELExpressionParameterValueProvider
extends SpELExpressionParameterValueProvider<MongoPersistentProperty> {

private final ObjectPath path;

@@ -1135,7 +1165,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
* @param delegate must not be {@literal null}.
*/
public ConverterAwareSpELExpressionParameterValueProvider(SpELExpressionEvaluator evaluator,
ConversionService conversionService, ParameterValueProvider<MongoPersistentProperty> delegate, ObjectPath path) {
ConversionService conversionService, ParameterValueProvider<MongoPersistentProperty> delegate,
ObjectPath path) {

super(evaluator, conversionService, delegate);
this.path = path;

@@ -1178,10 +1209,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App

Object object = dbref == null ? null : path.getPathItem(dbref.getId(), dbref.getCollectionName());

if (object != null) {
return (T) object;
}

return (T) (object != null ? object : read(type, readRef(dbref), path));
}

@@ -1194,4 +1221,15 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
DBObject readRef(DBRef ref) {
return dbRefResolver.fetch(ref);
}

/**
* Marker class used to indicate we have a non root document object here that might be used within an update - so we
* need to preserve type hints for potential nested elements but need to remove it on top level.
*
* @author Christoph Strobl
* @since 1.8
*/
static class NestedDocument {

}
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -19,16 +19,26 @@ import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Currency;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import org.bson.types.Code;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionFailedException;
|
||||
import org.springframework.core.convert.TypeDescriptor;
|
||||
import org.springframework.core.convert.converter.ConditionalConverter;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.convert.converter.ConverterFactory;
|
||||
import org.springframework.data.convert.ReadingConverter;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mongodb.core.query.Term;
|
||||
import org.springframework.data.mongodb.core.script.NamedMongoScript;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.NumberUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
@@ -49,6 +59,36 @@ abstract class MongoConverters {
|
||||
*/
|
||||
private MongoConverters() {}
|
||||
|
||||
/**
|
||||
* Returns the converters to be registered.
|
||||
*
|
||||
* @return
|
||||
* @since 1.9
|
||||
*/
|
||||
public static Collection<Object> getConvertersToRegister() {
|
||||
|
||||
List<Object> converters = new ArrayList<Object>();
|
||||
|
||||
converters.add(BigDecimalToStringConverter.INSTANCE);
|
||||
converters.add(StringToBigDecimalConverter.INSTANCE);
|
||||
converters.add(BigIntegerToStringConverter.INSTANCE);
|
||||
converters.add(StringToBigIntegerConverter.INSTANCE);
|
||||
converters.add(URLToStringConverter.INSTANCE);
|
||||
converters.add(StringToURLConverter.INSTANCE);
|
||||
converters.add(DBObjectToStringConverter.INSTANCE);
|
||||
converters.add(TermToStringConverter.INSTANCE);
|
||||
converters.add(NamedMongoScriptToDBObjectConverter.INSTANCE);
|
||||
converters.add(DBObjectToNamedMongoScriptCoverter.INSTANCE);
|
||||
converters.add(CurrencyToStringConverter.INSTANCE);
|
||||
converters.add(StringToCurrencyConverter.INSTANCE);
|
||||
converters.add(AtomicIntegerToIntegerConverter.INSTANCE);
|
||||
converters.add(AtomicLongToLongConverter.INSTANCE);
|
||||
converters.add(LongToAtomicLongConverter.INSTANCE);
|
||||
converters.add(IntegerToAtomicIntegerConverter.INSTANCE);
|
||||
|
||||
return converters;
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple singleton to convert {@link ObjectId}s to their {@link String} representation.
|
||||
*
|
||||
@@ -228,4 +268,177 @@ abstract class MongoConverters {
|
||||
return builder.get();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Converter} implementation converting {@link Currency} into its ISO 4217 {@link String} representation.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.9
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum CurrencyToStringConverter implements Converter<Currency, String> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(Currency source) {
|
||||
return source == null ? null : source.getCurrencyCode();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Converter} implementation converting ISO 4217 {@link String} into {@link Currency}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.9
|
||||
*/
|
||||
@ReadingConverter
|
||||
public static enum StringToCurrencyConverter implements Converter<String, Currency> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public Currency convert(String source) {
|
||||
return StringUtils.hasText(source) ? Currency.getInstance(source) : null;
|
||||
}
|
||||
}
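
A quick usage sketch for the Currency converter pair above (calling the enum constants directly is only possible from within the converters' package and is shown purely for illustration): a java.util.Currency value is written as its ISO 4217 code and parsed back on read.

// a Currency property on a mapped document is persisted as e.g. "USD"
String code = CurrencyToStringConverter.INSTANCE.convert(Currency.getInstance("USD")); // "USD"
Currency currency = StringToCurrencyConverter.INSTANCE.convert(code);                  // back to Currency.getInstance("USD")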
|
||||
|
||||
/**
|
||||
* {@link ConverterFactory} implementation using {@link NumberUtils} for number conversion and parsing. Additionally
|
||||
* deals with {@link AtomicInteger} and {@link AtomicLong} by calling {@code get()} before performing the actual
|
||||
* conversion.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.9
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum NumberToNumberConverterFactory implements ConverterFactory<Number, Number>,ConditionalConverter {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.ConverterFactory#getConverter(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T extends Number> Converter<Number, T> getConverter(Class<T> targetType) {
|
||||
return new NumberToNumberConverter<T>(targetType);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.ConditionalConverter#matches(org.springframework.core.convert.TypeDescriptor, org.springframework.core.convert.TypeDescriptor)
|
||||
*/
|
||||
@Override
|
||||
public boolean matches(TypeDescriptor sourceType, TypeDescriptor targetType) {
|
||||
return !sourceType.equals(targetType);
|
||||
}
|
||||
|
||||
private final static class NumberToNumberConverter<T extends Number> implements Converter<Number, T> {
|
||||
|
||||
private final Class<T> targetType;
|
||||
|
||||
/**
|
||||
* Creates a new {@link NumberToNumberConverter} for the given target type.
|
||||
*
|
||||
* @param targetType must not be {@literal null}.
|
||||
*/
|
||||
public NumberToNumberConverter(Class<T> targetType) {
|
||||
|
||||
Assert.notNull(targetType, "Target type must not be null!");
|
||||
|
||||
this.targetType = targetType;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public T convert(Number source) {
|
||||
|
||||
if (source instanceof AtomicInteger) {
|
||||
return NumberUtils.convertNumberToTargetClass(((AtomicInteger) source).get(), this.targetType);
|
||||
}
|
||||
|
||||
if (source instanceof AtomicLong) {
|
||||
return NumberUtils.convertNumberToTargetClass(((AtomicLong) source).get(), this.targetType);
|
||||
}
|
||||
|
||||
return NumberUtils.convertNumberToTargetClass(source, this.targetType);
|
||||
}
|
||||
}
|
||||
}
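
A short sketch of what the factory above yields (direct use is for illustration only; the mapping layer picks it up through the registered conversions): atomics are unwrapped via get() before NumberUtils converts to the requested target type.

Converter<Number, Integer> toInteger = NumberToNumberConverterFactory.INSTANCE.getConverter(Integer.class);
Integer a = toInteger.convert(new AtomicLong(42L)); // AtomicLong is unwrapped first, yields 42
Integer b = toInteger.convert(Long.valueOf(42L));   // plain NumberUtils conversion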
|
||||
|
||||
/**
|
||||
* {@link ConverterFactory} implementation converting {@link AtomicLong} into {@link Long}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.10
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum AtomicLongToLongConverter implements Converter<AtomicLong, Long> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public Long convert(AtomicLong source) {
|
||||
return NumberUtils.convertNumberToTargetClass(source, Long.class);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ConverterFactory} implementation converting {@link AtomicInteger} into {@link Integer}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.10
|
||||
*/
|
||||
@WritingConverter
|
||||
public static enum AtomicIntegerToIntegerConverter implements Converter<AtomicInteger, Integer> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public Integer convert(AtomicInteger source) {
|
||||
return NumberUtils.convertNumberToTargetClass(source, Integer.class);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ConverterFactory} implementation converting {@link Long} into {@link AtomicLong}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.10
|
||||
*/
|
||||
@ReadingConverter
|
||||
public static enum LongToAtomicLongConverter implements Converter<Long, AtomicLong> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public AtomicLong convert(Long source) {
|
||||
return source != null ? new AtomicLong(source) : null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ConverterFactory} implementation converting {@link Integer} into {@link AtomicInteger}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.10
|
||||
*/
|
||||
@ReadingConverter
|
||||
public static enum IntegerToAtomicIntegerConverter implements Converter<Integer, AtomicInteger> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public AtomicInteger convert(Integer source) {
|
||||
return source != null ? new AtomicInteger(source) : null;
|
||||
}
|
||||
}
|
||||
}
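
Taken together, the four Atomic* converters registered above let AtomicInteger and AtomicLong properties round-trip as plain numeric values. A hedged sketch with a hypothetical entity:

// stored as { "hits" : NumberLong(5) } rather than as a nested object,
// and rehydrated into a fresh AtomicLong when the document is read back
@Document
class Counter {
    @Id String id;
    AtomicLong hits = new AtomicLong(5);
}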
|
||||
|
||||
@@ -0,0 +1,262 @@
|
||||
/*
|
||||
* Copyright 2015-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
import java.util.Stack;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.springframework.data.domain.Example;
|
||||
import org.springframework.data.domain.ExampleMatcher.NullHandler;
|
||||
import org.springframework.data.domain.ExampleMatcher.PropertyValueTransformer;
|
||||
import org.springframework.data.domain.ExampleMatcher.StringMatcher;
|
||||
import org.springframework.data.mapping.PropertyHandler;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.query.MongoRegexCreator;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
import org.springframework.data.repository.core.support.ExampleMatcherAccessor;
|
||||
import org.springframework.data.repository.query.parser.Part.Type;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.8
|
||||
*/
|
||||
public class MongoExampleMapper {
|
||||
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final MongoConverter converter;
|
||||
private final Map<StringMatcher, Type> stringMatcherPartMapping = new HashMap<StringMatcher, Type>();
|
||||
|
||||
public MongoExampleMapper(MongoConverter converter) {
|
||||
|
||||
this.converter = converter;
|
||||
this.mappingContext = converter.getMappingContext();
|
||||
|
||||
stringMatcherPartMapping.put(StringMatcher.EXACT, Type.SIMPLE_PROPERTY);
|
||||
stringMatcherPartMapping.put(StringMatcher.CONTAINING, Type.CONTAINING);
|
||||
stringMatcherPartMapping.put(StringMatcher.STARTING, Type.STARTING_WITH);
|
||||
stringMatcherPartMapping.put(StringMatcher.ENDING, Type.ENDING_WITH);
|
||||
stringMatcherPartMapping.put(StringMatcher.REGEX, Type.REGEX);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the given {@link Example} as {@link DBObject} holding matching values extracted from
|
||||
* {@link Example#getProbe()}.
|
||||
*
|
||||
* @param example must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public DBObject getMappedExample(Example<?> example) {
|
||||
|
||||
Assert.notNull(example, "Example must not be null!");
|
||||
|
||||
return getMappedExample(example, mappingContext.getPersistentEntity(example.getProbeType()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the given {@link Example} as {@link DBObject} holding matching values extracted from
|
||||
* {@link Example#getProbe()}.
|
||||
*
|
||||
* @param example must not be {@literal null}.
|
||||
* @param entity must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public DBObject getMappedExample(Example<?> example, MongoPersistentEntity<?> entity) {
|
||||
|
||||
Assert.notNull(example, "Example must not be null!");
|
||||
Assert.notNull(entity, "MongoPersistentEntity must not be null!");
|
||||
|
||||
DBObject reference = (DBObject) converter.convertToMongoType(example.getProbe());
|
||||
|
||||
if (entity.hasIdProperty() && entity.getIdentifierAccessor(example.getProbe()).getIdentifier() == null) {
|
||||
reference.removeField(entity.getIdProperty().getFieldName());
|
||||
}
|
||||
|
||||
ExampleMatcherAccessor matcherAccessor = new ExampleMatcherAccessor(example.getMatcher());
|
||||
|
||||
applyPropertySpecs("", reference, example.getProbeType(), matcherAccessor);
|
||||
|
||||
this.converter.getTypeMapper().writeTypeRestrictions(reference, getTypesToMatch(example));
|
||||
|
||||
return ObjectUtils.nullSafeEquals(NullHandler.INCLUDE, matcherAccessor.getNullHandler()) ? reference
|
||||
: new BasicDBObject(SerializationUtils.flattenMap(reference));
|
||||
}
|
||||
|
||||
private Set<Class<?>> getTypesToMatch(Example<?> example) {
|
||||
|
||||
Set<Class<?>> types = new HashSet<Class<?>>();
|
||||
|
||||
for (TypeInformation<?> reference : mappingContext.getManagedTypes()) {
|
||||
if (example.getProbeType().isAssignableFrom(reference.getType())) {
|
||||
types.add(reference.getType());
|
||||
}
|
||||
}
|
||||
|
||||
return types;
|
||||
}
|
||||
|
||||
private String getMappedPropertyPath(String path, Class<?> probeType) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(probeType);
|
||||
|
||||
Iterator<String> parts = Arrays.asList(path.split("\\.")).iterator();
|
||||
|
||||
final Stack<MongoPersistentProperty> stack = new Stack<MongoPersistentProperty>();
|
||||
|
||||
List<String> resultParts = new ArrayList<String>();
|
||||
|
||||
while (parts.hasNext()) {
|
||||
|
||||
final String part = parts.next();
|
||||
MongoPersistentProperty prop = entity.getPersistentProperty(part);
|
||||
|
||||
if (prop == null) {
|
||||
|
||||
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
|
||||
@Override
|
||||
public void doWithPersistentProperty(MongoPersistentProperty property) {
|
||||
|
||||
if (property.getFieldName().equals(part)) {
|
||||
stack.push(property);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (stack.isEmpty()) {
|
||||
return "";
|
||||
}
|
||||
prop = stack.pop();
|
||||
}
|
||||
|
||||
resultParts.add(prop.getName());
|
||||
|
||||
if (prop.isEntity() && mappingContext.hasPersistentEntityFor(prop.getActualType())) {
|
||||
entity = mappingContext.getPersistentEntity(prop.getActualType());
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return StringUtils.collectionToDelimitedString(resultParts, ".");
|
||||
|
||||
}
|
||||
|
||||
private void applyPropertySpecs(String path, DBObject source, Class<?> probeType,
|
||||
ExampleMatcherAccessor exampleSpecAccessor) {
|
||||
|
||||
if (!(source instanceof BasicDBObject)) {
|
||||
return;
|
||||
}
|
||||
|
||||
Iterator<Map.Entry<String, Object>> iter = ((BasicDBObject) source).entrySet().iterator();
|
||||
|
||||
while (iter.hasNext()) {
|
||||
|
||||
Map.Entry<String, Object> entry = iter.next();
|
||||
String propertyPath = StringUtils.hasText(path) ? path + "." + entry.getKey() : entry.getKey();
|
||||
String mappedPropertyPath = getMappedPropertyPath(propertyPath, probeType);
|
||||
|
||||
if (isEmptyIdProperty(entry)) {
|
||||
iter.remove();
|
||||
continue;
|
||||
}
|
||||
|
||||
if (exampleSpecAccessor.isIgnoredPath(propertyPath) || exampleSpecAccessor.isIgnoredPath(mappedPropertyPath)) {
|
||||
iter.remove();
|
||||
continue;
|
||||
}
|
||||
|
||||
StringMatcher stringMatcher = exampleSpecAccessor.getDefaultStringMatcher();
|
||||
Object value = entry.getValue();
|
||||
boolean ignoreCase = exampleSpecAccessor.isIgnoreCaseEnabled();
|
||||
|
||||
if (exampleSpecAccessor.hasPropertySpecifiers()) {
|
||||
|
||||
mappedPropertyPath = exampleSpecAccessor.hasPropertySpecifier(propertyPath) ? propertyPath
|
||||
: getMappedPropertyPath(propertyPath, probeType);
|
||||
|
||||
stringMatcher = exampleSpecAccessor.getStringMatcherForPath(mappedPropertyPath);
|
||||
ignoreCase = exampleSpecAccessor.isIgnoreCaseForPath(mappedPropertyPath);
|
||||
}
|
||||
|
||||
// TODO: should a PropertySpecifier overrule the later string matching?
|
||||
if (exampleSpecAccessor.hasPropertySpecifier(mappedPropertyPath)) {
|
||||
|
||||
PropertyValueTransformer valueTransformer = exampleSpecAccessor.getValueTransformerForPath(mappedPropertyPath);
|
||||
value = valueTransformer.convert(value);
|
||||
if (value == null) {
|
||||
iter.remove();
|
||||
continue;
|
||||
}
|
||||
|
||||
entry.setValue(value);
|
||||
}
|
||||
|
||||
if (entry.getValue() instanceof String) {
|
||||
applyStringMatcher(entry, stringMatcher, ignoreCase);
|
||||
} else if (entry.getValue() instanceof BasicDBObject) {
|
||||
applyPropertySpecs(propertyPath, (BasicDBObject) entry.getValue(), probeType, exampleSpecAccessor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isEmptyIdProperty(Entry<String, Object> entry) {
|
||||
return entry.getKey().equals("_id") && entry.getValue() == null;
|
||||
}
|
||||
|
||||
private void applyStringMatcher(Map.Entry<String, Object> entry, StringMatcher stringMatcher, boolean ignoreCase) {
|
||||
|
||||
BasicDBObject dbo = new BasicDBObject();
|
||||
|
||||
if (ObjectUtils.nullSafeEquals(StringMatcher.DEFAULT, stringMatcher)) {
|
||||
|
||||
if (ignoreCase) {
|
||||
dbo.put("$regex", Pattern.quote((String) entry.getValue()));
|
||||
entry.setValue(dbo);
|
||||
}
|
||||
} else {
|
||||
|
||||
Type type = stringMatcherPartMapping.get(stringMatcher);
|
||||
String expression = MongoRegexCreator.INSTANCE.toRegularExpression((String) entry.getValue(), type);
|
||||
dbo.put("$regex", expression);
|
||||
entry.setValue(dbo);
|
||||
}
|
||||
|
||||
if (ignoreCase) {
|
||||
dbo.put("$options", "i");
|
||||
}
|
||||
}
|
||||
}
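
A hedged usage sketch for the mapper above (Person and its lastname property are hypothetical; converter stands for an existing MappingMongoConverter, and QueryMapper normally drives this mapping when it encounters a $sample keyword):

Person probe = new Person();
probe.setLastname("stark");

Example<Person> example = Example.of(probe,
        ExampleMatcher.matching().withStringMatcher(StringMatcher.CONTAINING).withIgnoreCase());

DBObject mappedExample = new MongoExampleMapper(converter).getMappedExample(example);
// roughly: { "lastname" : { "$regex" : ".*stark.*", "$options" : "i" }, "_class" : { "$in" : [ ... ] } }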
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -27,17 +27,22 @@ import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionException;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.domain.Example;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.InvalidPersistentPropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.PersistentPropertyPath;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter.NestedDocument;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
@@ -53,19 +58,22 @@ import com.mongodb.DBRef;
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class QueryMapper {
|
||||
|
||||
private static final List<String> DEFAULT_ID_NAMES = Arrays.asList("id", "_id");
|
||||
private static final DBObject META_TEXT_SCORE = new BasicDBObject("$meta", "textScore");
|
||||
static final ClassTypeInformation<?> NESTED_DOCUMENT = ClassTypeInformation.from(NestedDocument.class);
|
||||
|
||||
private enum MetaMapping {
|
||||
FORCE, WHEN_PRESENT, IGNORE;
|
||||
FORCE, WHEN_PRESENT, IGNORE
|
||||
}
|
||||
|
||||
private final ConversionService conversionService;
|
||||
private final MongoConverter converter;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final MongoExampleMapper exampleMapper;
|
||||
|
||||
/**
|
||||
* Creates a new {@link QueryMapper} with the given {@link MongoConverter}.
|
||||
@@ -79,6 +87,7 @@ public class QueryMapper {
|
||||
this.conversionService = converter.getConversionService();
|
||||
this.converter = converter;
|
||||
this.mappingContext = converter.getMappingContext();
|
||||
this.exampleMapper = new MongoExampleMapper(converter);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -115,10 +124,20 @@ public class QueryMapper {
|
||||
continue;
|
||||
}
|
||||
|
||||
Field field = createPropertyField(entity, key, mappingContext);
|
||||
Entry<String, Object> entry = getMappedObjectForField(field, query.get(key));
|
||||
try {
|
||||
|
||||
result.put(entry.getKey(), entry.getValue());
|
||||
Field field = createPropertyField(entity, key, mappingContext);
|
||||
Entry<String, Object> entry = getMappedObjectForField(field, query.get(key));
|
||||
result.put(entry.getKey(), entry.getValue());
|
||||
} catch (InvalidPersistentPropertyPath invalidPathException) {
|
||||
|
||||
// in case the object has not already been mapped
|
||||
if (!(query.get(key) instanceof DBObject)) {
|
||||
throw invalidPathException;
|
||||
}
|
||||
|
||||
result.put(key, query.get(key));
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
@@ -222,7 +241,7 @@ public class QueryMapper {
|
||||
protected DBObject getMappedKeyword(Keyword keyword, MongoPersistentEntity<?> entity) {
|
||||
|
||||
// $or/$nor
|
||||
if (keyword.isOrOrNor() || keyword.hasIterableValue()) {
|
||||
if (keyword.isOrOrNor() || (keyword.hasIterableValue() && !keyword.isGeometry())) {
|
||||
|
||||
Iterable<?> conditions = keyword.getValue();
|
||||
BasicDBList newConditions = new BasicDBList();
|
||||
@@ -235,6 +254,10 @@ public class QueryMapper {
|
||||
return new BasicDBObject(keyword.getKey(), newConditions);
|
||||
}
|
||||
|
||||
if (keyword.isSample()) {
|
||||
return exampleMapper.getMappedExample(keyword.<Example<?>> getValue(), entity);
|
||||
}
|
||||
|
||||
return new BasicDBObject(keyword.getKey(), convertSimpleOrDBObject(keyword.getValue(), entity));
|
||||
}
|
||||
|
||||
@@ -250,8 +273,8 @@ public class QueryMapper {
|
||||
boolean needsAssociationConversion = property.isAssociation() && !keyword.isExists();
|
||||
Object value = keyword.getValue();
|
||||
|
||||
Object convertedValue = needsAssociationConversion ? convertAssociation(value, property) : getMappedValue(
|
||||
property.with(keyword.getKey()), value);
|
||||
Object convertedValue = needsAssociationConversion ? convertAssociation(value, property)
|
||||
: getMappedValue(property.with(keyword.getKey()), value);
|
||||
|
||||
return new BasicDBObject(keyword.key, convertedValue);
|
||||
}
|
||||
@@ -294,7 +317,7 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
if (isNestedKeyword(value)) {
|
||||
return getMappedKeyword(new Keyword((DBObject) value), null);
|
||||
return getMappedKeyword(new Keyword((DBObject) value), documentField.getPropertyEntity());
|
||||
}
|
||||
|
||||
if (isAssociationConversionNecessary(documentField, value)) {
|
||||
@@ -473,8 +496,8 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
try {
|
||||
return conversionService.canConvert(id.getClass(), ObjectId.class) ? conversionService
|
||||
.convert(id, ObjectId.class) : delegateConvertToMongoType(id, null);
|
||||
return conversionService.canConvert(id.getClass(), ObjectId.class) ? conversionService.convert(id, ObjectId.class)
|
||||
: delegateConvertToMongoType(id, null);
|
||||
} catch (ConversionException o_O) {
|
||||
return delegateConvertToMongoType(id, null);
|
||||
}
|
||||
@@ -552,6 +575,26 @@ public class QueryMapper {
|
||||
return key.matches(N_OR_PATTERN);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the current keyword is the {@code $geometry} keyword.
|
||||
*
|
||||
* @return
|
||||
* @since 1.8
|
||||
*/
|
||||
public boolean isGeometry() {
|
||||
return "$geometry".equalsIgnoreCase(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the current keyword indicates a sample object.
|
||||
*
|
||||
* @return
|
||||
* @since 1.8
|
||||
*/
|
||||
public boolean isSample() {
|
||||
return "$sample".equalsIgnoreCase(key);
|
||||
}
|
||||
|
||||
public boolean hasIterableValue() {
|
||||
return value instanceof Iterable;
|
||||
}
|
||||
@@ -657,6 +700,10 @@ public class QueryMapper {
|
||||
public Association<MongoPersistentProperty> getAssociation() {
|
||||
return null;
|
||||
}
|
||||
|
||||
public TypeInformation<?> getTypeHint() {
|
||||
return ClassTypeInformation.OBJECT;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -816,7 +863,7 @@ public class QueryMapper {
|
||||
|
||||
try {
|
||||
|
||||
PropertyPath path = PropertyPath.from(pathExpression, entity.getTypeInformation());
|
||||
PropertyPath path = PropertyPath.from(pathExpression.replaceAll("\\.\\d", ""), entity.getTypeInformation());
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(path);
|
||||
|
||||
Iterator<MongoPersistentProperty> iterator = propertyPath.iterator();
|
||||
@@ -849,7 +896,7 @@ public class QueryMapper {
|
||||
* @return
|
||||
*/
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return PropertyToFieldNameConverter.INSTANCE;
|
||||
return new PositionParameterRetainingPropertyKeyConverter(name);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -862,6 +909,104 @@ public class QueryMapper {
|
||||
protected Converter<MongoPersistentProperty, String> getAssociationConverter() {
|
||||
return new AssociationConverter(getAssociation());
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 1.8
|
||||
*/
|
||||
static class PositionParameterRetainingPropertyKeyConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final KeyMapper keyMapper;
|
||||
|
||||
public PositionParameterRetainingPropertyKeyConverter(String rawKey) {
|
||||
this.keyMapper = new KeyMapper(rawKey);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty source) {
|
||||
return keyMapper.mapPropertyName(source);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getTypeHint()
|
||||
*/
|
||||
@Override
|
||||
public TypeInformation<?> getTypeHint() {
|
||||
|
||||
MongoPersistentProperty property = getProperty();
|
||||
|
||||
if (property == null) {
|
||||
return super.getTypeHint();
|
||||
}
|
||||
|
||||
if (property.getActualType().isInterface()
|
||||
|| java.lang.reflect.Modifier.isAbstract(property.getActualType().getModifiers())) {
|
||||
return ClassTypeInformation.OBJECT;
|
||||
}
|
||||
|
||||
return NESTED_DOCUMENT;
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 1.8
|
||||
*/
|
||||
static class KeyMapper {
|
||||
|
||||
private final Iterator<String> iterator;
|
||||
|
||||
public KeyMapper(String key) {
|
||||
|
||||
this.iterator = Arrays.asList(key.split("\\.")).iterator();
|
||||
this.iterator.next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps the property name while retaining potential positional operator {@literal $}.
|
||||
*
|
||||
* @param property
|
||||
* @return
|
||||
*/
|
||||
protected String mapPropertyName(MongoPersistentProperty property) {
|
||||
|
||||
StringBuilder mappedName = new StringBuilder(PropertyToFieldNameConverter.INSTANCE.convert(property));
|
||||
boolean inspect = iterator.hasNext();
|
||||
|
||||
while (inspect) {
|
||||
|
||||
String partial = iterator.next();
|
||||
boolean isPositional = (isPositionalParameter(partial) && (property.isMap() || property.isCollectionLike()));
|
||||
|
||||
if (isPositional) {
|
||||
mappedName.append(".").append(partial);
|
||||
}
|
||||
|
||||
inspect = isPositional && iterator.hasNext();
|
||||
}
|
||||
|
||||
return mappedName.toString();
|
||||
}
|
||||
|
||||
private static boolean isPositionalParameter(String partial) {
|
||||
|
||||
if ("$".equals(partial)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
Long.valueOf(partial);
|
||||
return true;
|
||||
} catch (NumberFormatException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
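
A brief sketch of the key mapping the KeyMapper above enables (property and field names are hypothetical): positional operators and numeric positions inside a key survive the property-to-field translation.

// 'addresses' is a collection-like property mapped to the field "add"
Update update = new Update().set("addresses.$.city", "Linz");
// mapped to: { "$set" : { "add.$.city" : "Linz" } }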
|
||||
|
||||
/**
|
||||
|
||||
@@ -20,9 +20,9 @@ import static org.springframework.util.ReflectionUtils.*;

import java.lang.reflect.Method;

import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.util.Assert;

import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.DBRef;
@@ -51,12 +51,14 @@ class ReflectiveDBRefResolver {
* @param ref must not be {@literal null}.
* @return the document that this references.
*/
public static DBObject fetch(DB db, DBRef ref) {
public static DBObject fetch(MongoDbFactory factory, DBRef ref) {

Assert.notNull(ref, "DBRef to fetch must not be null!");

if (isMongo3Driver()) {
return db.getCollection(ref.getCollectionName()).findOne(ref.getId());

Assert.notNull(factory, "DbFactory to fetch DB from must not be null!");
return factory.getDb().getCollection(ref.getCollectionName()).findOne(ref.getId());
}

return (DBObject) invokeMethod(FETCH_METHOD, ref);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013-2014 the original author or authors.
|
||||
* Copyright 2013-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,8 +15,6 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
@@ -24,12 +22,11 @@ import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update.Modifier;
|
||||
import org.springframework.data.mongodb.core.query.Update.Modifiers;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
@@ -65,8 +62,8 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
protected Object delegateConvertToMongoType(Object source, MongoPersistentEntity<?> entity) {
|
||||
return entity == null ? super.delegateConvertToMongoType(source, null) : converter.convertToMongoType(source,
|
||||
entity.getTypeInformation());
|
||||
return converter.convertToMongoType(source,
|
||||
entity == null ? ClassTypeInformation.OBJECT : getTypeHintForEntity(source, entity));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -89,7 +86,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
return getMappedUpdateModifier(field, rawValue);
|
||||
}
|
||||
|
||||
return super.getMappedObjectForField(field, getMappedValue(field, rawValue));
|
||||
return super.getMappedObjectForField(field, rawValue);
|
||||
}
|
||||
|
||||
private Entry<String, Object> getMappedUpdateModifier(Field field, Object rawValue) {
|
||||
@@ -97,14 +94,14 @@ public class UpdateMapper extends QueryMapper {
|
||||
|
||||
if (rawValue instanceof Modifier) {
|
||||
|
||||
value = getMappedValue((Modifier) rawValue);
|
||||
value = getMappedValue(field, (Modifier) rawValue);
|
||||
|
||||
} else if (rawValue instanceof Modifiers) {
|
||||
|
||||
DBObject modificationOperations = new BasicDBObject();
|
||||
|
||||
for (Modifier modifier : ((Modifiers) rawValue).getModifiers()) {
|
||||
modificationOperations.putAll(getMappedValue(modifier).toMap());
|
||||
modificationOperations.putAll(getMappedValue(field, modifier).toMap());
|
||||
}
|
||||
|
||||
value = modificationOperations;
|
||||
@@ -132,12 +129,30 @@ public class UpdateMapper extends QueryMapper {
|
||||
return value instanceof Query;
|
||||
}
|
||||
|
||||
private DBObject getMappedValue(Modifier modifier) {
|
||||
private DBObject getMappedValue(Field field, Modifier modifier) {
|
||||
|
||||
Object value = converter.convertToMongoType(modifier.getValue(), ClassTypeInformation.OBJECT);
|
||||
TypeInformation<?> typeHint = field == null ? ClassTypeInformation.OBJECT : field.getTypeHint();
|
||||
|
||||
Object value = converter.convertToMongoType(modifier.getValue(), typeHint);
|
||||
return new BasicDBObject(modifier.getKey(), value);
|
||||
}
|
||||
|
||||
private TypeInformation<?> getTypeHintForEntity(Object source, MongoPersistentEntity<?> entity) {
|
||||
|
||||
TypeInformation<?> info = entity.getTypeInformation();
|
||||
Class<?> type = info.getActualType().getType();
|
||||
|
||||
if (source == null || type.isInterface() || java.lang.reflect.Modifier.isAbstract(type.getModifiers())) {
|
||||
return info;
|
||||
}
|
||||
|
||||
if (!type.equals(source.getClass())) {
|
||||
return info;
|
||||
}
|
||||
|
||||
return NESTED_DOCUMENT;
|
||||
}
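
In effect, the type hint logic above means an update value whose runtime type matches the declared property type is written as a nested document without a top-level type alias, while a subtype keeps its hint. A hedged sketch (entity and property names are hypothetical):

// Person.address is declared as Address
Update plain = new Update().set("address", new Address("Linz"));       // no top-level "_class" written for the value
Update subtype = new Update().set("address", new WorkAddress("Linz")); // type alias kept so the subtype survives the update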
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper#createPropertyField(org.springframework.data.mongodb.core.mapping.MongoPersistentEntity, java.lang.String, org.springframework.data.mapping.context.MappingContext)
|
||||
@@ -146,8 +161,8 @@ public class UpdateMapper extends QueryMapper {
|
||||
protected Field createPropertyField(MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
return entity == null ? super.createPropertyField(entity, key, mappingContext) : //
|
||||
new MetadataBackedUpdateField(entity, key, mappingContext);
|
||||
return entity == null ? super.createPropertyField(entity, key, mappingContext)
|
||||
: new MetadataBackedUpdateField(entity, key, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -194,7 +209,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return new UpdatePropertyConverter(key);
|
||||
return new PositionParameterRetainingPropertyKeyConverter(key);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -206,71 +221,6 @@ public class UpdateMapper extends QueryMapper {
|
||||
return new UpdateAssociationConverter(getAssociation(), key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Special mapper handling positional parameter {@literal $} within property names.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
*/
|
||||
private static class UpdateKeyMapper {
|
||||
|
||||
private final Iterator<String> iterator;
|
||||
|
||||
protected UpdateKeyMapper(String rawKey) {
|
||||
|
||||
Assert.hasText(rawKey, "Key must not be null or empty!");
|
||||
|
||||
this.iterator = Arrays.asList(rawKey.split("\\.")).iterator();
|
||||
this.iterator.next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps the property name while retaining potential positional operator {@literal $}.
|
||||
*
|
||||
* @param property
|
||||
* @return
|
||||
*/
|
||||
protected String mapPropertyName(MongoPersistentProperty property) {
|
||||
|
||||
String mappedName = PropertyToFieldNameConverter.INSTANCE.convert(property);
|
||||
return iterator.hasNext() && iterator.next().equals("$") ? String.format("%s.$", mappedName) : mappedName;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Special {@link Converter} for {@link MongoPersistentProperty} instances that will concatenate the {@literal $}
|
||||
* contained in the source update key.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private static class UpdatePropertyConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final UpdateKeyMapper mapper;
|
||||
|
||||
/**
|
||||
* Creates a new {@link UpdatePropertyConverter} with the given update key.
|
||||
*
|
||||
* @param updateKey must not be {@literal null} or empty.
|
||||
*/
|
||||
public UpdatePropertyConverter(String updateKey) {
|
||||
|
||||
Assert.hasText(updateKey, "Update key must not be null or empty!");
|
||||
|
||||
this.mapper = new UpdateKeyMapper(updateKey);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public String convert(MongoPersistentProperty property) {
|
||||
return mapper.mapPropertyName(property);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Converter} retaining positional parameter {@literal $} for {@link Association}s.
|
||||
*
|
||||
@@ -278,7 +228,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
protected static class UpdateAssociationConverter extends AssociationConverter {
|
||||
|
||||
private final UpdateKeyMapper mapper;
|
||||
private final KeyMapper mapper;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AssociationConverter} for the given {@link Association}.
|
||||
@@ -288,7 +238,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
public UpdateAssociationConverter(Association<MongoPersistentProperty> association, String key) {
|
||||
|
||||
super(association);
|
||||
this.mapper = new UpdateKeyMapper(key);
|
||||
this.mapper = new KeyMapper(key);
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright 2010-2015 the original author or authors.
* Copyright 2010-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -27,8 +27,9 @@ import java.lang.annotation.Target;
* @author Laurent Canet
* @author Thomas Darimont
* @author Christoph Strobl
* @author Mark Paluch
*/
@Target(ElementType.FIELD)
@Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE })
@Retention(RetentionPolicy.RUNTIME)
public @interface GeoSpatialIndexed {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
* Copyright 2010-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -39,6 +39,7 @@ public class GeospatialIndex implements IndexDefinition {
|
||||
private GeoSpatialIndexType type = GeoSpatialIndexType.GEO_2D;
|
||||
private Double bucketSize = 1.0;
|
||||
private String additionalField;
|
||||
private IndexFilter filter;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeospatialIndex} for the given field.
|
||||
@@ -119,6 +120,22 @@ public class GeospatialIndex implements IndexDefinition {
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}.
|
||||
*
|
||||
* @param filter can be {@literal null}.
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/core/index-partial/">https://docs.mongodb.com/manual/core/index-partial/</a>
|
||||
* @since 1.10
|
||||
*/
|
||||
public GeospatialIndex partial(IndexFilter filter) {
|
||||
|
||||
this.filter = filter;
|
||||
return this;
|
||||
}
|
||||
|
||||
public DBObject getIndexKeys() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
@@ -186,6 +203,10 @@ public class GeospatialIndex implements IndexDefinition {
|
||||
break;
|
||||
}
|
||||
|
||||
if (filter != null) {
|
||||
dbo.put("partialFilterExpression", filter.getFilterObject());
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
* Copyright 2010-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
@@ -44,7 +45,7 @@ public class Index implements IndexDefinition {
|
||||
*
|
||||
* @deprecated since 1.7.
|
||||
*/
|
||||
@Deprecated//
|
||||
@Deprecated //
|
||||
DROP
|
||||
}
|
||||
|
||||
@@ -62,6 +63,8 @@ public class Index implements IndexDefinition {
|
||||
|
||||
private long expire = -1;
|
||||
|
||||
private IndexFilter filter;
|
||||
|
||||
public Index() {}
|
||||
|
||||
public Index(String key, Direction direction) {
|
||||
@@ -175,11 +178,33 @@ public class Index implements IndexDefinition {
|
||||
return unique();
|
||||
}
|
||||
|
||||
/**
|
||||
* Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}.
|
||||
*
|
||||
* @param filter can be {@literal null}.
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/core/index-partial/">https://docs.mongodb.com/manual/core/index-partial/</a>
|
||||
* @since 1.10
|
||||
*/
|
||||
public Index partial(IndexFilter filter) {
|
||||
|
||||
this.filter = filter;
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexKeys()
|
||||
*/
|
||||
public DBObject getIndexKeys() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
for (String k : fieldSpec.keySet()) {
|
||||
dbo.put(k, fieldSpec.get(k).equals(Direction.ASC) ? 1 : -1);
|
||||
|
||||
for (Entry<String, Direction> entry : fieldSpec.entrySet()) {
|
||||
dbo.put(entry.getKey(), Direction.ASC.equals(entry.getValue()) ? 1 : -1);
|
||||
}
|
||||
|
||||
return dbo;
|
||||
}
|
||||
|
||||
@@ -205,6 +230,9 @@ public class Index implements IndexDefinition {
|
||||
dbo.put("expireAfterSeconds", expire);
|
||||
}
|
||||
|
||||
if (filter != null) {
|
||||
dbo.put("partialFilterExpression", filter.getFilterObject());
|
||||
}
|
||||
return dbo;
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,35 @@
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.index;

import com.mongodb.DBObject;

/**
* Use {@link IndexFilter} to create the partial filter expression used when creating
* <a href="https://docs.mongodb.com/manual/core/index-partial/">Partial Indexes</a>.
*
* @author Christoph Strobl
* @since 1.10
*/
public interface IndexFilter {

/**
* Get the raw (unmapped) filter expression.
*
* @return
*/
DBObject getFilterObject();
}
|
||||
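
A hedged usage sketch combining the new partial(...) methods with this interface (collection, field and threshold are made up; any IndexFilter implementation works, an anonymous one is used here):

Index index = new Index().on("age", Direction.ASC).named("age_partial")
        .partial(new IndexFilter() {
            @Override
            public DBObject getFilterObject() {
                return new BasicDBObject("age", new BasicDBObject("$gte", 18));
            }
        });

template.indexOps(Person.class).ensureIndex(index);
// creates { "age" : 1 } with options { "name" : "age_partial", "partialFilterExpression" : { "age" : { "$gte" : 18 } } }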
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2002-2014 the original author or authors.
|
||||
* Copyright 2002-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,7 +15,10 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import static org.springframework.data.domain.Sort.Direction.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
@@ -23,6 +26,8 @@ import java.util.List;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
@@ -30,6 +35,10 @@ import org.springframework.util.ObjectUtils;
|
||||
*/
|
||||
public class IndexInfo {
|
||||
|
||||
private static final Double ONE = Double.valueOf(1);
|
||||
private static final Double MINUS_ONE = Double.valueOf(-1);
|
||||
private static final Collection<String> TWO_D_IDENTIFIERS = Arrays.asList("2d", "2dsphere");
|
||||
|
||||
private final List<IndexField> indexFields;
|
||||
|
||||
private final String name;
|
||||
@@ -37,6 +46,7 @@ public class IndexInfo {
|
||||
private final boolean dropDuplicates;
|
||||
private final boolean sparse;
|
||||
private final String language;
|
||||
private String partialFilterExpression;
|
||||
|
||||
/**
|
||||
* @deprecated Will be removed in 1.7. Please use {@link #IndexInfo(List, String, boolean, boolean, boolean, String)}
|
||||
@@ -62,6 +72,64 @@ public class IndexInfo {
|
||||
this.language = language;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link IndexInfo} parsing required properties from the given {@literal sourceDocument}.
|
||||
*
|
||||
* @param sourceDocument
|
||||
* @return
|
||||
* @since 1.10
|
||||
*/
|
||||
public static IndexInfo indexInfoOf(DBObject sourceDocument) {
|
||||
|
||||
DBObject keyDbObject = (DBObject) sourceDocument.get("key");
|
||||
int numberOfElements = keyDbObject.keySet().size();
|
||||
|
||||
List<IndexField> indexFields = new ArrayList<IndexField>(numberOfElements);
|
||||
|
||||
for (String key : keyDbObject.keySet()) {
|
||||
|
||||
Object value = keyDbObject.get(key);
|
||||
|
||||
if (TWO_D_IDENTIFIERS.contains(value)) {
|
||||
|
||||
indexFields.add(IndexField.geo(key));
|
||||
|
||||
} else if ("text".equals(value)) {
|
||||
|
||||
DBObject weights = (DBObject) sourceDocument.get("weights");
|
||||
|
||||
for (String fieldName : weights.keySet()) {
|
||||
indexFields.add(IndexField.text(fieldName, Float.valueOf(weights.get(fieldName).toString())));
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
Double keyValue = new Double(value.toString());
|
||||
|
||||
if (ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, ASC));
|
||||
} else if (MINUS_ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, DESC));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
String name = sourceDocument.get("name").toString();
|
||||
|
||||
boolean unique = sourceDocument.containsField("unique") ? (Boolean) sourceDocument.get("unique") : false;
|
||||
boolean dropDuplicates = sourceDocument.containsField("dropDups") ? (Boolean) sourceDocument.get("dropDups")
|
||||
: false;
|
||||
boolean sparse = sourceDocument.containsField("sparse") ? (Boolean) sourceDocument.get("sparse") : false;
|
||||
String language = sourceDocument.containsField("default_language") ? (String) sourceDocument.get("default_language")
|
||||
: "";
|
||||
String partialFilter = sourceDocument.containsField("partialFilterExpression")
|
||||
? sourceDocument.get("partialFilterExpression").toString() : "";
|
||||
|
||||
IndexInfo info = new IndexInfo(indexFields, name, unique, dropDuplicates, sparse, language);
|
||||
info.partialFilterExpression = partialFilter;
|
||||
return info;
|
||||
}
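
A small sketch of how this factory method reads a raw index document (the sample values are made up, not taken from the change itself):

DBObject raw = new BasicDBObject("v", 1) //
        .append("key", new BasicDBObject("age", -1)) //
        .append("name", "age_-1") //
        .append("ns", "db.people");

IndexInfo info = IndexInfo.indexInfoOf(raw);
// info.getIndexFields() -> [age: DESC], info.getName() -> "age_-1", partial filter expression stays empty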
|
||||
|
||||
/**
|
||||
* Returns the individual index fields of the index.
|
||||
*
|
||||
@@ -113,10 +181,19 @@ public class IndexInfo {
|
||||
return language;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return
|
||||
* @since 1.0
|
||||
*/
|
||||
public String getPartialFilterExpression() {
|
||||
return partialFilterExpression;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "IndexInfo [indexFields=" + indexFields + ", name=" + name + ", unique=" + unique + ", dropDuplicates="
|
||||
+ dropDuplicates + ", sparse=" + sparse + ", language=" + language + "]";
|
||||
+ dropDuplicates + ", sparse=" + sparse + ", language=" + language + ", partialFilterExpression="
|
||||
+ partialFilterExpression + "]";
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -130,6 +207,7 @@ public class IndexInfo {
|
||||
result = prime * result + (sparse ? 1231 : 1237);
|
||||
result = prime * result + (unique ? 1231 : 1237);
|
||||
result = prime * result + ObjectUtils.nullSafeHashCode(language);
|
||||
result = prime * result + ObjectUtils.nullSafeHashCode(partialFilterExpression);
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -171,6 +249,9 @@ public class IndexInfo {
|
||||
if (!ObjectUtils.nullSafeEquals(language, other.language)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(partialFilterExpression, other.partialFilterExpression)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright 2014 the original author or authors.
* Copyright 2014-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,22 +16,24 @@
package org.springframework.data.mongodb.core.index;

import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
import org.springframework.data.util.TypeInformation;

/**
* {@link IndexResolver} finds those {@link IndexDefinition}s to be created for a given class.
*
* @author Christoph Strobl
* @author Thomas Darimont
* @since 1.5
*/
interface IndexResolver {

/**
* Find and create {@link IndexDefinition}s for properties of given {@code type}. {@link IndexDefinition}s are created
* Find and create {@link IndexDefinition}s for properties of given {@link TypeInformation}. {@link IndexDefinition}s are created
* for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}.
*
* @param type
* @param typeInformation
* @return Empty {@link Iterable} in case no {@link IndexDefinition} could be resolved for type.
*/
Iterable<? extends IndexDefinitionHolder> resolveIndexForClass(Class<?> type);
Iterable<? extends IndexDefinitionHolder> resolveIndexFor(TypeInformation<?> typeInformation);

}
|
||||
|
||||
@@ -29,8 +29,9 @@ import java.lang.annotation.Target;
* @author Johno Crawford
* @author Thomas Darimont
* @author Christoph Strobl
* @author Jordi Llach
*/
@Target(ElementType.FIELD)
@Target({ElementType.ANNOTATION_TYPE, ElementType.FIELD})
@Retention(RetentionPolicy.RUNTIME)
public @interface Indexed {
|
||||
|
||||
|
||||
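
Allowing ElementType.ANNOTATION_TYPE means @Indexed (like @GeoSpatialIndexed above) can now act as a meta-annotation. A hedged sketch of a composed annotation (the annotation name is hypothetical):

@Indexed(unique = true)
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
@interface UniqueIndexed {
}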
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2012 the original author or authors.
* Copyright 2011-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -60,4 +60,10 @@ public class MongoMappingEventPublisher implements ApplicationEventPublisher {
indexCreator.onApplicationEvent((MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>) event);
}
}

/*
* (non-Javadoc)
* @see org.springframework.context.ApplicationEventPublisher#publishEvent(java.lang.Object)
*/
public void publishEvent(Object event) {}
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -21,6 +21,7 @@ import java.util.concurrent.ConcurrentHashMap;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.context.ApplicationListener;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.MappingContextEvent;
|
||||
@@ -29,8 +30,12 @@ import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexRes
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.util.MongoDbErrorCodes;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.MongoException;
|
||||
|
||||
/**
|
||||
* Component that inspects {@link MongoPersistentEntity} instances contained in the given {@link MongoMappingContext}
|
||||
@@ -43,8 +48,7 @@ import org.springframework.util.Assert;
|
||||
* @author Laurent Canet
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MongoPersistentEntityIndexCreator implements
|
||||
ApplicationListener<MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty>> {
|
||||
public class MongoPersistentEntityIndexCreator implements ApplicationListener<MappingContextEvent<?, ?>> {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class);
|
||||
|
||||
@@ -54,7 +58,7 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
private final IndexResolver indexResolver;
|
||||
|
||||
/**
|
||||
* Creats a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* {@link MongoDbFactory}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
@@ -65,7 +69,7 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
}
|
||||
|
||||
/**
|
||||
* Creats a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* {@link MongoDbFactory}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
@@ -92,7 +96,7 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent)
|
||||
*/
|
||||
public void onApplicationEvent(MappingContextEvent<MongoPersistentEntity<?>, MongoPersistentProperty> event) {
|
||||
public void onApplicationEvent(MappingContextEvent<?, ?> event) {
|
||||
|
||||
if (!event.wasEmittedBy(mappingContext)) {
|
||||
return;
|
||||
@@ -102,7 +106,7 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
|
||||
// Double check type as Spring infrastructure does not consider nested generics
|
||||
if (entity instanceof MongoPersistentEntity) {
|
||||
checkForIndexes(event.getPersistentEntity());
|
||||
checkForIndexes((MongoPersistentEntity<?>) entity);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -125,15 +129,40 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
private void checkForAndCreateIndexes(MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (entity.findAnnotation(Document.class) != null) {
|
||||
for (IndexDefinitionHolder indexToCreate : indexResolver.resolveIndexForClass(entity.getType())) {
|
||||
for (IndexDefinitionHolder indexToCreate : indexResolver.resolveIndexFor(entity.getTypeInformation())) {
|
||||
createIndex(indexToCreate);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void createIndex(IndexDefinitionHolder indexDefinition) {
|
||||
mongoDbFactory.getDb().getCollection(indexDefinition.getCollection())
|
||||
.createIndex(indexDefinition.getIndexKeys(), indexDefinition.getIndexOptions());
|
||||
void createIndex(IndexDefinitionHolder indexDefinition) {
|
||||
|
||||
try {
|
||||
|
||||
mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()).createIndex(indexDefinition.getIndexKeys(),
|
||||
indexDefinition.getIndexOptions());
|
||||
|
||||
} catch (MongoException ex) {
|
||||
|
||||
if (MongoDbErrorCodes.isDataIntegrityViolationCode(ex.getCode())) {
|
||||
|
||||
DBObject existingIndex = fetchIndexInformation(indexDefinition);
|
||||
String message = "Cannot create index for '%s' in collection '%s' with keys '%s' and options '%s'.";
|
||||
|
||||
if (existingIndex != null) {
|
||||
message += " Index already defined as '%s'.";
|
||||
}
|
||||
|
||||
throw new DataIntegrityViolationException(
|
||||
String.format(message, indexDefinition.getPath(), indexDefinition.getCollection(),
|
||||
indexDefinition.getIndexKeys(), indexDefinition.getIndexOptions(), existingIndex),
|
||||
ex);
|
||||
}
|
||||
|
||||
RuntimeException exceptionToThrow = mongoDbFactory.getExceptionTranslator().translateExceptionIfPossible(ex);
|
||||
|
||||
throw exceptionToThrow != null ? exceptionToThrow : ex;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -145,4 +174,28 @@ public class MongoPersistentEntityIndexCreator implements
|
||||
public boolean isIndexCreatorFor(MappingContext<?, ?> context) {
|
||||
return this.mappingContext.equals(context);
|
||||
}
|
||||
|
||||
private DBObject fetchIndexInformation(IndexDefinitionHolder indexDefinition) {
|
||||
|
||||
if (indexDefinition == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
Object indexNameToLookUp = indexDefinition.getIndexOptions().get("name");
|
||||
|
||||
for (DBObject index : mongoDbFactory.getDb().getCollection(indexDefinition.getCollection()).getIndexInfo()) {
|
||||
if (ObjectUtils.nullSafeEquals(indexNameToLookUp, index.get("name"))) {
|
||||
return index;
|
||||
}
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
LOGGER.debug(
|
||||
String.format("Failed to load index information for collection '%s'.", indexDefinition.getCollection()), e);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
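The reworked createIndex(...) above now reports index conflicts as DataIntegrityViolationExceptions and routes other MongoExceptions through the configured exception translator. A minimal mapping sketch that could hit the new path; the class, collection and index names are invented for illustration and are not part of this diff.

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;

// If the "person" collection already holds an index named "age_idx" with different keys or
// options, annotation-driven index creation for this entity now fails with a
// DataIntegrityViolationException whose message also includes the existing index document.
@Document(collection = "person")
class Person {
    @Id String id;
    @Indexed(name = "age_idx") int age;
}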
@@ -27,7 +27,10 @@ import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.AssociationHandler;
|
||||
import org.springframework.data.mapping.PropertyHandler;
|
||||
import org.springframework.data.mapping.model.MappingException;
|
||||
import org.springframework.data.mongodb.core.index.Index.Duplicates;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.TextIndexIncludeOptions.IncludeStrategy;
|
||||
import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexDefinitionBuilder;
|
||||
@@ -36,6 +39,7 @@ import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
@@ -51,6 +55,7 @@ import com.mongodb.util.JSON;
|
||||
* scanning related annotations.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
* @since 1.5
|
||||
*/
|
||||
public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
@@ -70,13 +75,12 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
this.mappingContext = mappingContext;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexResolver#resolveIndexForClass(java.lang.Class)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexResolver#resolveIndexForClass(org.springframework.data.util.TypeInformation)
|
||||
*/
|
||||
@Override
|
||||
public List<IndexDefinitionHolder> resolveIndexForClass(Class<?> type) {
|
||||
return resolveIndexForEntity(mappingContext.getPersistentEntity(type));
|
||||
public Iterable<? extends IndexDefinitionHolder> resolveIndexFor(TypeInformation<?> typeInformation) {
|
||||
return resolveIndexForEntity(mappingContext.getPersistentEntity(typeInformation));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -107,7 +111,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
try {
|
||||
if (persistentProperty.isEntity()) {
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getActualType(),
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getTypeInformation().getActualType(),
|
||||
persistentProperty.getFieldName(), root.getCollection(), guard));
|
||||
}
|
||||
|
||||
@@ -122,6 +126,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
}
|
||||
});
|
||||
|
||||
indexInformation.addAll(resolveIndexesForDbrefs("", root.getCollection(), root));
|
||||
|
||||
return indexInformation;
|
||||
}
|
||||
|
||||
@@ -134,7 +140,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
* @return List of {@link IndexDefinitionHolder} representing indexes for given type and its referenced property
|
||||
* types. Will never be {@code null}.
|
||||
*/
|
||||
private List<IndexDefinitionHolder> resolveIndexForClass(final Class<?> type, final String path,
|
||||
private List<IndexDefinitionHolder> resolveIndexForClass(final TypeInformation<?> type, final String path,
|
||||
final String collection, final CycleGuard guard) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(type);
|
||||
@@ -152,8 +158,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
if (persistentProperty.isEntity()) {
|
||||
try {
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getActualType(), propertyDotPath,
|
||||
collection, guard));
|
||||
indexInformation.addAll(resolveIndexForClass(persistentProperty.getTypeInformation().getActualType(),
|
||||
propertyDotPath, collection, guard));
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage());
|
||||
}
|
||||
@@ -167,6 +173,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
}
|
||||
});
|
||||
|
||||
indexInformation.addAll(resolveIndexesForDbrefs(path, collection, entity));
|
||||
|
||||
return indexInformation;
|
||||
}
|
||||
|
||||
@@ -192,18 +200,19 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return createCompoundIndexDefinitions(dotPath, collection, entity);
|
||||
}
|
||||
|
||||
private Collection<? extends IndexDefinitionHolder> potentiallyCreateTextIndexDefinition(MongoPersistentEntity<?> root) {
|
||||
private Collection<? extends IndexDefinitionHolder> potentiallyCreateTextIndexDefinition(
|
||||
MongoPersistentEntity<?> root) {
|
||||
|
||||
TextIndexDefinitionBuilder indexDefinitionBuilder = new TextIndexDefinitionBuilder().named(root.getType()
|
||||
.getSimpleName() + "_TextIndex");
|
||||
TextIndexDefinitionBuilder indexDefinitionBuilder = new TextIndexDefinitionBuilder()
|
||||
.named(root.getType().getSimpleName() + "_TextIndex");
|
||||
|
||||
if (StringUtils.hasText(root.getLanguage())) {
|
||||
indexDefinitionBuilder.withDefaultLanguage(root.getLanguage());
|
||||
}
|
||||
|
||||
try {
|
||||
appendTextIndexInformation("", indexDefinitionBuilder, root,
|
||||
new TextIndexIncludeOptions(IncludeStrategy.DEFAULT), new CycleGuard());
|
||||
appendTextIndexInformation("", indexDefinitionBuilder, root, new TextIndexIncludeOptions(IncludeStrategy.DEFAULT),
|
||||
new CycleGuard());
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage());
|
||||
}
|
||||
@@ -219,9 +228,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
}
|
||||
|
||||
private void appendTextIndexInformation(final String dotPath,
|
||||
final TextIndexDefinitionBuilder indexDefinitionBuilder, final MongoPersistentEntity<?> entity,
|
||||
final TextIndexIncludeOptions includeOptions, final CycleGuard guard) {
|
||||
private void appendTextIndexInformation(final String dotPath, final TextIndexDefinitionBuilder indexDefinitionBuilder,
|
||||
final MongoPersistentEntity<?> entity, final TextIndexIncludeOptions includeOptions, final CycleGuard guard) {
|
||||
|
||||
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
|
||||
@@ -248,8 +256,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
TextIndexIncludeOptions optionsForNestedType = includeOptions;
|
||||
if (!IncludeStrategy.FORCE.equals(includeOptions.getStrategy()) && indexed != null) {
|
||||
optionsForNestedType = new TextIndexIncludeOptions(IncludeStrategy.FORCE, new TextIndexedFieldSpec(
|
||||
propertyDotPath, weight));
|
||||
optionsForNestedType = new TextIndexIncludeOptions(IncludeStrategy.FORCE,
|
||||
new TextIndexedFieldSpec(propertyDotPath, weight));
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -258,9 +266,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage(), e);
|
||||
} catch (InvalidDataAccessApiUsageException e) {
|
||||
LOGGER.info(
|
||||
String.format("Potentially invald index structure discovered. Breaking operation for %s.",
|
||||
entity.getName()), e);
|
||||
LOGGER.info(String.format("Potentially invalid index structure discovered. Breaking operation for %s.",
|
||||
entity.getName()), e);
|
||||
}
|
||||
} else if (includeOptions.isForce() || indexed != null) {
|
||||
indexDefinitionBuilder.onField(propertyDotPath, weight);
|
||||
@@ -305,8 +312,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
protected IndexDefinitionHolder createCompoundIndexDefinition(String dotPath, String fallbackCollection,
|
||||
CompoundIndex index, MongoPersistentEntity<?> entity) {
|
||||
|
||||
CompoundIndexDefinition indexDefinition = new CompoundIndexDefinition(resolveCompoundIndexKeyFromStringDefinition(
|
||||
dotPath, index.def()));
|
||||
CompoundIndexDefinition indexDefinition = new CompoundIndexDefinition(
|
||||
resolveCompoundIndexKeyFromStringDefinition(dotPath, index.def()));
|
||||
|
||||
if (!index.useGeneratedName()) {
|
||||
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, null));
|
||||
@@ -430,13 +437,45 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
if (StringUtils.hasText(dotPath)) {
|
||||
|
||||
nameToUse = StringUtils.hasText(nameToUse) ? (property != null ? dotPath.replace("." + property.getFieldName(),
|
||||
"") : dotPath) + "." + nameToUse : dotPath;
|
||||
nameToUse = StringUtils.hasText(nameToUse)
|
||||
? (property != null ? dotPath.replace("." + property.getFieldName(), "") : dotPath) + "." + nameToUse
|
||||
: dotPath;
|
||||
}
|
||||
return nameToUse;
|
||||
|
||||
}
|
||||
|
||||
private List<IndexDefinitionHolder> resolveIndexesForDbrefs(final String path, final String collection,
|
||||
MongoPersistentEntity<?> entity) {
|
||||
|
||||
final List<IndexDefinitionHolder> indexes = new ArrayList<IndexDefinitionHolder>(0);
|
||||
entity.doWithAssociations(new AssociationHandler<MongoPersistentProperty>() {
|
||||
|
||||
@Override
|
||||
public void doWithAssociation(Association<MongoPersistentProperty> association) {
|
||||
|
||||
MongoPersistentProperty property = association.getInverse();
|
||||
|
||||
String propertyDotPath = (StringUtils.hasText(path) ? path + "." : "") + property.getFieldName();
|
||||
|
||||
if (property.isAnnotationPresent(GeoSpatialIndexed.class) || property.isAnnotationPresent(TextIndexed.class)) {
|
||||
throw new MappingException(
|
||||
String.format("Cannot create geospatial-/text- index on DBRef in collection '%s' for path '%s'.",
|
||||
collection, propertyDotPath));
|
||||
}
|
||||
|
||||
IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(propertyDotPath,
|
||||
collection, property);
|
||||
|
||||
if (indexDefinitionHolder != null) {
|
||||
indexes.add(indexDefinitionHolder);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return indexes;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link CycleGuard} holds information about properties and the paths for accessing those. This information is used
|
||||
* to detect potential cycles within the references.
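resolveIndexesForDbrefs(...) above adds index resolution for association properties. A hedged mapping sketch, with invented type and field names, of what is now supported and what is rejected:

import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Order {

    // An @Indexed DBRef now contributes an IndexDefinitionHolder for the "customer" path.
    @Indexed @DBRef Customer customer;

    // A @GeoSpatialIndexed or @TextIndexed DBRef is rejected with a MappingException,
    // as enforced in doWithAssociation(...) above.
    // @TextIndexed @DBRef Customer fallback;
}

@Document
class Customer {
    String name;
}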
@@ -0,0 +1,75 @@
|
||||
/*
|
||||
* Copyright 2016. the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* {@link IndexFilter} implementation for usage with plain {@link DBObject} as well as {@link CriteriaDefinition} filter
|
||||
* expressions.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.10
|
||||
*/
|
||||
@RequiredArgsConstructor(access = AccessLevel.PRIVATE)
|
||||
public class PartialIndexFilter implements IndexFilter {
|
||||
|
||||
private final @NonNull Object filterExpression;
|
||||
|
||||
/**
|
||||
* Create new {@link PartialIndexFilter} for given {@link DBObject filter expression}.
|
||||
*
|
||||
* @param where must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static PartialIndexFilter of(DBObject where) {
|
||||
return new PartialIndexFilter(where);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link PartialIndexFilter} for given {@link CriteriaDefinition filter expression}.
|
||||
*
|
||||
* @param where must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static PartialIndexFilter of(CriteriaDefinition where) {
|
||||
return new PartialIndexFilter(where);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexFilter#getFilterObject()
|
||||
*/
|
||||
public DBObject getFilterObject() {
|
||||
|
||||
if (filterExpression instanceof DBObject) {
|
||||
return (DBObject) filterExpression;
|
||||
}
|
||||
|
||||
if (filterExpression instanceof CriteriaDefinition) {
|
||||
return ((CriteriaDefinition) filterExpression).getCriteriaObject();
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(
|
||||
String.format("Unknown type %s used as filter expression.", filterExpression.getClass()));
|
||||
}
|
||||
}
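A short usage sketch for the new PartialIndexFilter shown above; the filter criteria are arbitrary and only illustrate the two of(...) factories and getFilterObject().

import org.springframework.data.mongodb.core.index.PartialIndexFilter;
import org.springframework.data.mongodb.core.query.Criteria;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

class PartialIndexFilterSketch {

    static DBObject fromCriteria() {
        // Criteria implements CriteriaDefinition, so the second factory applies.
        return PartialIndexFilter.of(Criteria.where("age").gte(18)).getFilterObject();
    }

    static DBObject fromDbObject() {
        return PartialIndexFilter.of(new BasicDBObject("visible", true)).getFilterObject();
    }
}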
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
* Copyright 2014-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -39,6 +39,7 @@ public class TextIndexDefinition implements IndexDefinition {
|
||||
private Set<TextIndexedFieldSpec> fieldSpecs;
|
||||
private String defaultLanguage;
|
||||
private String languageOverride;
|
||||
private IndexFilter filter;
|
||||
|
||||
TextIndexDefinition() {
|
||||
fieldSpecs = new LinkedHashSet<TextIndexedFieldSpec>();
|
||||
@@ -129,6 +130,10 @@ public class TextIndexDefinition implements IndexDefinition {
|
||||
options.put("language_override", languageOverride);
|
||||
}
|
||||
|
||||
if (filter != null) {
|
||||
options.put("partialFilterExpression", filter.getFilterObject());
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
@@ -288,8 +293,8 @@ public class TextIndexDefinition implements IndexDefinition {
|
||||
public TextIndexDefinitionBuilder onField(String fieldname, Float weight) {
|
||||
|
||||
if (this.instance.fieldSpecs.contains(ALL_FIELDS)) {
|
||||
throw new InvalidDataAccessApiUsageException(String.format("Cannot add %s to field spec for all fields.",
|
||||
fieldname));
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
String.format("Cannot add %s to field spec for all fields.", fieldname));
|
||||
}
|
||||
|
||||
this.instance.fieldSpecs.add(new TextIndexedFieldSpec(fieldname, weight));
|
||||
@@ -318,15 +323,30 @@ public class TextIndexDefinition implements IndexDefinition {
|
||||
public TextIndexDefinitionBuilder withLanguageOverride(String fieldname) {
|
||||
|
||||
if (StringUtils.hasText(this.instance.languageOverride)) {
|
||||
throw new InvalidDataAccessApiUsageException(String.format(
|
||||
"Cannot set language override on %s as it is already defined on %s.", fieldname,
|
||||
this.instance.languageOverride));
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
String.format("Cannot set language override on %s as it is already defined on %s.", fieldname,
|
||||
this.instance.languageOverride));
|
||||
}
|
||||
|
||||
this.instance.languageOverride = fieldname;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Only index the documents that meet the specified {@link IndexFilter filter expression}.
|
||||
*
|
||||
* @param filter can be {@literal null}.
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/core/index-partial/">https://docs.mongodb.com/manual/core/index-partial/</a>
|
||||
* @since 1.10
|
||||
*/
|
||||
public TextIndexDefinitionBuilder partial(IndexFilter filter) {
|
||||
|
||||
this.instance.filter = filter;
|
||||
return this;
|
||||
}
|
||||
|
||||
public TextIndexDefinition build() {
|
||||
return this.instance;
|
||||
}
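The builder gains partial(IndexFilter) above; a sketch of wiring it together with PartialIndexFilter, with made-up index name, fields and weight. The resulting index options then carry the "partialFilterExpression" entry added in getIndexOptions() above.

import org.springframework.data.mongodb.core.index.PartialIndexFilter;
import org.springframework.data.mongodb.core.index.TextIndexDefinition;
import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexDefinitionBuilder;
import org.springframework.data.mongodb.core.query.Criteria;

class PartialTextIndexSketch {

    static TextIndexDefinition definition() {
        return new TextIndexDefinitionBuilder()
                .named("post_TextIndex")
                .onField("title", 2F)
                .onField("body")
                .partial(PartialIndexFilter.of(Criteria.where("published").is(true)))
                .build();
    }
}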
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
* Copyright 2014-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -26,10 +26,11 @@ import java.lang.annotation.Target;
|
||||
* all fields marked with {@link TextIndexed} are combined into one single index. <br />
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.6
|
||||
*/
|
||||
@Documented
|
||||
@Target({ ElementType.FIELD })
|
||||
@Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE })
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface TextIndexed {
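Adding ANNOTATION_TYPE to @Target lets @TextIndexed act as a meta-annotation. The composed annotation below is a made-up example of that usage, not something introduced by these commits.

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.data.mongodb.core.index.TextIndexed;

// Fields annotated with @BoostedText participate in the collection's text index with weight 5.
@TextIndexed(weight = 5f)
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
@interface BoostedText {
}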
@@ -16,6 +16,7 @@
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
@@ -76,7 +77,7 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
Class<?> rawType = typeInformation.getType();
|
||||
String fallback = MongoCollectionUtils.getPreferredCollectionName(rawType);
|
||||
|
||||
Document document = rawType.getAnnotation(Document.class);
|
||||
Document document = this.findAnnotation(Document.class);
|
||||
|
||||
this.expression = detectExpression(document);
|
||||
this.context = new StandardEvaluationContext();
|
||||
@@ -305,28 +306,44 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
*/
|
||||
private static class PropertyTypeAssertionHandler implements PropertyHandler<MongoPersistentProperty> {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.PropertyHandler#doWithPersistentProperty(org.springframework.data.mapping.PersistentProperty)
|
||||
*/
|
||||
@Override
|
||||
public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
potentiallyAssertTextScoreType(persistentProperty);
|
||||
potentiallyAssertLanguageType(persistentProperty);
|
||||
potentiallyAssertDBRefTargetType(persistentProperty);
|
||||
}
|
||||
|
||||
private void potentiallyAssertLanguageType(MongoPersistentProperty persistentProperty) {
|
||||
private static void potentiallyAssertLanguageType(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
if (persistentProperty.isExplicitLanguageProperty()) {
|
||||
assertPropertyType(persistentProperty, String.class);
|
||||
}
|
||||
}
|
||||
|
||||
private void potentiallyAssertTextScoreType(MongoPersistentProperty persistentProperty) {
|
||||
private static void potentiallyAssertTextScoreType(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
if (persistentProperty.isTextScoreProperty()) {
|
||||
assertPropertyType(persistentProperty, Float.class, Double.class);
|
||||
}
|
||||
}
|
||||
|
||||
private void assertPropertyType(MongoPersistentProperty persistentProperty, Class<?>... validMatches) {
|
||||
private static void potentiallyAssertDBRefTargetType(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
if (persistentProperty.isDbReference() && persistentProperty.getDBRef().lazy()) {
|
||||
if (persistentProperty.isArray() || Modifier.isFinal(persistentProperty.getActualType().getModifiers())) {
|
||||
throw new MappingException(String.format(
|
||||
"Invalid lazy DBRef property for %s. Found %s which must not be an array nor a final class.",
|
||||
persistentProperty.getField(), persistentProperty.getActualType()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static void assertPropertyType(MongoPersistentProperty persistentProperty, Class<?>... validMatches) {
|
||||
|
||||
for (Class<?> potentialMatch : validMatches) {
|
||||
if (ClassUtils.isAssignable(potentialMatch, persistentProperty.getActualType())) {
|
||||
@@ -334,10 +351,9 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
}
|
||||
}
|
||||
|
||||
throw new MappingException(String.format("Missmatching types for %s. Found %s expected one of %s.",
|
||||
persistentProperty.getField(), persistentProperty.getActualType(),
|
||||
StringUtils.arrayToCommaDelimitedString(validMatches)));
|
||||
throw new MappingException(
|
||||
String.format("Missmatching types for %s. Found %s expected one of %s.", persistentProperty.getField(),
|
||||
persistentProperty.getActualType(), StringUtils.arrayToCommaDelimitedString(validMatches)));
|
||||
}
|
||||
}
|
||||
|
||||
}
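potentiallyAssertDBRefTargetType(...) above fails fast for lazy DBRefs that cannot be proxied. A mapping sketch with invented types, showing what is now rejected at mapping time rather than later at proxy creation:

import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Library {

    @DBRef(lazy = true) Book featured;        // fine: Book is neither final nor an array

    // @DBRef(lazy = true) Book[] shelf;       // array target -> MappingException
    // @DBRef(lazy = true) String isbnOfWeek;  // final class  -> MappingException
}

@Document
class Book {
    String title;
}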
@@ -1,8 +1,25 @@
|
||||
/*
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* Annotation to define custom metadata for document fields.
|
||||
@@ -11,6 +28,7 @@ import java.lang.annotation.RetentionPolicy;
|
||||
*/
|
||||
@Documented
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target({ ElementType.FIELD, ElementType.METHOD, ElementType.ANNOTATION_TYPE })
|
||||
public @interface Field {
|
||||
|
||||
/**
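The widened @Target above (METHOD and ANNOTATION_TYPE) allows @Field to back composed annotations; the annotation below is purely illustrative and assumed, not part of this diff.

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.data.mongodb.core.mapping.Field;

// Project-specific shorthand: properties annotated with @UserNameField map to the "uname" key.
@Field("uname")
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
@interface UserNameField {
}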
@@ -45,7 +45,7 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
int getFieldOrder();
|
||||
|
||||
/**
|
||||
* Returns whether the propert is a {@link com.mongodb.DBRef}. If this returns {@literal true} you can expect
|
||||
* Returns whether the property is a {@link com.mongodb.DBRef}. If this returns {@literal true} you can expect
|
||||
* {@link #getDBRef()} to return an non-{@literal null} value.
|
||||
*
|
||||
* @return
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2017 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -26,6 +26,8 @@ import org.bson.types.Binary;
|
||||
import org.bson.types.CodeWScope;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.mongodb.util.MongoClientVersion;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.DBRef;
|
||||
@@ -34,6 +36,7 @@ import com.mongodb.DBRef;
|
||||
* Simple constant holder for a {@link SimpleTypeHolder} enriched with Mongo specific simple types.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public abstract class MongoSimpleTypes {
|
||||
|
||||
@@ -54,12 +57,17 @@ public abstract class MongoSimpleTypes {
|
||||
simpleTypes.add(Pattern.class);
|
||||
simpleTypes.add(Binary.class);
|
||||
simpleTypes.add(UUID.class);
|
||||
|
||||
if (MongoClientVersion.isMongo34Driver()) {
|
||||
simpleTypes
|
||||
.add(ClassUtils.resolveClassName("org.bson.types.Decimal128", MongoSimpleTypes.class.getClassLoader()));
|
||||
}
|
||||
|
||||
MONGO_SIMPLE_TYPES = Collections.unmodifiableSet(simpleTypes);
|
||||
}
|
||||
|
||||
private static final Set<Class<?>> MONGO_SIMPLE_TYPES;
|
||||
public static final SimpleTypeHolder HOLDER = new SimpleTypeHolder(MONGO_SIMPLE_TYPES, true);
|
||||
|
||||
private MongoSimpleTypes() {
|
||||
}
|
||||
private MongoSimpleTypes() {}
|
||||
}
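With a 3.4 mongo-java-driver on the classpath, org.bson.types.Decimal128 is now registered as a Mongo simple type and left untouched by the mapping layer. A small, assumed entity sketch:

import java.math.BigDecimal;

import org.bson.types.Decimal128;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Invoice {
    @Id String id;
    Decimal128 total = new Decimal128(new BigDecimal("19.99")); // stored as-is, no custom converter
}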
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 by the original author(s).
|
||||
* Copyright 2013-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -21,6 +21,7 @@ import com.mongodb.DBObject;
|
||||
* Base class for delete events.
|
||||
*
|
||||
* @author Martin Baumgartner
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public abstract class AbstractDeleteEvent<T> extends MongoMappingEvent<DBObject> {
|
||||
|
||||
@@ -31,11 +32,25 @@ public abstract class AbstractDeleteEvent<T> extends MongoMappingEvent<DBObject>
|
||||
* Creates a new {@link AbstractDeleteEvent} for the given {@link DBObject} and type.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type , possibly be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #AbstractDeleteEvent(DBObject, Class, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public AbstractDeleteEvent(DBObject dbo, Class<T> type) {
|
||||
this(dbo, type, null);
|
||||
}
|
||||
|
||||
super(dbo, dbo);
|
||||
/**
|
||||
* Creates a new {@link AbstractDeleteEvent} for the given {@link DBObject} and type.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public AbstractDeleteEvent(DBObject dbo, Class<T> type, String collectionName) {
|
||||
|
||||
super(dbo, dbo, collectionName);
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2013 by the original author(s).
|
||||
* Copyright 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -28,6 +28,7 @@ import com.mongodb.DBObject;
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Martin Baumgartner
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public abstract class AbstractMongoEventListener<E> implements ApplicationListener<MongoMappingEvent<?>> {
|
||||
|
||||
@@ -46,14 +47,14 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent)
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
public void onApplicationEvent(MongoMappingEvent<?> event) {
|
||||
|
||||
if (event instanceof AfterLoadEvent) {
|
||||
AfterLoadEvent<?> afterLoadEvent = (AfterLoadEvent<?>) event;
|
||||
|
||||
if (domainClass.isAssignableFrom(afterLoadEvent.getType())) {
|
||||
onAfterLoad(event.getDBObject());
|
||||
onAfterLoad((AfterLoadEvent<E>) event);
|
||||
}
|
||||
|
||||
return;
|
||||
@@ -65,18 +66,18 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
|
||||
if (eventDomainType != null && domainClass.isAssignableFrom(eventDomainType)) {
|
||||
if (event instanceof BeforeDeleteEvent) {
|
||||
onBeforeDelete(event.getDBObject());
|
||||
onBeforeDelete((BeforeDeleteEvent<E>) event);
|
||||
}
|
||||
if (event instanceof AfterDeleteEvent) {
|
||||
onAfterDelete(event.getDBObject());
|
||||
onAfterDelete((AfterDeleteEvent<E>) event);
|
||||
}
|
||||
}
|
||||
|
||||
return;
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
E source = (E) event.getSource();
|
||||
Object source = event.getSource();
|
||||
|
||||
// Check for matching domain type and invoke callbacks
|
||||
if (source != null && !domainClass.isAssignableFrom(source.getClass())) {
|
||||
@@ -84,55 +85,185 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
}
|
||||
|
||||
if (event instanceof BeforeConvertEvent) {
|
||||
onBeforeConvert(source);
|
||||
onBeforeConvert((BeforeConvertEvent<E>) event);
|
||||
} else if (event instanceof BeforeSaveEvent) {
|
||||
onBeforeSave(source, event.getDBObject());
|
||||
onBeforeSave((BeforeSaveEvent<E>) event);
|
||||
} else if (event instanceof AfterSaveEvent) {
|
||||
onAfterSave(source, event.getDBObject());
|
||||
onAfterSave((AfterSaveEvent<E>) event);
|
||||
} else if (event instanceof AfterConvertEvent) {
|
||||
onAfterConvert(event.getDBObject(), source);
|
||||
onAfterConvert((AfterConvertEvent<E>) event);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures source element before conversion.
|
||||
*
|
||||
* @param source will never be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #onBeforeConvert(BeforeConvertEvent)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void onBeforeConvert(E source) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onBeforeConvert({})", source);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures {@link BeforeConvertEvent}.
|
||||
*
|
||||
* @param event never {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public void onBeforeConvert(BeforeConvertEvent<E> event) {
|
||||
onBeforeConvert(event.getSource());
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures source element and {@link com.mongodb.DBObject} representation before save.
|
||||
*
|
||||
* @param source will never be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #onBeforeSave(BeforeSaveEvent)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void onBeforeSave(E source, DBObject dbo) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onBeforeSave({}, {})", source, dbo);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures {@link BeforeSaveEvent}.
|
||||
*
|
||||
* @param event will never be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public void onBeforeSave(BeforeSaveEvent<E> event) {
|
||||
onBeforeSave(event.getSource(), event.getDBObject());
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures source element and {@link com.mongodb.DBObject} representation after save.
|
||||
*
|
||||
* @param source will never be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #onAfterSave(AfterSaveEvent)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void onAfterSave(E source, DBObject dbo) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterSave({}, {})", source, dbo);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures {@link AfterSaveEvent}.
|
||||
*
|
||||
* @param event will never be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public void onAfterSave(AfterSaveEvent<E> event) {
|
||||
onAfterSave(event.getSource(), event.getDBObject());
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures raw {@link com.mongodb.DBObject} when read from MongoDB.
|
||||
*
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #onAfterLoad(AfterLoadEvent)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void onAfterLoad(DBObject dbo) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterLoad({})", dbo);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures {@link AfterLoadEvent}.
|
||||
*
|
||||
* @param event will never be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public void onAfterLoad(AfterLoadEvent<E> event) {
|
||||
onAfterLoad(event.getDBObject());
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures raw {@link com.mongodb.DBObject} and converted domain type after conversion.
|
||||
*
|
||||
* @param dbo can be {@literal null}.
|
||||
* @param source will never be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #onAfterConvert(AfterConvertEvent)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void onAfterConvert(DBObject dbo, E source) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterConvert({}, {})", dbo, source);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures {@link AfterConvertEvent}.
|
||||
*
|
||||
* @param event will never be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public void onAfterConvert(AfterConvertEvent<E> event) {
|
||||
onAfterConvert(event.getDBObject(), event.getSource());
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures {@link com.mongodb.DBObject} after delete.
|
||||
*
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #onAfterDelete(AfterDeleteEvent)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void onAfterDelete(DBObject dbo) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterDelete({})", dbo);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Captures {@link AfterDeleteEvent}.
|
||||
*
|
||||
* @param event will never be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public void onAfterDelete(AfterDeleteEvent<E> event) {
|
||||
onAfterDelete(event.getDBObject());
|
||||
}
|
||||
|
||||
/**
|
||||
* Capture {@link com.mongodb.DBObject} before delete.
|
||||
*
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #onBeforeDelete(BeforeDeleteEvent)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void onBeforeDelete(DBObject dbo) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onBeforeDelete({})", dbo);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Capture {@link BeforeDeleteEvent}.
|
||||
*
|
||||
* @param event will never be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public void onBeforeDelete(BeforeDeleteEvent<E> event) {
|
||||
onBeforeDelete(event.getDBObject());
|
||||
}
|
||||
}
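The DBObject-based callbacks above are deprecated in favour of the event-object variants. A sketch of a listener written against the new API; Person is a placeholder domain type.

import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;

class PersonAuditListener extends AbstractMongoEventListener<Person> {

    @Override
    public void onBeforeConvert(BeforeConvertEvent<Person> event) {
        // event.getSource() is the Person about to be converted
    }

    @Override
    public void onBeforeSave(BeforeSaveEvent<Person> event) {
        // the collection name is newly carried by the event (null when the deprecated constructors are used)
        if (event.getCollectionName() != null) {
            event.getDBObject().put("_collection", event.getCollectionName());
        }
    }
}

class Person {
    String name;
}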
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -13,20 +13,42 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* {@link MongoMappingEvent} thrown after convert of a document.
|
||||
*
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class AfterConvertEvent<E> extends MongoMappingEvent<E> {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/**
|
||||
* Creates new {@link AfterConvertEvent}.
|
||||
*
|
||||
* @param dbo can be {@literal null}.
|
||||
* @param source must not be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #AfterConvertEvent(DBObject, Object, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public AfterConvertEvent(DBObject dbo, E source) {
|
||||
super(source, dbo);
|
||||
this(dbo, source, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AfterConvertEvent}.
|
||||
*
|
||||
* @param dbo can be {@literal null}.
|
||||
* @param source must not be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public AfterConvertEvent(DBObject dbo, E source, String collectionName) {
|
||||
super(source, dbo, collectionName);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 by the original author(s).
|
||||
* Copyright 2013-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,6 +22,7 @@ import com.mongodb.DBObject;
|
||||
* will be the query document <em>after</em> it has been mapped onto the domain type handled.
|
||||
*
|
||||
* @author Martin Baumgartner
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class AfterDeleteEvent<T> extends AbstractDeleteEvent<T> {
|
||||
|
||||
@@ -32,8 +33,22 @@ public class AfterDeleteEvent<T> extends AbstractDeleteEvent<T> {
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #AfterDeleteEvent(DBObject, Class, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public AfterDeleteEvent(DBObject dbo, Class<T> type) {
|
||||
super(dbo, type);
|
||||
this(dbo, type, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link AfterDeleteEvent} for the given {@link DBObject}, type and collectionName.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public AfterDeleteEvent(DBObject dbo, Class<T> type, String collectionName) {
|
||||
super(dbo, type, collectionName);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -26,6 +26,7 @@ import com.mongodb.DBObject;
|
||||
* @author Oliver Gierke
|
||||
* @author Jon Brisbin
|
||||
* @author Christoph Leiter
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class AfterLoadEvent<T> extends MongoMappingEvent<DBObject> {
|
||||
|
||||
@@ -36,11 +37,25 @@ public class AfterLoadEvent<T> extends MongoMappingEvent<DBObject> {
|
||||
* Creates a new {@link AfterLoadEvent} for the given {@link DBObject} and type.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #AfterLoadEvent(DBObject, Class, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public AfterLoadEvent(DBObject dbo, Class<T> type) {
|
||||
this(dbo, type, null);
|
||||
}
|
||||
|
||||
super(dbo, dbo);
|
||||
/**
|
||||
* Creates a new {@link AfterLoadEvent} for the given {@link DBObject}, type and collectionName.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type must not be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public AfterLoadEvent(DBObject dbo, Class<T> type, String collectionName) {
|
||||
|
||||
super(dbo, dbo, collectionName);
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
this.type = type;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -19,14 +19,37 @@ package org.springframework.data.mongodb.core.mapping.event;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* {@link MongoMappingEvent} triggered after save of a document.
|
||||
*
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class AfterSaveEvent<E> extends MongoMappingEvent<E> {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/**
|
||||
* Creates new {@link AfterSaveEvent}
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #AfterSaveEvent(Object, DBObject, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public AfterSaveEvent(E source, DBObject dbo) {
|
||||
super(source, dbo);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AfterSaveEvent}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public AfterSaveEvent(E source, DBObject dbo, String collectionName) {
|
||||
super(source, dbo, collectionName);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2012 the original author or authors.
|
||||
* Copyright 2011-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -20,12 +20,31 @@ package org.springframework.data.mongodb.core.mapping.event;
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class BeforeConvertEvent<T> extends MongoMappingEvent<T> {
|
||||
|
||||
private static final long serialVersionUID = 252614269008845243L;
|
||||
|
||||
/**
|
||||
* Creates new {@link BeforeConvertEvent}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #BeforeConvertEvent(Object, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public BeforeConvertEvent(T source) {
|
||||
super(source, null);
|
||||
this(source, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link BeforeConvertEvent}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public BeforeConvertEvent(T source, String collectionName) {
|
||||
super(source, null, collectionName);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2013 by the original author(s).
|
||||
* Copyright 2013-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -22,6 +22,7 @@ import com.mongodb.DBObject;
|
||||
* document <em>before</em> being mapped based on the domain class handled.
|
||||
*
|
||||
* @author Martin Baumgartner
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class BeforeDeleteEvent<T> extends AbstractDeleteEvent<T> {
|
||||
|
||||
@@ -32,8 +33,22 @@ public class BeforeDeleteEvent<T> extends AbstractDeleteEvent<T> {
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #BeforeDeleteEvent(DBObject, Class, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public BeforeDeleteEvent(DBObject dbo, Class<T> type) {
|
||||
super(dbo, type);
|
||||
this(dbo, type, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link BeforeDeleteEvent} for the given {@link DBObject}, type and collectionName.
|
||||
*
|
||||
* @param dbo must not be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public BeforeDeleteEvent(DBObject dbo, Class<T> type, String collectionName) {
|
||||
super(dbo, type, collectionName);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -19,14 +19,37 @@ package org.springframework.data.mongodb.core.mapping.event;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* {@link MongoMappingEvent} triggered before save of a document.
|
||||
*
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class BeforeSaveEvent<E> extends MongoMappingEvent<E> {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/**
|
||||
* Creates new {@link BeforeSaveEvent}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #BeforeSaveEvent(Object, DBObject, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public BeforeSaveEvent(E source, DBObject dbo) {
|
||||
super(source, dbo);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link BeforeSaveEvent}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
* @since 1.8
|
||||
*/
|
||||
public BeforeSaveEvent(E source, DBObject dbo, String collectionName) {
|
||||
super(source, dbo, collectionName);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2011 by the original author(s).
|
||||
* Copyright (c) 2011-2015 by the original author(s).
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,26 +16,69 @@
|
||||
|
||||
package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
import org.springframework.context.ApplicationEvent;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Base {@link ApplicationEvent} triggered by Spring Data MongoDB.
|
||||
*
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MongoMappingEvent<T> extends ApplicationEvent {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
private final DBObject dbo;
|
||||
private final String collectionName;
|
||||
|
||||
/**
|
||||
* Creates new {@link MongoMappingEvent}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @deprecated since 1.8. Please use {@link #MongoMappingEvent(Object, DBObject, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public MongoMappingEvent(T source, DBObject dbo) {
|
||||
super(source);
|
||||
this.dbo = dbo;
|
||||
this(source, dbo, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link MongoMappingEvent}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param dbo can be {@literal null}.
|
||||
* @param collectionName can be {@literal null}.
|
||||
*/
|
||||
public MongoMappingEvent(T source, DBObject dbo, String collectionName) {
|
||||
|
||||
super(source);
|
||||
this.dbo = dbo;
|
||||
this.collectionName = collectionName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal null} if not set.
|
||||
*/
|
||||
public DBObject getDBObject() {
|
||||
return dbo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the collection the event refers to.
|
||||
*
|
||||
* @return {@literal null} if not set.
|
||||
* @since 1.8
|
||||
*/
|
||||
public String getCollectionName() {
|
||||
return collectionName;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.util.EventObject#getSource()
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked" })
|
||||
@Override
|
||||
public T getSource() {
|
||||
|
||||
@@ -45,6 +45,8 @@ public class MapReduceOptions {
|
||||
|
||||
private Boolean verbose = true;
|
||||
|
||||
private Integer limit;
|
||||
|
||||
private Map<String, Object> extraOptions = new HashMap<String, Object>();
|
||||
|
||||
/**
|
||||
@@ -64,6 +66,8 @@ public class MapReduceOptions {
|
||||
* @return MapReduceOptions so that methods can be chained in a fluent API style
|
||||
*/
|
||||
public MapReduceOptions limit(int limit) {
|
||||
|
||||
this.limit = limit;
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -247,6 +251,15 @@ public class MapReduceOptions {
|
||||
return this.scopeVariables;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the maximum number of documents for the input into the map function.
|
||||
*
|
||||
* @return {@literal null} if not set.
|
||||
*/
|
||||
public Integer getLimit() {
|
||||
return limit;
|
||||
}
|
||||
|
||||
public DBObject getOptionsObject() {
|
||||
BasicDBObject cmd = new BasicDBObject();
|
||||
|
||||
@@ -264,6 +277,10 @@ public class MapReduceOptions {
|
||||
cmd.put("scope", scopeVariables);
|
||||
}
|
||||
|
||||
if (limit != null) {
|
||||
cmd.put("limit", limit);
|
||||
}
|
||||
|
||||
if (!extraOptions.keySet().isEmpty()) {
|
||||
cmd.putAll(extraOptions);
|
||||
}
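A hedged sketch of the new limit(...) option in a map-reduce call; the collection name, script locations and result type are placeholders.

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
import org.springframework.data.mongodb.core.mapreduce.MapReduceResults;

class MapReduceLimitSketch {

    MapReduceResults<ValueObject> run(MongoTemplate template) {
        // only the first 100 documents of "jmr1" feed the map function
        return template.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js",
                MapReduceOptions.options().limit(100).outputTypeInline(), ValueObject.class);
    }

    static class ValueObject {
        String id;
        float value;
    }
}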
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2011 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -17,13 +17,10 @@ package org.springframework.data.mongodb.core.mapreduce;
|
||||
|
||||
public class MapReduceTiming {
|
||||
|
||||
private long mapTime;
|
||||
|
||||
private long emitLoopTime;
|
||||
|
||||
private long totalTime;
|
||||
private long mapTime, emitLoopTime, totalTime;
|
||||
|
||||
public MapReduceTiming(long mapTime, long emitLoopTime, long totalTime) {
|
||||
|
||||
this.mapTime = mapTime;
|
||||
this.emitLoopTime = emitLoopTime;
|
||||
this.totalTime = totalTime;
|
||||
@@ -41,37 +38,52 @@ public class MapReduceTiming {
|
||||
return totalTime;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return "MapReduceTiming [mapTime=" + mapTime + ", emitLoopTime=" + emitLoopTime + ", totalTime=" + totalTime + "]";
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
|
||||
result = prime * result + (int) (emitLoopTime ^ (emitLoopTime >>> 32));
|
||||
result = prime * result + (int) (mapTime ^ (mapTime >>> 32));
|
||||
result = prime * result + (int) (totalTime ^ (totalTime >>> 32));
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/*
|
||||
*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj)
|
||||
return true;
|
||||
if (obj == null)
|
||||
return false;
|
||||
if (getClass() != obj.getClass())
|
||||
return false;
|
||||
MapReduceTiming other = (MapReduceTiming) obj;
|
||||
if (emitLoopTime != other.emitLoopTime)
|
||||
return false;
|
||||
if (mapTime != other.mapTime)
|
||||
return false;
|
||||
if (totalTime != other.totalTime)
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(obj instanceof MapReduceTiming)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
MapReduceTiming that = (MapReduceTiming) obj;
|
||||
|
||||
return this.emitLoopTime == that.emitLoopTime && //
|
||||
this.mapTime == that.mapTime && //
|
||||
this.totalTime == that.totalTime;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
* Copyright 2010-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -28,6 +28,7 @@ import com.mongodb.util.JSON;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
* @author John Willemin
|
||||
*/
|
||||
public class BasicQuery extends Query {
|
||||
|
||||
@@ -70,6 +71,19 @@ public class BasicQuery extends Query {
|
||||
|
||||
@Override
|
||||
public DBObject getFieldsObject() {
|
||||
|
||||
if (fieldsObject == null) {
|
||||
return super.getFieldsObject();
|
||||
}
|
||||
|
||||
if (super.getFieldsObject() != null) {
|
||||
|
||||
DBObject combinedFieldsObject = new BasicDBObject();
|
||||
combinedFieldsObject.putAll(fieldsObject);
|
||||
combinedFieldsObject.putAll(super.getFieldsObject());
|
||||
return combinedFieldsObject;
|
||||
}
|
||||
|
||||
return fieldsObject;
|
||||
}
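The new getFieldsObject() above merges a JSON fields document with fields added through the fluent API instead of letting the JSON document win silently. A small sketch with arbitrary field names:

import org.springframework.data.mongodb.core.query.BasicQuery;

import com.mongodb.DBObject;

class BasicQueryFieldsSketch {

    static DBObject projection() {
        BasicQuery query = new BasicQuery("{ \"age\" : { \"$gte\" : 18 } }", "{ \"name\" : 1 }");
        query.fields().include("city");
        return query.getFieldsObject(); // now contains both "name" and "city"
    }
}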
@@ -22,9 +22,11 @@ import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.bson.BSON;
|
||||
import org.springframework.data.domain.Example;
|
||||
import org.springframework.data.geo.Circle;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.geo.Shape;
|
||||
@@ -87,6 +89,30 @@ public class Criteria implements CriteriaDefinition {
|
||||
return new Criteria(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link Criteria} matching an example object.
|
||||
*
|
||||
* @param example must not be {@literal null}.
|
||||
* @return
|
||||
* @see Criteria#alike(Example)
|
||||
* @since 1.8
|
||||
*/
|
||||
public static Criteria byExample(Object example) {
|
||||
return byExample(Example.of(example));
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link Criteria} matching an example object.
|
||||
*
|
||||
* @param example must not be {@literal null}.
|
||||
* @return
|
||||
* @see Criteria#alike(Example)
|
||||
* @since 1.8
|
||||
*/
|
||||
public static Criteria byExample(Example<?> example) {
|
||||
return new Criteria().alike(example);
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method to create a Criteria using the provided key
|
||||
*
|
||||
@@ -118,7 +144,7 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
private boolean lastOperatorWasNot() {
|
||||
return this.criteria.size() > 0 && "$not".equals(this.criteria.keySet().toArray()[this.criteria.size() - 1]);
|
||||
return !this.criteria.isEmpty() && "$not".equals(this.criteria.keySet().toArray()[this.criteria.size() - 1]);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -433,7 +459,23 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a geospatical criterion using a {@literal $maxDistance} operation, for use with $near
|
||||
* Creates criterion using {@code $geoIntersects} operator which matches intersections of the given {@code geoJson}
|
||||
* structure and the documents one. Requires MongoDB 2.4 or better.
|
||||
*
|
||||
* @param geoJson must not be {@literal null}.
|
||||
* @return
|
||||
* @since 1.8
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
public Criteria intersects(GeoJson geoJson) {
|
||||
|
||||
Assert.notNull(geoJson, "GeoJson must not be null!");
|
||||
criteria.put("$geoIntersects", geoJson);
|
||||
return this;
|
||||
}
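A rough sketch of the new intersects(GeoJson) criterion (Venue and its GeoJSON location property are placeholders; as noted above this requires MongoDB 2.4 or better):

import java.util.List;

import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.geo.GeoJsonPolygon;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class GeoIntersectsSketch {

	// Venue is a hypothetical document with a GeoJSON 'location' property.
	List<Venue> findIntersecting(MongoOperations operations) {

		// a closed ring: first and last point are identical
		GeoJsonPolygon area = new GeoJsonPolygon(
				new Point(0, 0), new Point(0, 5), new Point(5, 5), new Point(5, 0), new Point(0, 0));

		return operations.find(new Query(Criteria.where("location").intersects(area)), Venue.class);
	}
}
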
/**
|
||||
* Creates a geo-spatial criterion using a {@literal $maxDistance} operation, for use with $near
|
||||
*
|
||||
* @see http://docs.mongodb.org/manual/reference/operator/query/maxDistance/
|
||||
* @param maxDistance
|
||||
@@ -481,6 +523,20 @@ public class Criteria implements CriteriaDefinition {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the given object as a pattern.
|
||||
*
|
||||
* @param sample
|
||||
* @return
|
||||
* @since 1.8
|
||||
*/
|
||||
public Criteria alike(Example<?> sample) {
|
||||
|
||||
criteria.put("$sample", sample);
|
||||
this.criteriaChain.add(this);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an 'or' criteria using the $or operator for all of the provided criteria
|
||||
* <p>
|
||||
@@ -565,9 +621,10 @@ public class Criteria implements CriteriaDefinition {
|
||||
DBObject dbo = new BasicDBObject();
|
||||
boolean not = false;
|
||||
|
||||
for (String k : this.criteria.keySet()) {
|
||||
for (Entry<String, Object> entry : criteria.entrySet()) {
|
||||
|
||||
Object value = this.criteria.get(k);
|
||||
String key = entry.getKey();
|
||||
Object value = entry.getValue();
|
||||
|
||||
if (requiresGeoJsonFormat(value)) {
|
||||
value = new BasicDBObject("$geometry", value);
|
||||
@@ -575,14 +632,14 @@ public class Criteria implements CriteriaDefinition {
|
||||
|
||||
if (not) {
|
||||
DBObject notDbo = new BasicDBObject();
|
||||
notDbo.put(k, value);
|
||||
notDbo.put(key, value);
|
||||
dbo.put("$not", notDbo);
|
||||
not = false;
|
||||
} else {
|
||||
if ("$not".equals(k) && value == null) {
|
||||
if ("$not".equals(key) && value == null) {
|
||||
not = true;
|
||||
} else {
|
||||
dbo.put(k, value);
|
||||
dbo.put(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2013 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -83,14 +83,10 @@ public class Field {
|
||||
|
||||
public DBObject getFieldsObject() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
DBObject dbo = new BasicDBObject(criteria);
|
||||
|
||||
for (String k : criteria.keySet()) {
|
||||
dbo.put(k, criteria.get(k));
|
||||
}
|
||||
|
||||
for (String k : slices.keySet()) {
|
||||
dbo.put(k, new BasicDBObject("$slice", slices.get(k)));
|
||||
for (Entry<String, Object> entry : slices.entrySet()) {
|
||||
dbo.put(entry.getKey(), new BasicDBObject("$slice", entry.getValue()));
|
||||
}
|
||||
|
||||
for (Entry<String, Criteria> entry : elemMatchs.entrySet()) {
|
||||
@@ -134,8 +130,8 @@ public class Field {
|
||||
return false;
|
||||
}
|
||||
|
||||
boolean samePositionKey = this.postionKey == null ? that.postionKey == null : this.postionKey
|
||||
.equals(that.postionKey);
|
||||
boolean samePositionKey = this.postionKey == null ? that.postionKey == null
|
||||
: this.postionKey.equals(that.postionKey);
|
||||
boolean samePositionValue = this.positionValue == that.positionValue;
|
||||
|
||||
return samePositionKey && samePositionValue;
|
||||
|
||||
@@ -0,0 +1,106 @@
|
||||
/*
|
||||
* Copyright 2015-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.springframework.data.repository.query.parser.Part.Type;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.8
|
||||
*/
|
||||
public enum MongoRegexCreator {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
private static final Pattern PUNCTATION_PATTERN = Pattern.compile("\\p{Punct}");
|
||||
|
||||
/**
|
||||
* Creates a regular expression String to be used with {@code $regex}.
|
||||
*
|
||||
* @param source the plain String
|
||||
* @param type
|
||||
* @return {@literal source} when {@literal source} or {@literal type} is {@literal null}.
|
||||
*/
|
||||
public String toRegularExpression(String source, Type type) {
|
||||
|
||||
if (type == null || source == null) {
|
||||
return source;
|
||||
}
|
||||
|
||||
String regex = prepareAndEscapeStringBeforeApplyingLikeRegex(source, type);
|
||||
|
||||
switch (type) {
|
||||
case STARTING_WITH:
|
||||
regex = "^" + regex;
|
||||
break;
|
||||
case ENDING_WITH:
|
||||
regex = regex + "$";
|
||||
break;
|
||||
case CONTAINING:
|
||||
case NOT_CONTAINING:
|
||||
regex = ".*" + regex + ".*";
|
||||
break;
|
||||
case SIMPLE_PROPERTY:
|
||||
case NEGATING_SIMPLE_PROPERTY:
|
||||
regex = "^" + regex + "$";
|
||||
default:
|
||||
}
|
||||
|
||||
return regex;
|
||||
}
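A short sketch of what the switch above produces for the individual part types (expected results shown as comments, derived from the code in this hunk):

import org.springframework.data.mongodb.core.query.MongoRegexCreator;
import org.springframework.data.repository.query.parser.Part.Type;

class RegexCreatorSketch {

	void demo() {

		MongoRegexCreator creator = MongoRegexCreator.INSTANCE;

		creator.toRegularExpression("fire", Type.STARTING_WITH);      // "^fire"
		creator.toRegularExpression("fire", Type.ENDING_WITH);        // "fire$"
		creator.toRegularExpression("fire", Type.CONTAINING);         // ".*fire.*"
		creator.toRegularExpression("*fire*", Type.LIKE);             // ".*fire.*"
		creator.toRegularExpression("sp.ring", Type.SIMPLE_PROPERTY); // "^\Qsp.ring\E$" - punctuation gets quoted
	}
}
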
private String prepareAndEscapeStringBeforeApplyingLikeRegex(String source, Type type) {
|
||||
|
||||
if (ObjectUtils.nullSafeEquals(Type.REGEX, type)) {
|
||||
return source;
|
||||
}
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(Type.LIKE, type)) {
|
||||
return PUNCTATION_PATTERN.matcher(source).find() ? Pattern.quote(source) : source;
|
||||
}
|
||||
|
||||
if (source.equals("*")) {
|
||||
return ".*";
|
||||
}
|
||||
|
||||
StringBuilder sb = new StringBuilder();
|
||||
|
||||
boolean leadingWildcard = source.startsWith("*");
|
||||
boolean trailingWildcard = source.endsWith("*");
|
||||
|
||||
String valueToUse = source.substring(leadingWildcard ? 1 : 0,
|
||||
trailingWildcard ? source.length() - 1 : source.length());
|
||||
|
||||
if (PUNCTATION_PATTERN.matcher(valueToUse).find()) {
|
||||
valueToUse = Pattern.quote(valueToUse);
|
||||
}
|
||||
|
||||
if (leadingWildcard) {
|
||||
sb.append(".*");
|
||||
}
|
||||
sb.append(valueToUse);
|
||||
if (trailingWildcard) {
|
||||
sb.append(".*");
|
||||
}
|
||||
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -94,9 +94,9 @@ public class Query {
|
||||
if (existing == null) {
|
||||
this.criteria.put(key, criteriaDefinition);
|
||||
} else {
|
||||
throw new InvalidMongoDbApiUsageException("Due to limitations of the com.mongodb.BasicDBObject, "
|
||||
+ "you can't add a second '" + key + "' criteria. " + "Query already contains '"
|
||||
+ existing.getCriteriaObject() + "'.");
|
||||
throw new InvalidMongoDbApiUsageException(
|
||||
"Due to limitations of the com.mongodb.BasicDBObject, " + "you can't add a second '" + key + "' criteria. "
|
||||
+ "Query already contains '" + existing.getCriteriaObject() + "'.");
|
||||
}
|
||||
|
||||
return this;
|
||||
@@ -176,7 +176,7 @@ public class Query {
|
||||
|
||||
for (Order order : sort) {
|
||||
if (order.isIgnoreCase()) {
|
||||
throw new IllegalArgumentException(String.format("Gven sort contained an Order for %s with ignore case! "
|
||||
throw new IllegalArgumentException(String.format("Given sort contained an Order for %s with ignore case! "
|
||||
+ "MongoDB does not support sorting ignoreing case currently!", order.getProperty()));
|
||||
}
|
||||
}
|
||||
@@ -221,10 +221,8 @@ public class Query {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
|
||||
for (String k : criteria.keySet()) {
|
||||
CriteriaDefinition c = criteria.get(k);
|
||||
DBObject cl = c.getCriteriaObject();
|
||||
dbo.putAll(cl);
|
||||
for (CriteriaDefinition definition : criteria.values()) {
|
||||
dbo.putAll(definition.getCriteriaObject());
|
||||
}
|
||||
|
||||
if (!restrictedTypes.isEmpty()) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012 the original author or authors.
|
||||
* Copyright 2012-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,12 +16,15 @@
|
||||
package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
import com.mongodb.util.JSON;
|
||||
|
||||
@@ -29,6 +32,7 @@ import com.mongodb.util.JSON;
|
||||
* Utility methods for JSON serialization.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public abstract class SerializationUtils {
|
||||
|
||||
@@ -36,6 +40,68 @@ public abstract class SerializationUtils {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Flattens out a given {@link DBObject}.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* {
|
||||
* _id : 1
|
||||
* nested : { value : "conflux"}
|
||||
* }
|
||||
* </code>
|
||||
* will result in
|
||||
* <code>
|
||||
* {
|
||||
* _id : 1
|
||||
* nested.value : "conflux"
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @param source can be {@literal null}.
|
||||
* @return {@link Collections#emptyMap()} when source is {@literal null}
|
||||
* @since 1.8
|
||||
*/
|
||||
public static Map<String, Object> flattenMap(DBObject source) {
|
||||
|
||||
if (source == null) {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
Map<String, Object> result = new HashMap<String, Object>();
|
||||
toFlatMap("", source, result);
|
||||
return result;
|
||||
}
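A small sketch mirroring the Javadoc example above; flattenMap(...) turns nested keys into dotted paths:

import java.util.Map;

import org.springframework.data.mongodb.core.query.SerializationUtils;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

class FlattenMapSketch {

	Map<String, Object> demo() {

		DBObject source = new BasicDBObject("_id", 1) //
				.append("nested", new BasicDBObject("value", "conflux"));

		// yields { "_id" : 1, "nested.value" : "conflux" }
		return SerializationUtils.flattenMap(source);
	}
}
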
private static void toFlatMap(String currentPath, Object source, Map<String, Object> map) {
|
||||
|
||||
if (source instanceof BasicDBObject) {
|
||||
|
||||
BasicDBObject dbo = (BasicDBObject) source;
|
||||
Iterator<Map.Entry<String, Object>> iter = dbo.entrySet().iterator();
|
||||
String pathPrefix = currentPath.isEmpty() ? "" : currentPath + ".";
|
||||
|
||||
while (iter.hasNext()) {
|
||||
|
||||
Map.Entry<String, Object> entry = iter.next();
|
||||
|
||||
if (entry.getKey().startsWith("$")) {
|
||||
if (map.containsKey(currentPath)) {
|
||||
((BasicDBObject) map.get(currentPath)).put(entry.getKey(), entry.getValue());
|
||||
} else {
|
||||
map.put(currentPath, new BasicDBObject(entry.getKey(), entry.getValue()));
|
||||
}
|
||||
} else {
|
||||
|
||||
toFlatMap(pathPrefix + entry.getKey(), entry.getValue(), map);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
map.put(currentPath, source);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Serializes the given object into pseudo-JSON meaning it's trying to create a JSON representation as far as possible
|
||||
* but falling back to the given object's {@link Object#toString()} method if it's not serializable. Useful for
|
||||
|
||||
@@ -63,7 +63,7 @@ public class TextCriteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* For a full list of supported languages see the mongdodb reference manual for <a
|
||||
* For a full list of supported languages see the mongodb reference manual for <a
|
||||
* href="http://docs.mongodb.org/manual/reference/text-search-languages/">Text Search Languages</a>.
|
||||
*
|
||||
* @param language
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -64,7 +64,7 @@ public class Update {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an {@link Update} instance from the given {@link DBObject}. Allows to explicitly exlude fields from making
|
||||
* Creates an {@link Update} instance from the given {@link DBObject}. Allows to explicitly exclude fields from making
|
||||
* it into the created {@link Update} object. Note, that this will set attributes directly and <em>not</em> use
|
||||
* {@literal $set}. This means fields not given in the {@link DBObject} will be nulled when executing the update. To
|
||||
* create an only-updating {@link Update} instance of a {@link DBObject}, call {@link #set(String, Object)} for each
|
||||
@@ -254,7 +254,7 @@ public class Update {
|
||||
* @return
|
||||
*/
|
||||
public Update pullAll(String key, Object[] values) {
|
||||
addFieldOperation("$pullAll", key, Arrays.copyOf(values, values.length));
|
||||
addMultiFieldOperation("$pullAll", key, Arrays.copyOf(values, values.length));
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -327,17 +327,22 @@ public class Update {
|
||||
}
|
||||
|
||||
public DBObject getUpdateObject() {
|
||||
|
||||
DBObject dbo = new BasicDBObject();
|
||||
for (String k : modifierOps.keySet()) {
|
||||
dbo.put(k, modifierOps.get(k));
|
||||
}
|
||||
return dbo;
|
||||
return new BasicDBObject(modifierOps);
|
||||
}
|
||||
|
||||
/**
|
||||
* This method is not called anymore rather override {@link #addMultiFieldOperation(String, String, Object)}.
|
||||
*
|
||||
* @param operator
|
||||
* @param key
|
||||
* @param value
|
||||
* @deprectaed Use {@link #addMultiFieldOperation(String, String, Object)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
protected void addFieldOperation(String operator, String key, Object value) {
|
||||
|
||||
Assert.hasText(key, "Key/Path for update must not be null or blank.");
|
||||
|
||||
modifierOps.put(operator, new BasicDBObject(key, value));
|
||||
this.keysToUpdate.add(key);
|
||||
}
|
||||
@@ -355,8 +360,8 @@ public class Update {
|
||||
if (existingValue instanceof BasicDBObject) {
|
||||
keyValueMap = (BasicDBObject) existingValue;
|
||||
} else {
|
||||
throw new InvalidDataAccessApiUsageException("Modifier Operations should be a LinkedHashMap but was "
|
||||
+ existingValue.getClass());
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
"Modifier Operations should be a LinkedHashMap but was " + existingValue.getClass());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -45,6 +45,7 @@ import com.mongodb.gridfs.GridFSInputFile;
|
||||
* @author Philipp Schneider
|
||||
* @author Thomas Darimont
|
||||
* @author Martin Baumgartner
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver {
|
||||
|
||||
@@ -182,7 +183,7 @@ public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver
|
||||
public List<GridFSDBFile> find(Query query) {
|
||||
|
||||
if (query == null) {
|
||||
return getGridFs().find((DBObject) null);
|
||||
return getGridFs().find(new BasicDBObject());
|
||||
}
|
||||
|
||||
DBObject queryObject = getMappedQuery(query.getQueryObject());
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2002-2012 the original author or authors.
|
||||
* Copyright 2002-2015 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -29,6 +29,7 @@ import com.mongodb.MongoException;
|
||||
* Base class to encapsulate common configuration settings when connecting to a database
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
public abstract class AbstractMonitor {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2014 the original author or authors.
|
||||
* Copyright 2014-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -25,10 +25,11 @@ import org.springframework.data.annotation.QueryAnnotation;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.6
|
||||
*/
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target(ElementType.METHOD)
|
||||
@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE })
|
||||
@Documented
|
||||
@QueryAnnotation
|
||||
public @interface Meta {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2014 the original author or authors.
|
||||
* Copyright 2010-2017 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,19 +18,23 @@ package org.springframework.data.mongodb.repository;
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.domain.Example;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.repository.NoRepositoryBean;
|
||||
import org.springframework.data.repository.PagingAndSortingRepository;
|
||||
import org.springframework.data.repository.query.QueryByExampleExecutor;
|
||||
|
||||
/**
|
||||
* Mongo specific {@link org.springframework.data.repository.Repository} interface.
|
||||
*
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@NoRepositoryBean
|
||||
public interface MongoRepository<T, ID extends Serializable> extends PagingAndSortingRepository<T, ID> {
|
||||
public interface MongoRepository<T, ID extends Serializable>
|
||||
extends PagingAndSortingRepository<T, ID>, QueryByExampleExecutor<T> {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -51,10 +55,10 @@ public interface MongoRepository<T, ID extends Serializable> extends PagingAndSo
|
||||
List<T> findAll(Sort sort);
|
||||
|
||||
/**
|
||||
* Inserts the given a given entity. Assumes the instance to be new to be able to apply insertion optimizations. Use
|
||||
* Inserts the given entity. Assumes the instance to be new to be able to apply insertion optimizations. Use
|
||||
* the returned instance for further operations as the save operation might have changed the entity instance
|
||||
* completely. Prefer using {@link #save(Object)} instead to avoid the usage of store-specific API.
|
||||
*
|
||||
*
|
||||
* @param entity must not be {@literal null}.
|
||||
* @return the saved entity
|
||||
* @since 1.7
|
||||
@@ -65,10 +69,21 @@ public interface MongoRepository<T, ID extends Serializable> extends PagingAndSo
|
||||
* Inserts the given entities. Assumes the given entities to have not been persisted yet and thus will optimize the
|
||||
* insert over a call to {@link #save(Iterable)}. Prefer using {@link #save(Iterable)} to avoid the usage of store
|
||||
* specific API.
|
||||
*
|
||||
*
|
||||
* @param entities must not be {@literal null}.
|
||||
* @return the saved entities
|
||||
* @since 1.7
|
||||
*/
|
||||
<S extends T> List<S> insert(Iterable<S> entities);
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example)
|
||||
*/
|
||||
<S extends T> List<S> findAll(Example<S> example);
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example, org.springframework.data.domain.Sort)
|
||||
*/
|
||||
<S extends T> List<S> findAll(Example<S> example, Sort sort);
|
||||
|
||||
}
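A usage sketch for the Query-by-Example methods now pulled in via QueryByExampleExecutor (PersonRepository and Person are placeholder types, not part of this change):

import java.util.List;

import org.springframework.data.domain.Example;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.repository.MongoRepository;

interface PersonRepository extends MongoRepository<Person, String> {}

class QueryByExampleSketch {

	List<Person> demo(PersonRepository repository) {

		Person probe = new Person(); // unset properties are ignored when matching
		probe.setLastname("Matthews");

		repository.findAll(Example.of(probe));
		return repository.findAll(Example.of(probe), new Sort("firstname"));
	}
}
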
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2014 the original author or authors.
|
||||
* Copyright 2011-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -30,15 +30,16 @@ import org.springframework.data.annotation.QueryAnnotation;
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target(ElementType.METHOD)
|
||||
@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE })
|
||||
@Documented
|
||||
@QueryAnnotation
|
||||
public @interface Query {
|
||||
|
||||
/**
|
||||
* Takes a MongoDB JSON string to define the actual query to be executed. This one will take precendece over the
|
||||
* Takes a MongoDB JSON string to define the actual query to be executed. This one will take precedence over the
|
||||
* method name then.
|
||||
*
|
||||
* @return
|
||||
|
||||
@@ -27,6 +27,7 @@ import org.springframework.context.annotation.ComponentScan.Filter;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean;
|
||||
import org.springframework.data.repository.config.DefaultRepositoryBaseClass;
|
||||
import org.springframework.data.repository.query.QueryLookupStrategy;
|
||||
import org.springframework.data.repository.query.QueryLookupStrategy.Key;
|
||||
|
||||
@@ -46,7 +47,7 @@ public @interface EnableMongoRepositories {
|
||||
|
||||
/**
|
||||
* Alias for the {@link #basePackages()} attribute. Allows for more concise annotation declarations e.g.:
|
||||
* {@code @EnableJpaRepositories("org.my.pkg")} instead of {@code @EnableJpaRepositories(basePackages="org.my.pkg")}.
|
||||
* {@code @EnableMongoRepositories("org.my.pkg")} instead of {@code @EnableMongoRepositories(basePackages="org.my.pkg")}.
|
||||
*/
|
||||
String[] value() default {};
|
||||
|
||||
@@ -107,6 +108,14 @@ public @interface EnableMongoRepositories {
|
||||
*/
|
||||
Class<?> repositoryFactoryBeanClass() default MongoRepositoryFactoryBean.class;
|
||||
|
||||
/**
|
||||
* Configure the repository base class to be used to create repository proxies for this particular configuration.
|
||||
*
|
||||
* @return
|
||||
* @since 1.8
|
||||
*/
|
||||
Class<?> repositoryBaseClass() default DefaultRepositoryBaseClass.class;
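A configuration sketch for the new repositoryBaseClass attribute (the package name, MongoConfig and MyRepositoryBase are placeholders; the overridden methods follow the AbstractMongoConfiguration contract of this release line):

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;

import com.mongodb.Mongo;
import com.mongodb.MongoClient;

@Configuration
@EnableMongoRepositories(basePackages = "org.my.pkg", repositoryBaseClass = MyRepositoryBase.class)
class MongoConfig extends AbstractMongoConfiguration {

	// MyRepositoryBase is a hypothetical custom repository base implementation.

	@Override
	protected String getDatabaseName() {
		return "example";
	}

	@Override
	public Mongo mongo() throws Exception {
		return new MongoClient();
	}
}
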
/**
|
||||
* Configures the name of the {@link MongoTemplate} bean to be used with the repositories detected.
|
||||
*
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2012-2014 the original author or authors.
|
||||
* Copyright 2012-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -19,21 +19,15 @@ import java.lang.annotation.Annotation;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.beans.factory.support.RootBeanDefinition;
|
||||
import org.springframework.core.annotation.AnnotationAttributes;
|
||||
import org.springframework.data.config.ParsingUtils;
|
||||
import org.springframework.data.mongodb.config.BeanNames;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.repository.MongoRepository;
|
||||
import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean;
|
||||
import org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource;
|
||||
import org.springframework.data.repository.config.RepositoryConfigurationExtension;
|
||||
import org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport;
|
||||
import org.springframework.data.repository.config.RepositoryConfigurationSource;
|
||||
import org.springframework.data.repository.config.XmlRepositoryConfigurationSource;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
@@ -47,8 +41,6 @@ public class MongoRepositoryConfigurationExtension extends RepositoryConfigurati
|
||||
private static final String MONGO_TEMPLATE_REF = "mongo-template-ref";
|
||||
private static final String CREATE_QUERY_INDEXES = "create-query-indexes";
|
||||
|
||||
private boolean fallbackMappingContextCreated = false;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getModuleName()
|
||||
@@ -81,7 +73,7 @@ public class MongoRepositoryConfigurationExtension extends RepositoryConfigurati
|
||||
*/
|
||||
@Override
|
||||
protected Collection<Class<? extends Annotation>> getIdentifyingAnnotations() {
|
||||
return Collections.<Class<? extends Annotation>> singleton(Document.class);
|
||||
return Collections.<Class<? extends Annotation>>singleton(Document.class);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -90,19 +82,7 @@ public class MongoRepositoryConfigurationExtension extends RepositoryConfigurati
|
||||
*/
|
||||
@Override
|
||||
protected Collection<Class<?>> getIdentifyingTypes() {
|
||||
return Collections.<Class<?>> singleton(MongoRepository.class);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.RepositoryConfigurationSource)
|
||||
*/
|
||||
@Override
|
||||
public void postProcess(BeanDefinitionBuilder builder, RepositoryConfigurationSource source) {
|
||||
|
||||
if (fallbackMappingContextCreated) {
|
||||
builder.addPropertyReference("mappingContext", BeanNames.MAPPING_CONTEXT_BEAN_NAME);
|
||||
}
|
||||
return Collections.<Class<?>>singleton(MongoRepository.class);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -130,23 +110,4 @@ public class MongoRepositoryConfigurationExtension extends RepositoryConfigurati
|
||||
builder.addPropertyReference("mongoOperations", attributes.getString("mongoTemplateRef"));
|
||||
builder.addPropertyValue("createIndexesForQueryMethods", attributes.getBoolean("createIndexesForQueryMethods"));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#registerBeansForRoot(org.springframework.beans.factory.support.BeanDefinitionRegistry, org.springframework.data.repository.config.RepositoryConfigurationSource)
|
||||
*/
|
||||
@Override
|
||||
public void registerBeansForRoot(BeanDefinitionRegistry registry, RepositoryConfigurationSource configurationSource) {
|
||||
|
||||
super.registerBeansForRoot(registry, configurationSource);
|
||||
|
||||
if (!registry.containsBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME)) {
|
||||
|
||||
RootBeanDefinition definition = new RootBeanDefinition(MongoMappingContext.class);
|
||||
definition.setRole(AbstractBeanDefinition.ROLE_INFRASTRUCTURE);
|
||||
definition.setSource(configurationSource.getSource());
|
||||
|
||||
registry.registerBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME, definition);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2015 the original author or authors.
|
||||
* Copyright 2010-2016 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -15,31 +15,25 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.domain.PageImpl;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Range;
|
||||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.data.domain.SliceImpl;
|
||||
import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.GeoPage;
|
||||
import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.convert.EntityInstantiators;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.CollectionExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.DeleteExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.GeoNearExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.PagedExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.PagingGeoNearExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.ResultProcessingConverter;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.ResultProcessingExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.SingleEntityExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.SlicedExecution;
|
||||
import org.springframework.data.mongodb.repository.query.MongoQueryExecution.StreamExecution;
|
||||
import org.springframework.data.repository.query.ParameterAccessor;
|
||||
import org.springframework.data.repository.query.RepositoryQuery;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.data.util.StreamUtils;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.data.repository.query.ResultProcessor;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.WriteResult;
|
||||
|
||||
/**
|
||||
* Base class for {@link RepositoryQuery} implementations for Mongo.
|
||||
*
|
||||
@@ -51,6 +45,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
|
||||
private final MongoQueryMethod method;
|
||||
private final MongoOperations operations;
|
||||
private final EntityInstantiators instantiators;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AbstractMongoQuery} from the given {@link MongoQueryMethod} and {@link MongoOperations}.
|
||||
@@ -60,11 +55,12 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
*/
|
||||
public AbstractMongoQuery(MongoQueryMethod method, MongoOperations operations) {
|
||||
|
||||
Assert.notNull(operations);
|
||||
Assert.notNull(method);
|
||||
Assert.notNull(operations, "MongoOperations must not be null!");
|
||||
Assert.notNull(method, "MongoQueryMethod must not be null!");
|
||||
|
||||
this.method = method;
|
||||
this.operations = operations;
|
||||
this.instantiators = new EntityInstantiators();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -86,30 +82,53 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
|
||||
applyQueryMetaAttributesWhenPresent(query);
|
||||
|
||||
ResultProcessor processor = method.getResultProcessor().withDynamicProjection(accessor);
|
||||
String collection = method.getEntityInformation().getCollectionName();
|
||||
|
||||
MongoQueryExecution execution = getExecution(query, accessor,
|
||||
new ResultProcessingConverter(processor, operations, instantiators));
|
||||
|
||||
return execution.execute(query, processor.getReturnedType().getDomainType(), collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the execution instance to use.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param parameters must not be {@literal null}.
|
||||
* @param accessor must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private MongoQueryExecution getExecution(Query query, MongoParameterAccessor accessor,
|
||||
Converter<Object, Object> resultProcessing) {
|
||||
|
||||
if (method.isStreamQuery()) {
|
||||
return new StreamExecution().execute(query);
|
||||
} else if (isDeleteQuery()) {
|
||||
return new DeleteExecution().execute(query);
|
||||
return new StreamExecution(operations, resultProcessing);
|
||||
}
|
||||
|
||||
return new ResultProcessingExecution(getExecutionToWrap(query, accessor), resultProcessing);
|
||||
}
|
||||
|
||||
private MongoQueryExecution getExecutionToWrap(Query query, MongoParameterAccessor accessor) {
|
||||
|
||||
if (isDeleteQuery()) {
|
||||
return new DeleteExecution(operations, method);
|
||||
} else if (method.isGeoNearQuery() && method.isPageQuery()) {
|
||||
|
||||
MongoParameterAccessor countAccessor = new MongoParametersParameterAccessor(method, parameters);
|
||||
Query countQuery = createCountQuery(new ConvertingParameterAccessor(operations.getConverter(), countAccessor));
|
||||
|
||||
return new GeoNearExecution(accessor).execute(query, countQuery);
|
||||
return new PagingGeoNearExecution(operations, accessor, method.getReturnType(), this);
|
||||
} else if (method.isGeoNearQuery()) {
|
||||
return new GeoNearExecution(accessor).execute(query);
|
||||
return new GeoNearExecution(operations, accessor, method.getReturnType());
|
||||
} else if (method.isSliceQuery()) {
|
||||
return new SlicedExecution(accessor.getPageable()).execute(query);
|
||||
return new SlicedExecution(operations, accessor.getPageable());
|
||||
} else if (method.isCollectionQuery()) {
|
||||
return new CollectionExecution(accessor.getPageable()).execute(query);
|
||||
return new CollectionExecution(operations, accessor.getPageable());
|
||||
} else if (method.isPageQuery()) {
|
||||
return new PagedExecution(accessor.getPageable()).execute(query);
|
||||
return new PagedExecution(operations, accessor.getPageable());
|
||||
} else {
|
||||
return new SingleEntityExecution(isCountQuery()).execute(query);
|
||||
return new SingleEntityExecution(operations, isCountQuery());
|
||||
}
|
||||
}
|
||||
|
||||
private Query applyQueryMetaAttributesWhenPresent(Query query) {
|
||||
Query applyQueryMetaAttributesWhenPresent(Query query) {
|
||||
|
||||
if (method.hasQueryMetaAttributes()) {
|
||||
query.setMeta(method.getQueryMetaAttributes());
|
||||
@@ -127,12 +146,7 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
* @return
|
||||
*/
|
||||
protected Query createCountQuery(ConvertingParameterAccessor accessor) {
|
||||
|
||||
Query query = createQuery(accessor);
|
||||
|
||||
applyQueryMetaAttributesWhenPresent(query);
|
||||
|
||||
return query;
|
||||
return applyQueryMetaAttributesWhenPresent(createQuery(accessor));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -157,292 +171,4 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
|
||||
* @since 1.5
|
||||
*/
|
||||
protected abstract boolean isDeleteQuery();
|
||||
|
||||
private abstract class Execution {
|
||||
|
||||
abstract Object execute(Query query);
|
||||
|
||||
protected List<?> readCollection(Query query) {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
|
||||
String collectionName = metadata.getCollectionName();
|
||||
return operations.find(query, metadata.getJavaType(), collectionName);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Execution} for collection returning queries.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
final class CollectionExecution extends Execution {
|
||||
|
||||
private final Pageable pageable;
|
||||
|
||||
CollectionExecution(Pageable pageable) {
|
||||
this.pageable = pageable;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public Object execute(Query query) {
|
||||
return readCollection(query.with(pageable));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Execution} for {@link Slice} query methods.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @since 1.5
|
||||
*/
|
||||
|
||||
final class SlicedExecution extends Execution {
|
||||
|
||||
private final Pageable pageable;
|
||||
|
||||
SlicedExecution(Pageable pageable) {
|
||||
this.pageable = pageable;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
Object execute(Query query) {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
int pageSize = pageable.getPageSize();
|
||||
|
||||
// Apply Pageable but tweak limit to peek into next page
|
||||
Query modifiedQuery = query.with(pageable).limit(pageSize + 1);
|
||||
|
||||
List result = operations.find(modifiedQuery, metadata.getJavaType(), metadata.getCollectionName());
|
||||
|
||||
boolean hasNext = result.size() > pageSize;
|
||||
|
||||
return new SliceImpl<Object>(hasNext ? result.subList(0, pageSize) : result, pageable, hasNext);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Execution} for pagination queries.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
final class PagedExecution extends Execution {
|
||||
|
||||
private final Pageable pageable;
|
||||
|
||||
/**
|
||||
* Creates a new {@link PagedExecution}.
|
||||
*
|
||||
* @param pageable
|
||||
*/
|
||||
public PagedExecution(Pageable pageable) {
|
||||
|
||||
Assert.notNull(pageable);
|
||||
this.pageable = pageable;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
Object execute(Query query) {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
String collectionName = metadata.getCollectionName();
|
||||
Class<?> type = metadata.getJavaType();
|
||||
|
||||
int overallLimit = query.getLimit();
|
||||
long count = operations.count(query, type, collectionName);
|
||||
count = overallLimit != 0 ? Math.min(count, query.getLimit()) : count;
|
||||
|
||||
boolean pageableOutOfScope = pageable.getOffset() > count;
|
||||
|
||||
if (pageableOutOfScope) {
|
||||
return new PageImpl<Object>(Collections.emptyList(), pageable, count);
|
||||
}
|
||||
|
||||
// Apply raw pagination
|
||||
query = query.with(pageable);
|
||||
|
||||
// Adjust limit if page would exceed the overall limit
|
||||
if (overallLimit != 0 && pageable.getOffset() + pageable.getPageSize() > overallLimit) {
|
||||
query.limit(overallLimit - pageable.getOffset());
|
||||
}
|
||||
|
||||
List<?> result = operations.find(query, type, collectionName);
|
||||
return new PageImpl(result, pageable, count);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Execution} to return a single entity.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
final class SingleEntityExecution extends Execution {
|
||||
|
||||
private final boolean countProjection;
|
||||
|
||||
private SingleEntityExecution(boolean countProjection) {
|
||||
this.countProjection = countProjection;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
Object execute(Query query) {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
return countProjection ? operations.count(query, metadata.getJavaType()) : operations.findOne(query,
|
||||
metadata.getJavaType(), metadata.getCollectionName());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Execution} to execute geo-near queries.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
final class GeoNearExecution extends Execution {
|
||||
|
||||
private final MongoParameterAccessor accessor;
|
||||
|
||||
public GeoNearExecution(MongoParameterAccessor accessor) {
|
||||
this.accessor = accessor;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
Object execute(Query query) {
|
||||
|
||||
GeoResults<?> results = doExecuteQuery(query);
|
||||
return isListOfGeoResult() ? results.getContent() : results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes the given {@link Query} to return a page.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param countQuery must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
Object execute(Query query, Query countQuery) {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
long count = operations.count(countQuery, metadata.getCollectionName());
|
||||
|
||||
return new GeoPage<Object>(doExecuteQuery(query), accessor.getPageable(), count);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private GeoResults<Object> doExecuteQuery(Query query) {
|
||||
|
||||
Point nearLocation = accessor.getGeoNearLocation();
|
||||
NearQuery nearQuery = NearQuery.near(nearLocation);
|
||||
|
||||
if (query != null) {
|
||||
nearQuery.query(query);
|
||||
}
|
||||
|
||||
Range<Distance> distances = accessor.getDistanceRange();
|
||||
Distance maxDistance = distances.getUpperBound();
|
||||
|
||||
if (maxDistance != null) {
|
||||
nearQuery.maxDistance(maxDistance).in(maxDistance.getMetric());
|
||||
}
|
||||
|
||||
Distance minDistance = distances.getLowerBound();
|
||||
|
||||
if (minDistance != null) {
|
||||
nearQuery.minDistance(minDistance).in(minDistance.getMetric());
|
||||
}
|
||||
|
||||
Pageable pageable = accessor.getPageable();
|
||||
if (pageable != null) {
|
||||
nearQuery.with(pageable);
|
||||
}
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
return (GeoResults<Object>) operations.geoNear(nearQuery, metadata.getJavaType(), metadata.getCollectionName());
|
||||
}
|
||||
|
||||
private boolean isListOfGeoResult() {
|
||||
|
||||
TypeInformation<?> returnType = method.getReturnType();
|
||||
|
||||
if (!returnType.getType().equals(List.class)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
TypeInformation<?> componentType = returnType.getComponentType();
|
||||
return componentType == null ? false : GeoResult.class.equals(componentType.getType());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Execution} removing documents matching the query.
|
||||
*
|
||||
* @since 1.5
|
||||
*/
|
||||
final class DeleteExecution extends Execution {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
Object execute(Query query) {
|
||||
|
||||
MongoEntityMetadata<?> metadata = method.getEntityInformation();
|
||||
return deleteAndConvertResult(query, metadata);
|
||||
}
|
||||
|
||||
private Object deleteAndConvertResult(Query query, MongoEntityMetadata<?> metadata) {
|
||||
|
||||
if (method.isCollectionQuery()) {
|
||||
return operations.findAllAndRemove(query, metadata.getJavaType(), metadata.getCollectionName());
|
||||
}
|
||||
|
||||
WriteResult writeResult = operations.remove(query, metadata.getJavaType(), metadata.getCollectionName());
|
||||
return writeResult != null ? writeResult.getN() : 0L;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Thomas Darimont
|
||||
* @since 1.7
|
||||
*/
|
||||
final class StreamExecution extends Execution {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
Object execute(Query query) {
|
||||
|
||||
Class<?> entityType = getQueryMethod().getEntityInformation().getJavaType();
|
||||
|
||||
return StreamUtils.createStreamFromIterator((CloseableIterator<Object>) operations.stream(query, entityType));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.